merge
This commit is contained in:
parent
181b5539f6
commit
e51d1bfade
172 changed files with 49629 additions and 49629 deletions
100
.travis.yml
100
.travis.yml
|
|
@ -1,50 +1,50 @@
|
|||
language: python
|
||||
sudo: false
|
||||
services:
|
||||
- docker
|
||||
python:
|
||||
- 2.7
|
||||
- 3.6
|
||||
env:
|
||||
- TEST_SERVER_MODE=false
|
||||
- TEST_SERVER_MODE=true
|
||||
# Due to incomplete Python 3.7 support on Travis CI (
|
||||
# https://github.com/travis-ci/travis-ci/issues/9815),
|
||||
# using a matrix is necessary
|
||||
matrix:
|
||||
include:
|
||||
- python: 3.7
|
||||
env: TEST_SERVER_MODE=false
|
||||
dist: xenial
|
||||
sudo: true
|
||||
- python: 3.7
|
||||
env: TEST_SERVER_MODE=true
|
||||
dist: xenial
|
||||
sudo: true
|
||||
before_install:
|
||||
- export BOTO_CONFIG=/dev/null
|
||||
install:
|
||||
# We build moto first so the docker container doesn't try to compile it as well, also note we don't use
|
||||
# -d for docker run so the logs show up in travis
|
||||
# Python images come from here: https://hub.docker.com/_/python/
|
||||
- |
|
||||
python setup.py sdist
|
||||
|
||||
if [ "$TEST_SERVER_MODE" = "true" ]; then
|
||||
docker run --rm -t --name motoserver -e TEST_SERVER_MODE=true -e AWS_SECRET_ACCESS_KEY=server_secret -e AWS_ACCESS_KEY_ID=server_key -v `pwd`:/moto -p 5000:5000 -v /var/run/docker.sock:/var/run/docker.sock python:${TRAVIS_PYTHON_VERSION}-stretch /moto/travis_moto_server.sh &
|
||||
export AWS_SECRET_ACCESS_KEY=foobar_secret
|
||||
export AWS_ACCESS_KEY_ID=foobar_key
|
||||
fi
|
||||
travis_retry pip install boto==2.45.0
|
||||
travis_retry pip install boto3
|
||||
travis_retry pip install dist/moto*.gz
|
||||
travis_retry pip install coveralls==1.1
|
||||
travis_retry pip install -r requirements-dev.txt
|
||||
|
||||
if [ "$TEST_SERVER_MODE" = "true" ]; then
|
||||
python wait_for.py
|
||||
fi
|
||||
script:
|
||||
- make test
|
||||
after_success:
|
||||
- coveralls
|
||||
pyhlanguage: python
|
||||
sudo: false
|
||||
services:
|
||||
- docker
|
||||
python:
|
||||
- 2.7
|
||||
- 3.6
|
||||
env:
|
||||
- TEST_SERVER_MODE=false
|
||||
- TEST_SERVER_MODE=true
|
||||
# Due to incomplete Python 3.7 support on Travis CI (
|
||||
# https://github.com/travis-ci/travis-ci/issues/9815),
|
||||
# using a matrix is necessary
|
||||
matrix:
|
||||
include:
|
||||
- python: 3.7
|
||||
env: TEST_SERVER_MODE=false
|
||||
dist: xenial
|
||||
sudo: true
|
||||
- python: 3.7
|
||||
env: TEST_SERVER_MODE=true
|
||||
dist: xenial
|
||||
sudo: true
|
||||
before_install:
|
||||
- export BOTO_CONFIG=/dev/null
|
||||
install:
|
||||
# We build moto first so the docker container doesn't try to compile it as well, also note we don't use
|
||||
# -d for docker run so the logs show up in travis
|
||||
# Python images come from here: https://hub.docker.com/_/python/
|
||||
- |
|
||||
python setup.py sdist
|
||||
|
||||
if [ "$TEST_SERVER_MODE" = "true" ]; then
|
||||
docker run --rm -t --name motoserver -e TEST_SERVER_MODE=true -e AWS_SECRET_ACCESS_KEY=server_secret -e AWS_ACCESS_KEY_ID=server_key -v `pwd`:/moto -p 5000:5000 -v /var/run/docker.sock:/var/run/docker.sock python:${TRAVIS_PYTHON_VERSION}-stretch /moto/travis_moto_server.sh &
|
||||
export AWS_SECRET_ACCESS_KEY=foobar_secret
|
||||
export AWS_ACCESS_KEY_ID=foobar_key
|
||||
fi
|
||||
travis_retry pip install boto==2.45.0
|
||||
travis_retry pip install boto3
|
||||
travis_retry pip install dist/moto*.gz
|
||||
travis_retry pip install coveralls==1.1
|
||||
travis_retry pip install -r requirements-dev.txt
|
||||
|
||||
if [ "$TEST_SERVER_MODE" = "true" ]; then
|
||||
python wait_for.py
|
||||
fi
|
||||
script:
|
||||
- make test
|
||||
after_success:
|
||||
- coveralls
|
||||
|
|
|
|||
|
|
@ -1,17 +1,17 @@
|
|||
-r requirements.txt
|
||||
mock
|
||||
nose
|
||||
sure==1.4.11
|
||||
coverage
|
||||
flake8==3.5.0
|
||||
freezegun
|
||||
flask
|
||||
boto>=2.45.0
|
||||
boto3>=1.4.4
|
||||
botocore>=1.8.36
|
||||
six>=1.9
|
||||
prompt-toolkit==1.0.14
|
||||
click==6.7
|
||||
inflection==0.3.1
|
||||
lxml==4.2.3
|
||||
beautifulsoup4==4.6.0
|
||||
-r requirements.txt
|
||||
mock
|
||||
nose
|
||||
sure==1.4.11
|
||||
coverage
|
||||
flake8==3.5.0
|
||||
freezegun
|
||||
flask
|
||||
boto>=2.45.0
|
||||
boto3>=1.4.4
|
||||
botocore>=1.12.13
|
||||
six>=1.9
|
||||
prompt-toolkit==1.0.14
|
||||
click==6.7
|
||||
inflection==0.3.1
|
||||
lxml==4.2.3
|
||||
beautifulsoup4==4.6.0
|
||||
142
setup.py
142
setup.py
|
|
@ -1,71 +1,71 @@
|
|||
#!/usr/bin/env python
|
||||
from __future__ import unicode_literals
|
||||
import setuptools
|
||||
from setuptools import setup, find_packages
|
||||
import sys
|
||||
|
||||
|
||||
install_requires = [
|
||||
"Jinja2>=2.7.3",
|
||||
"boto>=2.36.0",
|
||||
"boto3>=1.6.16,<1.8",
|
||||
"botocore>=1.9.16,<1.11",
|
||||
"cryptography>=2.3.0",
|
||||
"requests>=2.5",
|
||||
"xmltodict",
|
||||
"six>1.9",
|
||||
"werkzeug",
|
||||
"pyaml",
|
||||
"pytz",
|
||||
"python-dateutil<3.0.0,>=2.1",
|
||||
"python-jose<3.0.0",
|
||||
"mock",
|
||||
"docker>=2.5.1",
|
||||
"jsondiff==1.1.1",
|
||||
"aws-xray-sdk<0.96,>=0.93",
|
||||
"responses>=0.9.0",
|
||||
]
|
||||
|
||||
extras_require = {
|
||||
'server': ['flask'],
|
||||
}
|
||||
|
||||
# https://hynek.me/articles/conditional-python-dependencies/
|
||||
if int(setuptools.__version__.split(".", 1)[0]) < 18:
|
||||
if sys.version_info[0:2] < (3, 3):
|
||||
install_requires.append("backports.tempfile")
|
||||
else:
|
||||
extras_require[":python_version<'3.3'"] = ["backports.tempfile"]
|
||||
|
||||
|
||||
setup(
|
||||
name='moto',
|
||||
version='1.3.6',
|
||||
description='A library that allows your python tests to easily'
|
||||
' mock out the boto library',
|
||||
author='Steve Pulec',
|
||||
author_email='spulec@gmail.com',
|
||||
url='https://github.com/spulec/moto',
|
||||
entry_points={
|
||||
'console_scripts': [
|
||||
'moto_server = moto.server:main',
|
||||
],
|
||||
},
|
||||
packages=find_packages(exclude=("tests", "tests.*")),
|
||||
install_requires=install_requires,
|
||||
extras_require=extras_require,
|
||||
include_package_data=True,
|
||||
license="Apache",
|
||||
test_suite="tests",
|
||||
classifiers=[
|
||||
"Programming Language :: Python :: 2",
|
||||
"Programming Language :: Python :: 2.7",
|
||||
"Programming Language :: Python :: 3",
|
||||
"Programming Language :: Python :: 3.3",
|
||||
"Programming Language :: Python :: 3.4",
|
||||
"Programming Language :: Python :: 3.5",
|
||||
"Programming Language :: Python :: 3.6",
|
||||
"License :: OSI Approved :: Apache Software License",
|
||||
"Topic :: Software Development :: Testing",
|
||||
],
|
||||
)
|
||||
#!/usr/bin/env python
|
||||
from __future__ import unicode_literals
|
||||
import setuptools
|
||||
from setuptools import setup, find_packages
|
||||
import sys
|
||||
|
||||
|
||||
install_requires = [
|
||||
"Jinja2>=2.7.3",
|
||||
"boto>=2.36.0",
|
||||
"boto3>=1.6.16",
|
||||
"botocore>=1.12.13",
|
||||
"cryptography>=2.3.0",
|
||||
"requests>=2.5",
|
||||
"xmltodict",
|
||||
"six>1.9",
|
||||
"werkzeug",
|
||||
"pyaml",
|
||||
"pytz",
|
||||
"python-dateutil<3.0.0,>=2.1",
|
||||
"python-jose<3.0.0",
|
||||
"mock",
|
||||
"docker>=2.5.1",
|
||||
"jsondiff==1.1.1",
|
||||
"aws-xray-sdk!=0.96,>=0.93",
|
||||
"responses>=0.9.0",
|
||||
]
|
||||
|
||||
extras_require = {
|
||||
'server': ['flask'],
|
||||
}
|
||||
|
||||
# https://hynek.me/articles/conditional-python-dependencies/
|
||||
if int(setuptools.__version__.split(".", 1)[0]) < 18:
|
||||
if sys.version_info[0:2] < (3, 3):
|
||||
install_requires.append("backports.tempfile")
|
||||
else:
|
||||
extras_require[":python_version<'3.3'"] = ["backports.tempfile"]
|
||||
|
||||
|
||||
setup(
|
||||
name='moto',
|
||||
version='1.3.7',
|
||||
description='A library that allows your python tests to easily'
|
||||
' mock out the boto library',
|
||||
author='Steve Pulec',
|
||||
author_email='spulec@gmail.com',
|
||||
url='https://github.com/spulec/moto',
|
||||
entry_points={
|
||||
'console_scripts': [
|
||||
'moto_server = moto.server:main',
|
||||
],
|
||||
},
|
||||
packages=find_packages(exclude=("tests", "tests.*")),
|
||||
install_requires=install_requires,
|
||||
extras_require=extras_require,
|
||||
include_package_data=True,
|
||||
license="Apache",
|
||||
test_suite="tests",
|
||||
classifiers=[
|
||||
"Programming Language :: Python :: 2",
|
||||
"Programming Language :: Python :: 2.7",
|
||||
"Programming Language :: Python :: 3",
|
||||
"Programming Language :: Python :: 3.3",
|
||||
"Programming Language :: Python :: 3.4",
|
||||
"Programming Language :: Python :: 3.5",
|
||||
"Programming Language :: Python :: 3.6",
|
||||
"License :: OSI Approved :: Apache Software License",
|
||||
"Topic :: Software Development :: Testing",
|
||||
],
|
||||
)
|
||||
|
|
@ -1,345 +1,345 @@
|
|||
from __future__ import unicode_literals
|
||||
|
||||
template = {
|
||||
"Description": "AWS CloudFormation Sample Template Gollum_Single_Instance_With_EBS_Volume: Gollum is a simple wiki system built on top of Git that powers GitHub Wikis. This template installs a Gollum Wiki stack on a single EC2 instance with an EBS volume for storage and demonstrates using the AWS CloudFormation bootstrap scripts to install the packages and files necessary at instance launch time. **WARNING** This template creates an Amazon EC2 instance and an EBS volume. You will be billed for the AWS resources used if you create a stack from this template.",
|
||||
"Parameters": {
|
||||
"SSHLocation": {
|
||||
"ConstraintDescription": "must be a valid IP CIDR range of the form x.x.x.x/x.",
|
||||
"Description": "The IP address range that can be used to SSH to the EC2 instances",
|
||||
"Default": "0.0.0.0/0",
|
||||
"MinLength": "9",
|
||||
"AllowedPattern": "(\\d{1,3})\\.(\\d{1,3})\\.(\\d{1,3})\\.(\\d{1,3})/(\\d{1,2})",
|
||||
"MaxLength": "18",
|
||||
"Type": "String"
|
||||
},
|
||||
"KeyName": {
|
||||
"Type": "String",
|
||||
"Description": "Name of an existing EC2 KeyPair to enable SSH access to the instances",
|
||||
"MinLength": "1",
|
||||
"AllowedPattern": "[\\x20-\\x7E]*",
|
||||
"MaxLength": "255",
|
||||
"ConstraintDescription": "can contain only ASCII characters."
|
||||
},
|
||||
"InstanceType": {
|
||||
"Default": "m1.small",
|
||||
"ConstraintDescription": "must be a valid EC2 instance type.",
|
||||
"Type": "String",
|
||||
"Description": "WebServer EC2 instance type",
|
||||
"AllowedValues": [
|
||||
"t1.micro",
|
||||
"m1.small",
|
||||
"m1.medium",
|
||||
"m1.large",
|
||||
"m1.xlarge",
|
||||
"m2.xlarge",
|
||||
"m2.2xlarge",
|
||||
"m2.4xlarge",
|
||||
"m3.xlarge",
|
||||
"m3.2xlarge",
|
||||
"c1.medium",
|
||||
"c1.xlarge",
|
||||
"cc1.4xlarge",
|
||||
"cc2.8xlarge",
|
||||
"cg1.4xlarge"
|
||||
]
|
||||
},
|
||||
"VolumeSize": {
|
||||
"Description": "WebServer EC2 instance type",
|
||||
"Default": "5",
|
||||
"Type": "Number",
|
||||
"MaxValue": "1024",
|
||||
"MinValue": "5",
|
||||
"ConstraintDescription": "must be between 5 and 1024 Gb."
|
||||
}
|
||||
},
|
||||
"AWSTemplateFormatVersion": "2010-09-09",
|
||||
"Outputs": {
|
||||
"WebsiteURL": {
|
||||
"Description": "URL for Gollum wiki",
|
||||
"Value": {
|
||||
"Fn::Join": [
|
||||
"",
|
||||
[
|
||||
"http://",
|
||||
{
|
||||
"Fn::GetAtt": [
|
||||
"WebServer",
|
||||
"PublicDnsName"
|
||||
]
|
||||
}
|
||||
]
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
"Resources": {
|
||||
"WebServerSecurityGroup": {
|
||||
"Type": "AWS::EC2::SecurityGroup",
|
||||
"Properties": {
|
||||
"SecurityGroupIngress": [
|
||||
{
|
||||
"ToPort": "80",
|
||||
"IpProtocol": "tcp",
|
||||
"CidrIp": "0.0.0.0/0",
|
||||
"FromPort": "80"
|
||||
},
|
||||
{
|
||||
"ToPort": "22",
|
||||
"IpProtocol": "tcp",
|
||||
"CidrIp": {
|
||||
"Ref": "SSHLocation"
|
||||
},
|
||||
"FromPort": "22"
|
||||
}
|
||||
],
|
||||
"GroupDescription": "Enable SSH access and HTTP access on the inbound port"
|
||||
}
|
||||
},
|
||||
"WebServer": {
|
||||
"Type": "AWS::EC2::Instance",
|
||||
"Properties": {
|
||||
"UserData": {
|
||||
"Fn::Base64": {
|
||||
"Fn::Join": [
|
||||
"",
|
||||
[
|
||||
"#!/bin/bash -v\n",
|
||||
"yum update -y aws-cfn-bootstrap\n",
|
||||
"# Helper function\n",
|
||||
"function error_exit\n",
|
||||
"{\n",
|
||||
" /opt/aws/bin/cfn-signal -e 1 -r \"$1\" '",
|
||||
{
|
||||
"Ref": "WaitHandle"
|
||||
},
|
||||
"'\n",
|
||||
" exit 1\n",
|
||||
"}\n",
|
||||
"# Install Rails packages\n",
|
||||
"/opt/aws/bin/cfn-init -s ",
|
||||
{
|
||||
"Ref": "AWS::StackId"
|
||||
},
|
||||
" -r WebServer ",
|
||||
" --region ",
|
||||
{
|
||||
"Ref": "AWS::Region"
|
||||
},
|
||||
" || error_exit 'Failed to run cfn-init'\n",
|
||||
"# Wait for the EBS volume to show up\n",
|
||||
"while [ ! -e /dev/sdh ]; do echo Waiting for EBS volume to attach; sleep 5; done\n",
|
||||
"# Format the EBS volume and mount it\n",
|
||||
"mkdir /var/wikidata\n",
|
||||
"/sbin/mkfs -t ext3 /dev/sdh1\n",
|
||||
"mount /dev/sdh1 /var/wikidata\n",
|
||||
"# Initialize the wiki and fire up the server\n",
|
||||
"cd /var/wikidata\n",
|
||||
"git init\n",
|
||||
"gollum --port 80 --host 0.0.0.0 &\n",
|
||||
"# If all is well so signal success\n",
|
||||
"/opt/aws/bin/cfn-signal -e $? -r \"Rails application setup complete\" '",
|
||||
{
|
||||
"Ref": "WaitHandle"
|
||||
},
|
||||
"'\n"
|
||||
]
|
||||
]
|
||||
}
|
||||
},
|
||||
"KeyName": {
|
||||
"Ref": "KeyName"
|
||||
},
|
||||
"SecurityGroups": [
|
||||
{
|
||||
"Ref": "WebServerSecurityGroup"
|
||||
}
|
||||
],
|
||||
"InstanceType": {
|
||||
"Ref": "InstanceType"
|
||||
},
|
||||
"ImageId": {
|
||||
"Fn::FindInMap": [
|
||||
"AWSRegionArch2AMI",
|
||||
{
|
||||
"Ref": "AWS::Region"
|
||||
},
|
||||
{
|
||||
"Fn::FindInMap": [
|
||||
"AWSInstanceType2Arch",
|
||||
{
|
||||
"Ref": "InstanceType"
|
||||
},
|
||||
"Arch"
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"Metadata": {
|
||||
"AWS::CloudFormation::Init": {
|
||||
"config": {
|
||||
"packages": {
|
||||
"rubygems": {
|
||||
"nokogiri": [
|
||||
"1.5.10"
|
||||
],
|
||||
"rdiscount": [],
|
||||
"gollum": [
|
||||
"1.1.1"
|
||||
]
|
||||
},
|
||||
"yum": {
|
||||
"libxslt-devel": [],
|
||||
"gcc": [],
|
||||
"git": [],
|
||||
"rubygems": [],
|
||||
"ruby-devel": [],
|
||||
"ruby-rdoc": [],
|
||||
"make": [],
|
||||
"libxml2-devel": []
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"DataVolume": {
|
||||
"Type": "AWS::EC2::Volume",
|
||||
"Properties": {
|
||||
"Tags": [
|
||||
{
|
||||
"Value": "Gollum Data Volume",
|
||||
"Key": "Usage"
|
||||
}
|
||||
],
|
||||
"AvailabilityZone": {
|
||||
"Fn::GetAtt": [
|
||||
"WebServer",
|
||||
"AvailabilityZone"
|
||||
]
|
||||
},
|
||||
"Size": "100",
|
||||
}
|
||||
},
|
||||
"MountPoint": {
|
||||
"Type": "AWS::EC2::VolumeAttachment",
|
||||
"Properties": {
|
||||
"InstanceId": {
|
||||
"Ref": "WebServer"
|
||||
},
|
||||
"Device": "/dev/sdh",
|
||||
"VolumeId": {
|
||||
"Ref": "DataVolume"
|
||||
}
|
||||
}
|
||||
},
|
||||
"WaitCondition": {
|
||||
"DependsOn": "MountPoint",
|
||||
"Type": "AWS::CloudFormation::WaitCondition",
|
||||
"Properties": {
|
||||
"Handle": {
|
||||
"Ref": "WaitHandle"
|
||||
},
|
||||
"Timeout": "300"
|
||||
},
|
||||
"Metadata": {
|
||||
"Comment1": "Note that the WaitCondition is dependent on the volume mount point allowing the volume to be created and attached to the EC2 instance",
|
||||
"Comment2": "The instance bootstrap script waits for the volume to be attached to the instance prior to installing Gollum and signalling completion"
|
||||
}
|
||||
},
|
||||
"WaitHandle": {
|
||||
"Type": "AWS::CloudFormation::WaitConditionHandle"
|
||||
}
|
||||
},
|
||||
"Mappings": {
|
||||
"AWSInstanceType2Arch": {
|
||||
"m3.2xlarge": {
|
||||
"Arch": "64"
|
||||
},
|
||||
"m2.2xlarge": {
|
||||
"Arch": "64"
|
||||
},
|
||||
"m1.small": {
|
||||
"Arch": "64"
|
||||
},
|
||||
"c1.medium": {
|
||||
"Arch": "64"
|
||||
},
|
||||
"cg1.4xlarge": {
|
||||
"Arch": "64HVM"
|
||||
},
|
||||
"m2.xlarge": {
|
||||
"Arch": "64"
|
||||
},
|
||||
"t1.micro": {
|
||||
"Arch": "64"
|
||||
},
|
||||
"cc1.4xlarge": {
|
||||
"Arch": "64HVM"
|
||||
},
|
||||
"m1.medium": {
|
||||
"Arch": "64"
|
||||
},
|
||||
"cc2.8xlarge": {
|
||||
"Arch": "64HVM"
|
||||
},
|
||||
"m1.large": {
|
||||
"Arch": "64"
|
||||
},
|
||||
"m1.xlarge": {
|
||||
"Arch": "64"
|
||||
},
|
||||
"m2.4xlarge": {
|
||||
"Arch": "64"
|
||||
},
|
||||
"c1.xlarge": {
|
||||
"Arch": "64"
|
||||
},
|
||||
"m3.xlarge": {
|
||||
"Arch": "64"
|
||||
}
|
||||
},
|
||||
"AWSRegionArch2AMI": {
|
||||
"ap-southeast-1": {
|
||||
"64HVM": "NOT_YET_SUPPORTED",
|
||||
"32": "ami-b4b0cae6",
|
||||
"64": "ami-beb0caec"
|
||||
},
|
||||
"ap-southeast-2": {
|
||||
"64HVM": "NOT_YET_SUPPORTED",
|
||||
"32": "ami-b3990e89",
|
||||
"64": "ami-bd990e87"
|
||||
},
|
||||
"us-west-2": {
|
||||
"64HVM": "NOT_YET_SUPPORTED",
|
||||
"32": "ami-38fe7308",
|
||||
"64": "ami-30fe7300"
|
||||
},
|
||||
"us-east-1": {
|
||||
"64HVM": "ami-0da96764",
|
||||
"32": "ami-31814f58",
|
||||
"64": "ami-1b814f72"
|
||||
},
|
||||
"ap-northeast-1": {
|
||||
"64HVM": "NOT_YET_SUPPORTED",
|
||||
"32": "ami-0644f007",
|
||||
"64": "ami-0a44f00b"
|
||||
},
|
||||
"us-west-1": {
|
||||
"64HVM": "NOT_YET_SUPPORTED",
|
||||
"32": "ami-11d68a54",
|
||||
"64": "ami-1bd68a5e"
|
||||
},
|
||||
"eu-west-1": {
|
||||
"64HVM": "NOT_YET_SUPPORTED",
|
||||
"32": "ami-973b06e3",
|
||||
"64": "ami-953b06e1"
|
||||
},
|
||||
"sa-east-1": {
|
||||
"64HVM": "NOT_YET_SUPPORTED",
|
||||
"32": "ami-3e3be423",
|
||||
"64": "ami-3c3be421"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
from __future__ import unicode_literals
|
||||
|
||||
template = {
|
||||
"Description": "AWS CloudFormation Sample Template Gollum_Single_Instance_With_EBS_Volume: Gollum is a simple wiki system built on top of Git that powers GitHub Wikis. This template installs a Gollum Wiki stack on a single EC2 instance with an EBS volume for storage and demonstrates using the AWS CloudFormation bootstrap scripts to install the packages and files necessary at instance launch time. **WARNING** This template creates an Amazon EC2 instance and an EBS volume. You will be billed for the AWS resources used if you create a stack from this template.",
|
||||
"Parameters": {
|
||||
"SSHLocation": {
|
||||
"ConstraintDescription": "must be a valid IP CIDR range of the form x.x.x.x/x.",
|
||||
"Description": "The IP address range that can be used to SSH to the EC2 instances",
|
||||
"Default": "0.0.0.0/0",
|
||||
"MinLength": "9",
|
||||
"AllowedPattern": "(\\d{1,3})\\.(\\d{1,3})\\.(\\d{1,3})\\.(\\d{1,3})/(\\d{1,2})",
|
||||
"MaxLength": "18",
|
||||
"Type": "String"
|
||||
},
|
||||
"KeyName": {
|
||||
"Type": "String",
|
||||
"Description": "Name of an existing EC2 KeyPair to enable SSH access to the instances",
|
||||
"MinLength": "1",
|
||||
"AllowedPattern": "[\\x20-\\x7E]*",
|
||||
"MaxLength": "255",
|
||||
"ConstraintDescription": "can contain only ASCII characters."
|
||||
},
|
||||
"InstanceType": {
|
||||
"Default": "m1.small",
|
||||
"ConstraintDescription": "must be a valid EC2 instance type.",
|
||||
"Type": "String",
|
||||
"Description": "WebServer EC2 instance type",
|
||||
"AllowedValues": [
|
||||
"t1.micro",
|
||||
"m1.small",
|
||||
"m1.medium",
|
||||
"m1.large",
|
||||
"m1.xlarge",
|
||||
"m2.xlarge",
|
||||
"m2.2xlarge",
|
||||
"m2.4xlarge",
|
||||
"m3.xlarge",
|
||||
"m3.2xlarge",
|
||||
"c1.medium",
|
||||
"c1.xlarge",
|
||||
"cc1.4xlarge",
|
||||
"cc2.8xlarge",
|
||||
"cg1.4xlarge"
|
||||
]
|
||||
},
|
||||
"VolumeSize": {
|
||||
"Description": "WebServer EC2 instance type",
|
||||
"Default": "5",
|
||||
"Type": "Number",
|
||||
"MaxValue": "1024",
|
||||
"MinValue": "5",
|
||||
"ConstraintDescription": "must be between 5 and 1024 Gb."
|
||||
}
|
||||
},
|
||||
"AWSTemplateFormatVersion": "2010-09-09",
|
||||
"Outputs": {
|
||||
"WebsiteURL": {
|
||||
"Description": "URL for Gollum wiki",
|
||||
"Value": {
|
||||
"Fn::Join": [
|
||||
"",
|
||||
[
|
||||
"http://",
|
||||
{
|
||||
"Fn::GetAtt": [
|
||||
"WebServer",
|
||||
"PublicDnsName"
|
||||
]
|
||||
}
|
||||
]
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
"Resources": {
|
||||
"WebServerSecurityGroup": {
|
||||
"Type": "AWS::EC2::SecurityGroup",
|
||||
"Properties": {
|
||||
"SecurityGroupIngress": [
|
||||
{
|
||||
"ToPort": "80",
|
||||
"IpProtocol": "tcp",
|
||||
"CidrIp": "0.0.0.0/0",
|
||||
"FromPort": "80"
|
||||
},
|
||||
{
|
||||
"ToPort": "22",
|
||||
"IpProtocol": "tcp",
|
||||
"CidrIp": {
|
||||
"Ref": "SSHLocation"
|
||||
},
|
||||
"FromPort": "22"
|
||||
}
|
||||
],
|
||||
"GroupDescription": "Enable SSH access and HTTP access on the inbound port"
|
||||
}
|
||||
},
|
||||
"WebServer": {
|
||||
"Type": "AWS::EC2::Instance",
|
||||
"Properties": {
|
||||
"UserData": {
|
||||
"Fn::Base64": {
|
||||
"Fn::Join": [
|
||||
"",
|
||||
[
|
||||
"#!/bin/bash -v\n",
|
||||
"yum update -y aws-cfn-bootstrap\n",
|
||||
"# Helper function\n",
|
||||
"function error_exit\n",
|
||||
"{\n",
|
||||
" /opt/aws/bin/cfn-signal -e 1 -r \"$1\" '",
|
||||
{
|
||||
"Ref": "WaitHandle"
|
||||
},
|
||||
"'\n",
|
||||
" exit 1\n",
|
||||
"}\n",
|
||||
"# Install Rails packages\n",
|
||||
"/opt/aws/bin/cfn-init -s ",
|
||||
{
|
||||
"Ref": "AWS::StackId"
|
||||
},
|
||||
" -r WebServer ",
|
||||
" --region ",
|
||||
{
|
||||
"Ref": "AWS::Region"
|
||||
},
|
||||
" || error_exit 'Failed to run cfn-init'\n",
|
||||
"# Wait for the EBS volume to show up\n",
|
||||
"while [ ! -e /dev/sdh ]; do echo Waiting for EBS volume to attach; sleep 5; done\n",
|
||||
"# Format the EBS volume and mount it\n",
|
||||
"mkdir /var/wikidata\n",
|
||||
"/sbin/mkfs -t ext3 /dev/sdh1\n",
|
||||
"mount /dev/sdh1 /var/wikidata\n",
|
||||
"# Initialize the wiki and fire up the server\n",
|
||||
"cd /var/wikidata\n",
|
||||
"git init\n",
|
||||
"gollum --port 80 --host 0.0.0.0 &\n",
|
||||
"# If all is well so signal success\n",
|
||||
"/opt/aws/bin/cfn-signal -e $? -r \"Rails application setup complete\" '",
|
||||
{
|
||||
"Ref": "WaitHandle"
|
||||
},
|
||||
"'\n"
|
||||
]
|
||||
]
|
||||
}
|
||||
},
|
||||
"KeyName": {
|
||||
"Ref": "KeyName"
|
||||
},
|
||||
"SecurityGroups": [
|
||||
{
|
||||
"Ref": "WebServerSecurityGroup"
|
||||
}
|
||||
],
|
||||
"InstanceType": {
|
||||
"Ref": "InstanceType"
|
||||
},
|
||||
"ImageId": {
|
||||
"Fn::FindInMap": [
|
||||
"AWSRegionArch2AMI",
|
||||
{
|
||||
"Ref": "AWS::Region"
|
||||
},
|
||||
{
|
||||
"Fn::FindInMap": [
|
||||
"AWSInstanceType2Arch",
|
||||
{
|
||||
"Ref": "InstanceType"
|
||||
},
|
||||
"Arch"
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"Metadata": {
|
||||
"AWS::CloudFormation::Init": {
|
||||
"config": {
|
||||
"packages": {
|
||||
"rubygems": {
|
||||
"nokogiri": [
|
||||
"1.5.10"
|
||||
],
|
||||
"rdiscount": [],
|
||||
"gollum": [
|
||||
"1.1.1"
|
||||
]
|
||||
},
|
||||
"yum": {
|
||||
"libxslt-devel": [],
|
||||
"gcc": [],
|
||||
"git": [],
|
||||
"rubygems": [],
|
||||
"ruby-devel": [],
|
||||
"ruby-rdoc": [],
|
||||
"make": [],
|
||||
"libxml2-devel": []
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"DataVolume": {
|
||||
"Type": "AWS::EC2::Volume",
|
||||
"Properties": {
|
||||
"Tags": [
|
||||
{
|
||||
"Value": "Gollum Data Volume",
|
||||
"Key": "Usage"
|
||||
}
|
||||
],
|
||||
"AvailabilityZone": {
|
||||
"Fn::GetAtt": [
|
||||
"WebServer",
|
||||
"AvailabilityZone"
|
||||
]
|
||||
},
|
||||
"Size": "100",
|
||||
}
|
||||
},
|
||||
"MountPoint": {
|
||||
"Type": "AWS::EC2::VolumeAttachment",
|
||||
"Properties": {
|
||||
"InstanceId": {
|
||||
"Ref": "WebServer"
|
||||
},
|
||||
"Device": "/dev/sdh",
|
||||
"VolumeId": {
|
||||
"Ref": "DataVolume"
|
||||
}
|
||||
}
|
||||
},
|
||||
"WaitCondition": {
|
||||
"DependsOn": "MountPoint",
|
||||
"Type": "AWS::CloudFormation::WaitCondition",
|
||||
"Properties": {
|
||||
"Handle": {
|
||||
"Ref": "WaitHandle"
|
||||
},
|
||||
"Timeout": "300"
|
||||
},
|
||||
"Metadata": {
|
||||
"Comment1": "Note that the WaitCondition is dependent on the volume mount point allowing the volume to be created and attached to the EC2 instance",
|
||||
"Comment2": "The instance bootstrap script waits for the volume to be attached to the instance prior to installing Gollum and signalling completion"
|
||||
}
|
||||
},
|
||||
"WaitHandle": {
|
||||
"Type": "AWS::CloudFormation::WaitConditionHandle"
|
||||
}
|
||||
},
|
||||
"Mappings": {
|
||||
"AWSInstanceType2Arch": {
|
||||
"m3.2xlarge": {
|
||||
"Arch": "64"
|
||||
},
|
||||
"m2.2xlarge": {
|
||||
"Arch": "64"
|
||||
},
|
||||
"m1.small": {
|
||||
"Arch": "64"
|
||||
},
|
||||
"c1.medium": {
|
||||
"Arch": "64"
|
||||
},
|
||||
"cg1.4xlarge": {
|
||||
"Arch": "64HVM"
|
||||
},
|
||||
"m2.xlarge": {
|
||||
"Arch": "64"
|
||||
},
|
||||
"t1.micro": {
|
||||
"Arch": "64"
|
||||
},
|
||||
"cc1.4xlarge": {
|
||||
"Arch": "64HVM"
|
||||
},
|
||||
"m1.medium": {
|
||||
"Arch": "64"
|
||||
},
|
||||
"cc2.8xlarge": {
|
||||
"Arch": "64HVM"
|
||||
},
|
||||
"m1.large": {
|
||||
"Arch": "64"
|
||||
},
|
||||
"m1.xlarge": {
|
||||
"Arch": "64"
|
||||
},
|
||||
"m2.4xlarge": {
|
||||
"Arch": "64"
|
||||
},
|
||||
"c1.xlarge": {
|
||||
"Arch": "64"
|
||||
},
|
||||
"m3.xlarge": {
|
||||
"Arch": "64"
|
||||
}
|
||||
},
|
||||
"AWSRegionArch2AMI": {
|
||||
"ap-southeast-1": {
|
||||
"64HVM": "NOT_YET_SUPPORTED",
|
||||
"32": "ami-b4b0cae6",
|
||||
"64": "ami-beb0caec"
|
||||
},
|
||||
"ap-southeast-2": {
|
||||
"64HVM": "NOT_YET_SUPPORTED",
|
||||
"32": "ami-b3990e89",
|
||||
"64": "ami-bd990e87"
|
||||
},
|
||||
"us-west-2": {
|
||||
"64HVM": "NOT_YET_SUPPORTED",
|
||||
"32": "ami-38fe7308",
|
||||
"64": "ami-30fe7300"
|
||||
},
|
||||
"us-east-1": {
|
||||
"64HVM": "ami-0da96764",
|
||||
"32": "ami-31814f58",
|
||||
"64": "ami-1b814f72"
|
||||
},
|
||||
"ap-northeast-1": {
|
||||
"64HVM": "NOT_YET_SUPPORTED",
|
||||
"32": "ami-0644f007",
|
||||
"64": "ami-0a44f00b"
|
||||
},
|
||||
"us-west-1": {
|
||||
"64HVM": "NOT_YET_SUPPORTED",
|
||||
"32": "ami-11d68a54",
|
||||
"64": "ami-1bd68a5e"
|
||||
},
|
||||
"eu-west-1": {
|
||||
"64HVM": "NOT_YET_SUPPORTED",
|
||||
"32": "ami-973b06e3",
|
||||
"64": "ami-953b06e1"
|
||||
},
|
||||
"sa-east-1": {
|
||||
"64HVM": "NOT_YET_SUPPORTED",
|
||||
"32": "ami-3e3be423",
|
||||
"64": "ami-3c3be421"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,12 +1,12 @@
|
|||
from __future__ import unicode_literals
|
||||
|
||||
template = {
|
||||
"Resources": {
|
||||
"VPCEIP": {
|
||||
"Type": "AWS::EC2::EIP",
|
||||
"Properties": {
|
||||
"Domain": "vpc"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
from __future__ import unicode_literals
|
||||
|
||||
template = {
|
||||
"Resources": {
|
||||
"VPCEIP": {
|
||||
"Type": "AWS::EC2::EIP",
|
||||
"Properties": {
|
||||
"Domain": "vpc"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,34 +1,34 @@
|
|||
from __future__ import unicode_literals
|
||||
|
||||
template = {
|
||||
"AWSTemplateFormatVersion": "2010-09-09",
|
||||
"Description": "VPC ENI Test CloudFormation",
|
||||
"Resources": {
|
||||
"ENI": {
|
||||
"Type": "AWS::EC2::NetworkInterface",
|
||||
"Properties": {
|
||||
"SubnetId": {"Ref": "Subnet"}
|
||||
}
|
||||
},
|
||||
"Subnet": {
|
||||
"Type": "AWS::EC2::Subnet",
|
||||
"Properties": {
|
||||
"AvailabilityZone": "us-east-1a",
|
||||
"VpcId": {"Ref": "VPC"},
|
||||
"CidrBlock": "10.0.0.0/24"
|
||||
}
|
||||
},
|
||||
"VPC": {
|
||||
"Type": "AWS::EC2::VPC",
|
||||
"Properties": {
|
||||
"CidrBlock": "10.0.0.0/16"
|
||||
}
|
||||
}
|
||||
},
|
||||
"Outputs": {
|
||||
"NinjaENI": {
|
||||
"Description": "Elastic IP mapping to Auto-Scaling Group",
|
||||
"Value": {"Ref": "ENI"}
|
||||
}
|
||||
}
|
||||
}
|
||||
from __future__ import unicode_literals
|
||||
|
||||
template = {
|
||||
"AWSTemplateFormatVersion": "2010-09-09",
|
||||
"Description": "VPC ENI Test CloudFormation",
|
||||
"Resources": {
|
||||
"ENI": {
|
||||
"Type": "AWS::EC2::NetworkInterface",
|
||||
"Properties": {
|
||||
"SubnetId": {"Ref": "Subnet"}
|
||||
}
|
||||
},
|
||||
"Subnet": {
|
||||
"Type": "AWS::EC2::Subnet",
|
||||
"Properties": {
|
||||
"AvailabilityZone": "us-east-1a",
|
||||
"VpcId": {"Ref": "VPC"},
|
||||
"CidrBlock": "10.0.0.0/24"
|
||||
}
|
||||
},
|
||||
"VPC": {
|
||||
"Type": "AWS::EC2::VPC",
|
||||
"Properties": {
|
||||
"CidrBlock": "10.0.0.0/16"
|
||||
}
|
||||
}
|
||||
},
|
||||
"Outputs": {
|
||||
"NinjaENI": {
|
||||
"Description": "Elastic IP mapping to Auto-Scaling Group",
|
||||
"Value": {"Ref": "ENI"}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,408 +1,408 @@
|
|||
from __future__ import unicode_literals
|
||||
|
||||
template = {
|
||||
"Description": "AWS CloudFormation Sample Template vpc_single_instance_in_subnet.template: Sample template showing how to create a VPC and add an EC2 instance with an Elastic IP address and a security group. **WARNING** This template creates an Amazon EC2 instance. You will be billed for the AWS resources used if you create a stack from this template.",
|
||||
"Parameters": {
|
||||
"SSHLocation": {
|
||||
"ConstraintDescription": "must be a valid IP CIDR range of the form x.x.x.x/x.",
|
||||
"Description": " The IP address range that can be used to SSH to the EC2 instances",
|
||||
"Default": "0.0.0.0/0",
|
||||
"MinLength": "9",
|
||||
"AllowedPattern": "(\\d{1,3})\\.(\\d{1,3})\\.(\\d{1,3})\\.(\\d{1,3})/(\\d{1,2})",
|
||||
"MaxLength": "18",
|
||||
"Type": "String"
|
||||
},
|
||||
"KeyName": {
|
||||
"Type": "String",
|
||||
"Description": "Name of an existing EC2 KeyPair to enable SSH access to the instance",
|
||||
"MinLength": "1",
|
||||
"AllowedPattern": "[\\x20-\\x7E]*",
|
||||
"MaxLength": "255",
|
||||
"ConstraintDescription": "can contain only ASCII characters."
|
||||
},
|
||||
"InstanceType": {
|
||||
"Default": "m1.small",
|
||||
"ConstraintDescription": "must be a valid EC2 instance type.",
|
||||
"Type": "String",
|
||||
"Description": "WebServer EC2 instance type",
|
||||
"AllowedValues": [
|
||||
"t1.micro",
|
||||
"m1.small",
|
||||
"m1.medium",
|
||||
"m1.large",
|
||||
"m1.xlarge",
|
||||
"m2.xlarge",
|
||||
"m2.2xlarge",
|
||||
"m2.4xlarge",
|
||||
"m3.xlarge",
|
||||
"m3.2xlarge",
|
||||
"c1.medium",
|
||||
"c1.xlarge",
|
||||
"cc1.4xlarge",
|
||||
"cc2.8xlarge",
|
||||
"cg1.4xlarge"
|
||||
]
|
||||
}
|
||||
},
|
||||
"AWSTemplateFormatVersion": "2010-09-09",
|
||||
"Outputs": {
|
||||
"URL": {
|
||||
"Description": "Newly created application URL",
|
||||
"Value": {
|
||||
"Fn::Join": [
|
||||
"",
|
||||
[
|
||||
"http://",
|
||||
{
|
||||
"Fn::GetAtt": [
|
||||
"WebServerInstance",
|
||||
"PublicIp"
|
||||
]
|
||||
}
|
||||
]
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
"Resources": {
|
||||
"Subnet": {
|
||||
"Type": "AWS::EC2::Subnet",
|
||||
"Properties": {
|
||||
"VpcId": {
|
||||
"Ref": "VPC"
|
||||
},
|
||||
"CidrBlock": "10.0.0.0/24",
|
||||
"Tags": [
|
||||
{
|
||||
"Value": {
|
||||
"Ref": "AWS::StackId"
|
||||
},
|
||||
"Key": "Application"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"WebServerWaitHandle": {
|
||||
"Type": "AWS::CloudFormation::WaitConditionHandle"
|
||||
},
|
||||
"Route": {
|
||||
"Type": "AWS::EC2::Route",
|
||||
"Properties": {
|
||||
"GatewayId": {
|
||||
"Ref": "InternetGateway"
|
||||
},
|
||||
"DestinationCidrBlock": "0.0.0.0/0",
|
||||
"RouteTableId": {
|
||||
"Ref": "RouteTable"
|
||||
}
|
||||
},
|
||||
"DependsOn": "AttachGateway"
|
||||
},
|
||||
"SubnetRouteTableAssociation": {
|
||||
"Type": "AWS::EC2::SubnetRouteTableAssociation",
|
||||
"Properties": {
|
||||
"SubnetId": {
|
||||
"Ref": "Subnet"
|
||||
},
|
||||
"RouteTableId": {
|
||||
"Ref": "RouteTable"
|
||||
}
|
||||
}
|
||||
},
|
||||
"InternetGateway": {
|
||||
"Type": "AWS::EC2::InternetGateway",
|
||||
"Properties": {
|
||||
"Tags": [
|
||||
{
|
||||
"Value": {
|
||||
"Ref": "AWS::StackId"
|
||||
},
|
||||
"Key": "Application"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"RouteTable": {
|
||||
"Type": "AWS::EC2::RouteTable",
|
||||
"Properties": {
|
||||
"VpcId": {
|
||||
"Ref": "VPC"
|
||||
},
|
||||
"Tags": [
|
||||
{
|
||||
"Value": {
|
||||
"Ref": "AWS::StackId"
|
||||
},
|
||||
"Key": "Application"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"WebServerWaitCondition": {
|
||||
"Type": "AWS::CloudFormation::WaitCondition",
|
||||
"Properties": {
|
||||
"Handle": {
|
||||
"Ref": "WebServerWaitHandle"
|
||||
},
|
||||
"Timeout": "300"
|
||||
},
|
||||
"DependsOn": "WebServerInstance"
|
||||
},
|
||||
"VPC": {
|
||||
"Type": "AWS::EC2::VPC",
|
||||
"Properties": {
|
||||
"CidrBlock": "10.0.0.0/16",
|
||||
"Tags": [
|
||||
{
|
||||
"Value": {
|
||||
"Ref": "AWS::StackId"
|
||||
},
|
||||
"Key": "Application"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"InstanceSecurityGroup": {
|
||||
"Type": "AWS::EC2::SecurityGroup",
|
||||
"Properties": {
|
||||
"SecurityGroupIngress": [
|
||||
{
|
||||
"ToPort": "22",
|
||||
"IpProtocol": "tcp",
|
||||
"CidrIp": {
|
||||
"Ref": "SSHLocation"
|
||||
},
|
||||
"FromPort": "22"
|
||||
},
|
||||
{
|
||||
"ToPort": "80",
|
||||
"IpProtocol": "tcp",
|
||||
"CidrIp": "0.0.0.0/0",
|
||||
"FromPort": "80"
|
||||
}
|
||||
],
|
||||
"VpcId": {
|
||||
"Ref": "VPC"
|
||||
},
|
||||
"GroupDescription": "Enable SSH access via port 22"
|
||||
}
|
||||
},
|
||||
"WebServerInstance": {
|
||||
"Type": "AWS::EC2::Instance",
|
||||
"Properties": {
|
||||
"UserData": {
|
||||
"Fn::Base64": {
|
||||
"Fn::Join": [
|
||||
"",
|
||||
[
|
||||
"#!/bin/bash\n",
|
||||
"yum update -y aws-cfn-bootstrap\n",
|
||||
"# Helper function\n",
|
||||
"function error_exit\n",
|
||||
"{\n",
|
||||
" /opt/aws/bin/cfn-signal -e 1 -r \"$1\" '",
|
||||
{
|
||||
"Ref": "WebServerWaitHandle"
|
||||
},
|
||||
"'\n",
|
||||
" exit 1\n",
|
||||
"}\n",
|
||||
"# Install the simple web page\n",
|
||||
"/opt/aws/bin/cfn-init -s ",
|
||||
{
|
||||
"Ref": "AWS::StackId"
|
||||
},
|
||||
" -r WebServerInstance ",
|
||||
" --region ",
|
||||
{
|
||||
"Ref": "AWS::Region"
|
||||
},
|
||||
" || error_exit 'Failed to run cfn-init'\n",
|
||||
"# Start up the cfn-hup daemon to listen for changes to the Web Server metadata\n",
|
||||
"/opt/aws/bin/cfn-hup || error_exit 'Failed to start cfn-hup'\n",
|
||||
"# All done so signal success\n",
|
||||
"/opt/aws/bin/cfn-signal -e 0 -r \"WebServer setup complete\" '",
|
||||
{
|
||||
"Ref": "WebServerWaitHandle"
|
||||
},
|
||||
"'\n"
|
||||
]
|
||||
]
|
||||
}
|
||||
},
|
||||
"Tags": [
|
||||
{
|
||||
"Value": {
|
||||
"Ref": "AWS::StackId"
|
||||
},
|
||||
"Key": "Application"
|
||||
},
|
||||
{
|
||||
"Value": "Bar",
|
||||
"Key": "Foo"
|
||||
}
|
||||
],
|
||||
"SecurityGroupIds": [
|
||||
{
|
||||
"Ref": "InstanceSecurityGroup"
|
||||
}
|
||||
],
|
||||
"KeyName": {
|
||||
"Ref": "KeyName"
|
||||
},
|
||||
"SubnetId": {
|
||||
"Ref": "Subnet"
|
||||
},
|
||||
"ImageId": {
|
||||
"Fn::FindInMap": [
|
||||
"RegionMap",
|
||||
{
|
||||
"Ref": "AWS::Region"
|
||||
},
|
||||
"AMI"
|
||||
]
|
||||
},
|
||||
"InstanceType": {
|
||||
"Ref": "InstanceType"
|
||||
}
|
||||
},
|
||||
"Metadata": {
|
||||
"Comment": "Install a simple PHP application",
|
||||
"AWS::CloudFormation::Init": {
|
||||
"config": {
|
||||
"files": {
|
||||
"/etc/cfn/cfn-hup.conf": {
|
||||
"content": {
|
||||
"Fn::Join": [
|
||||
"",
|
||||
[
|
||||
"[main]\n",
|
||||
"stack=",
|
||||
{
|
||||
"Ref": "AWS::StackId"
|
||||
},
|
||||
"\n",
|
||||
"region=",
|
||||
{
|
||||
"Ref": "AWS::Region"
|
||||
},
|
||||
"\n"
|
||||
]
|
||||
]
|
||||
},
|
||||
"owner": "root",
|
||||
"group": "root",
|
||||
"mode": "000400"
|
||||
},
|
||||
"/etc/cfn/hooks.d/cfn-auto-reloader.conf": {
|
||||
"content": {
|
||||
"Fn::Join": [
|
||||
"",
|
||||
[
|
||||
"[cfn-auto-reloader-hook]\n",
|
||||
"triggers=post.update\n",
|
||||
"path=Resources.WebServerInstance.Metadata.AWS::CloudFormation::Init\n",
|
||||
"action=/opt/aws/bin/cfn-init -s ",
|
||||
{
|
||||
"Ref": "AWS::StackId"
|
||||
},
|
||||
" -r WebServerInstance ",
|
||||
" --region ",
|
||||
{
|
||||
"Ref": "AWS::Region"
|
||||
},
|
||||
"\n",
|
||||
"runas=root\n"
|
||||
]
|
||||
]
|
||||
}
|
||||
},
|
||||
"/var/www/html/index.php": {
|
||||
"content": {
|
||||
"Fn::Join": [
|
||||
"",
|
||||
[
|
||||
"<?php\n",
|
||||
"echo '<h1>AWS CloudFormation sample PHP application</h1>';\n",
|
||||
"?>\n"
|
||||
]
|
||||
]
|
||||
},
|
||||
"owner": "apache",
|
||||
"group": "apache",
|
||||
"mode": "000644"
|
||||
}
|
||||
},
|
||||
"services": {
|
||||
"sysvinit": {
|
||||
"httpd": {
|
||||
"ensureRunning": "true",
|
||||
"enabled": "true"
|
||||
},
|
||||
"sendmail": {
|
||||
"ensureRunning": "false",
|
||||
"enabled": "false"
|
||||
}
|
||||
}
|
||||
},
|
||||
"packages": {
|
||||
"yum": {
|
||||
"httpd": [],
|
||||
"php": []
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"IPAddress": {
|
||||
"Type": "AWS::EC2::EIP",
|
||||
"Properties": {
|
||||
"InstanceId": {
|
||||
"Ref": "WebServerInstance"
|
||||
},
|
||||
"Domain": "vpc"
|
||||
},
|
||||
"DependsOn": "AttachGateway"
|
||||
},
|
||||
"AttachGateway": {
|
||||
"Type": "AWS::EC2::VPCGatewayAttachment",
|
||||
"Properties": {
|
||||
"VpcId": {
|
||||
"Ref": "VPC"
|
||||
},
|
||||
"InternetGatewayId": {
|
||||
"Ref": "InternetGateway"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"Mappings": {
|
||||
"RegionMap": {
|
||||
"ap-southeast-1": {
|
||||
"AMI": "ami-74dda626"
|
||||
},
|
||||
"ap-southeast-2": {
|
||||
"AMI": "ami-b3990e89"
|
||||
},
|
||||
"us-west-2": {
|
||||
"AMI": "ami-16fd7026"
|
||||
},
|
||||
"us-east-1": {
|
||||
"AMI": "ami-7f418316"
|
||||
},
|
||||
"ap-northeast-1": {
|
||||
"AMI": "ami-dcfa4edd"
|
||||
},
|
||||
"us-west-1": {
|
||||
"AMI": "ami-951945d0"
|
||||
},
|
||||
"eu-west-1": {
|
||||
"AMI": "ami-24506250"
|
||||
},
|
||||
"sa-east-1": {
|
||||
"AMI": "ami-3e3be423"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
from __future__ import unicode_literals
|
||||
|
||||
template = {
|
||||
"Description": "AWS CloudFormation Sample Template vpc_single_instance_in_subnet.template: Sample template showing how to create a VPC and add an EC2 instance with an Elastic IP address and a security group. **WARNING** This template creates an Amazon EC2 instance. You will be billed for the AWS resources used if you create a stack from this template.",
|
||||
"Parameters": {
|
||||
"SSHLocation": {
|
||||
"ConstraintDescription": "must be a valid IP CIDR range of the form x.x.x.x/x.",
|
||||
"Description": " The IP address range that can be used to SSH to the EC2 instances",
|
||||
"Default": "0.0.0.0/0",
|
||||
"MinLength": "9",
|
||||
"AllowedPattern": "(\\d{1,3})\\.(\\d{1,3})\\.(\\d{1,3})\\.(\\d{1,3})/(\\d{1,2})",
|
||||
"MaxLength": "18",
|
||||
"Type": "String"
|
||||
},
|
||||
"KeyName": {
|
||||
"Type": "String",
|
||||
"Description": "Name of an existing EC2 KeyPair to enable SSH access to the instance",
|
||||
"MinLength": "1",
|
||||
"AllowedPattern": "[\\x20-\\x7E]*",
|
||||
"MaxLength": "255",
|
||||
"ConstraintDescription": "can contain only ASCII characters."
|
||||
},
|
||||
"InstanceType": {
|
||||
"Default": "m1.small",
|
||||
"ConstraintDescription": "must be a valid EC2 instance type.",
|
||||
"Type": "String",
|
||||
"Description": "WebServer EC2 instance type",
|
||||
"AllowedValues": [
|
||||
"t1.micro",
|
||||
"m1.small",
|
||||
"m1.medium",
|
||||
"m1.large",
|
||||
"m1.xlarge",
|
||||
"m2.xlarge",
|
||||
"m2.2xlarge",
|
||||
"m2.4xlarge",
|
||||
"m3.xlarge",
|
||||
"m3.2xlarge",
|
||||
"c1.medium",
|
||||
"c1.xlarge",
|
||||
"cc1.4xlarge",
|
||||
"cc2.8xlarge",
|
||||
"cg1.4xlarge"
|
||||
]
|
||||
}
|
||||
},
|
||||
"AWSTemplateFormatVersion": "2010-09-09",
|
||||
"Outputs": {
|
||||
"URL": {
|
||||
"Description": "Newly created application URL",
|
||||
"Value": {
|
||||
"Fn::Join": [
|
||||
"",
|
||||
[
|
||||
"http://",
|
||||
{
|
||||
"Fn::GetAtt": [
|
||||
"WebServerInstance",
|
||||
"PublicIp"
|
||||
]
|
||||
}
|
||||
]
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
"Resources": {
|
||||
"Subnet": {
|
||||
"Type": "AWS::EC2::Subnet",
|
||||
"Properties": {
|
||||
"VpcId": {
|
||||
"Ref": "VPC"
|
||||
},
|
||||
"CidrBlock": "10.0.0.0/24",
|
||||
"Tags": [
|
||||
{
|
||||
"Value": {
|
||||
"Ref": "AWS::StackId"
|
||||
},
|
||||
"Key": "Application"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"WebServerWaitHandle": {
|
||||
"Type": "AWS::CloudFormation::WaitConditionHandle"
|
||||
},
|
||||
"Route": {
|
||||
"Type": "AWS::EC2::Route",
|
||||
"Properties": {
|
||||
"GatewayId": {
|
||||
"Ref": "InternetGateway"
|
||||
},
|
||||
"DestinationCidrBlock": "0.0.0.0/0",
|
||||
"RouteTableId": {
|
||||
"Ref": "RouteTable"
|
||||
}
|
||||
},
|
||||
"DependsOn": "AttachGateway"
|
||||
},
|
||||
"SubnetRouteTableAssociation": {
|
||||
"Type": "AWS::EC2::SubnetRouteTableAssociation",
|
||||
"Properties": {
|
||||
"SubnetId": {
|
||||
"Ref": "Subnet"
|
||||
},
|
||||
"RouteTableId": {
|
||||
"Ref": "RouteTable"
|
||||
}
|
||||
}
|
||||
},
|
||||
"InternetGateway": {
|
||||
"Type": "AWS::EC2::InternetGateway",
|
||||
"Properties": {
|
||||
"Tags": [
|
||||
{
|
||||
"Value": {
|
||||
"Ref": "AWS::StackId"
|
||||
},
|
||||
"Key": "Application"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"RouteTable": {
|
||||
"Type": "AWS::EC2::RouteTable",
|
||||
"Properties": {
|
||||
"VpcId": {
|
||||
"Ref": "VPC"
|
||||
},
|
||||
"Tags": [
|
||||
{
|
||||
"Value": {
|
||||
"Ref": "AWS::StackId"
|
||||
},
|
||||
"Key": "Application"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"WebServerWaitCondition": {
|
||||
"Type": "AWS::CloudFormation::WaitCondition",
|
||||
"Properties": {
|
||||
"Handle": {
|
||||
"Ref": "WebServerWaitHandle"
|
||||
},
|
||||
"Timeout": "300"
|
||||
},
|
||||
"DependsOn": "WebServerInstance"
|
||||
},
|
||||
"VPC": {
|
||||
"Type": "AWS::EC2::VPC",
|
||||
"Properties": {
|
||||
"CidrBlock": "10.0.0.0/16",
|
||||
"Tags": [
|
||||
{
|
||||
"Value": {
|
||||
"Ref": "AWS::StackId"
|
||||
},
|
||||
"Key": "Application"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"InstanceSecurityGroup": {
|
||||
"Type": "AWS::EC2::SecurityGroup",
|
||||
"Properties": {
|
||||
"SecurityGroupIngress": [
|
||||
{
|
||||
"ToPort": "22",
|
||||
"IpProtocol": "tcp",
|
||||
"CidrIp": {
|
||||
"Ref": "SSHLocation"
|
||||
},
|
||||
"FromPort": "22"
|
||||
},
|
||||
{
|
||||
"ToPort": "80",
|
||||
"IpProtocol": "tcp",
|
||||
"CidrIp": "0.0.0.0/0",
|
||||
"FromPort": "80"
|
||||
}
|
||||
],
|
||||
"VpcId": {
|
||||
"Ref": "VPC"
|
||||
},
|
||||
"GroupDescription": "Enable SSH access via port 22"
|
||||
}
|
||||
},
|
||||
"WebServerInstance": {
|
||||
"Type": "AWS::EC2::Instance",
|
||||
"Properties": {
|
||||
"UserData": {
|
||||
"Fn::Base64": {
|
||||
"Fn::Join": [
|
||||
"",
|
||||
[
|
||||
"#!/bin/bash\n",
|
||||
"yum update -y aws-cfn-bootstrap\n",
|
||||
"# Helper function\n",
|
||||
"function error_exit\n",
|
||||
"{\n",
|
||||
" /opt/aws/bin/cfn-signal -e 1 -r \"$1\" '",
|
||||
{
|
||||
"Ref": "WebServerWaitHandle"
|
||||
},
|
||||
"'\n",
|
||||
" exit 1\n",
|
||||
"}\n",
|
||||
"# Install the simple web page\n",
|
||||
"/opt/aws/bin/cfn-init -s ",
|
||||
{
|
||||
"Ref": "AWS::StackId"
|
||||
},
|
||||
" -r WebServerInstance ",
|
||||
" --region ",
|
||||
{
|
||||
"Ref": "AWS::Region"
|
||||
},
|
||||
" || error_exit 'Failed to run cfn-init'\n",
|
||||
"# Start up the cfn-hup daemon to listen for changes to the Web Server metadata\n",
|
||||
"/opt/aws/bin/cfn-hup || error_exit 'Failed to start cfn-hup'\n",
|
||||
"# All done so signal success\n",
|
||||
"/opt/aws/bin/cfn-signal -e 0 -r \"WebServer setup complete\" '",
|
||||
{
|
||||
"Ref": "WebServerWaitHandle"
|
||||
},
|
||||
"'\n"
|
||||
]
|
||||
]
|
||||
}
|
||||
},
|
||||
"Tags": [
|
||||
{
|
||||
"Value": {
|
||||
"Ref": "AWS::StackId"
|
||||
},
|
||||
"Key": "Application"
|
||||
},
|
||||
{
|
||||
"Value": "Bar",
|
||||
"Key": "Foo"
|
||||
}
|
||||
],
|
||||
"SecurityGroupIds": [
|
||||
{
|
||||
"Ref": "InstanceSecurityGroup"
|
||||
}
|
||||
],
|
||||
"KeyName": {
|
||||
"Ref": "KeyName"
|
||||
},
|
||||
"SubnetId": {
|
||||
"Ref": "Subnet"
|
||||
},
|
||||
"ImageId": {
|
||||
"Fn::FindInMap": [
|
||||
"RegionMap",
|
||||
{
|
||||
"Ref": "AWS::Region"
|
||||
},
|
||||
"AMI"
|
||||
]
|
||||
},
|
||||
"InstanceType": {
|
||||
"Ref": "InstanceType"
|
||||
}
|
||||
},
|
||||
"Metadata": {
|
||||
"Comment": "Install a simple PHP application",
|
||||
"AWS::CloudFormation::Init": {
|
||||
"config": {
|
||||
"files": {
|
||||
"/etc/cfn/cfn-hup.conf": {
|
||||
"content": {
|
||||
"Fn::Join": [
|
||||
"",
|
||||
[
|
||||
"[main]\n",
|
||||
"stack=",
|
||||
{
|
||||
"Ref": "AWS::StackId"
|
||||
},
|
||||
"\n",
|
||||
"region=",
|
||||
{
|
||||
"Ref": "AWS::Region"
|
||||
},
|
||||
"\n"
|
||||
]
|
||||
]
|
||||
},
|
||||
"owner": "root",
|
||||
"group": "root",
|
||||
"mode": "000400"
|
||||
},
|
||||
"/etc/cfn/hooks.d/cfn-auto-reloader.conf": {
|
||||
"content": {
|
||||
"Fn::Join": [
|
||||
"",
|
||||
[
|
||||
"[cfn-auto-reloader-hook]\n",
|
||||
"triggers=post.update\n",
|
||||
"path=Resources.WebServerInstance.Metadata.AWS::CloudFormation::Init\n",
|
||||
"action=/opt/aws/bin/cfn-init -s ",
|
||||
{
|
||||
"Ref": "AWS::StackId"
|
||||
},
|
||||
" -r WebServerInstance ",
|
||||
" --region ",
|
||||
{
|
||||
"Ref": "AWS::Region"
|
||||
},
|
||||
"\n",
|
||||
"runas=root\n"
|
||||
]
|
||||
]
|
||||
}
|
||||
},
|
||||
"/var/www/html/index.php": {
|
||||
"content": {
|
||||
"Fn::Join": [
|
||||
"",
|
||||
[
|
||||
"<?php\n",
|
||||
"echo '<h1>AWS CloudFormation sample PHP application</h1>';\n",
|
||||
"?>\n"
|
||||
]
|
||||
]
|
||||
},
|
||||
"owner": "apache",
|
||||
"group": "apache",
|
||||
"mode": "000644"
|
||||
}
|
||||
},
|
||||
"services": {
|
||||
"sysvinit": {
|
||||
"httpd": {
|
||||
"ensureRunning": "true",
|
||||
"enabled": "true"
|
||||
},
|
||||
"sendmail": {
|
||||
"ensureRunning": "false",
|
||||
"enabled": "false"
|
||||
}
|
||||
}
|
||||
},
|
||||
"packages": {
|
||||
"yum": {
|
||||
"httpd": [],
|
||||
"php": []
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"IPAddress": {
|
||||
"Type": "AWS::EC2::EIP",
|
||||
"Properties": {
|
||||
"InstanceId": {
|
||||
"Ref": "WebServerInstance"
|
||||
},
|
||||
"Domain": "vpc"
|
||||
},
|
||||
"DependsOn": "AttachGateway"
|
||||
},
|
||||
"AttachGateway": {
|
||||
"Type": "AWS::EC2::VPCGatewayAttachment",
|
||||
"Properties": {
|
||||
"VpcId": {
|
||||
"Ref": "VPC"
|
||||
},
|
||||
"InternetGatewayId": {
|
||||
"Ref": "InternetGateway"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"Mappings": {
|
||||
"RegionMap": {
|
||||
"ap-southeast-1": {
|
||||
"AMI": "ami-74dda626"
|
||||
},
|
||||
"ap-southeast-2": {
|
||||
"AMI": "ami-b3990e89"
|
||||
},
|
||||
"us-west-2": {
|
||||
"AMI": "ami-16fd7026"
|
||||
},
|
||||
"us-east-1": {
|
||||
"AMI": "ami-7f418316"
|
||||
},
|
||||
"ap-northeast-1": {
|
||||
"AMI": "ami-dcfa4edd"
|
||||
},
|
||||
"us-west-1": {
|
||||
"AMI": "ami-951945d0"
|
||||
},
|
||||
"eu-west-1": {
|
||||
"AMI": "ami-24506250"
|
||||
},
|
||||
"sa-east-1": {
|
||||
"AMI": "ami-3e3be423"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
|||
File diff suppressed because it is too large
Load diff
File diff suppressed because it is too large
Load diff
File diff suppressed because it is too large
Load diff
|
|
@ -1,87 +1,87 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
from __future__ import absolute_import, division, print_function, unicode_literals
|
||||
|
||||
# Standard library modules
|
||||
import unittest
|
||||
|
||||
# Third-party modules
|
||||
import boto3
|
||||
from botocore.exceptions import ClientError
|
||||
|
||||
# Package modules
|
||||
from moto import mock_cloudformation
|
||||
|
||||
AWS_REGION = 'us-west-1'
|
||||
|
||||
SG_STACK_NAME = 'simple-sg-stack'
|
||||
SG_TEMPLATE = """
|
||||
AWSTemplateFormatVersion: 2010-09-09
|
||||
Description: Simple test CF template for moto_cloudformation
|
||||
|
||||
|
||||
Resources:
|
||||
SimpleSecurityGroup:
|
||||
Type: AWS::EC2::SecurityGroup
|
||||
Description: "A simple security group"
|
||||
Properties:
|
||||
GroupName: simple-security-group
|
||||
GroupDescription: "A simple security group"
|
||||
SecurityGroupEgress:
|
||||
-
|
||||
Description: "Egress to remote HTTPS servers"
|
||||
CidrIp: 0.0.0.0/0
|
||||
IpProtocol: tcp
|
||||
FromPort: 443
|
||||
ToPort: 443
|
||||
|
||||
Outputs:
|
||||
SimpleSecurityGroupName:
|
||||
Value: !GetAtt SimpleSecurityGroup.GroupId
|
||||
Export:
|
||||
Name: "SimpleSecurityGroup"
|
||||
|
||||
"""
|
||||
|
||||
EC2_STACK_NAME = 'simple-ec2-stack'
|
||||
EC2_TEMPLATE = """
|
||||
---
|
||||
# The latest template format version is "2010-09-09" and as of 2018-04-09
|
||||
# is currently the only valid value.
|
||||
AWSTemplateFormatVersion: 2010-09-09
|
||||
Description: Simple test CF template for moto_cloudformation
|
||||
|
||||
|
||||
Resources:
|
||||
SimpleInstance:
|
||||
Type: AWS::EC2::Instance
|
||||
Properties:
|
||||
ImageId: ami-03cf127a
|
||||
InstanceType: t2.micro
|
||||
SecurityGroups: !Split [',', !ImportValue SimpleSecurityGroup]
|
||||
"""
|
||||
|
||||
|
||||
class TestSimpleInstance(unittest.TestCase):
|
||||
def test_simple_instance(self):
|
||||
"""Test that we can create a simple CloudFormation stack that imports values from an existing CloudFormation stack"""
|
||||
with mock_cloudformation():
|
||||
client = boto3.client('cloudformation', region_name=AWS_REGION)
|
||||
client.create_stack(StackName=SG_STACK_NAME, TemplateBody=SG_TEMPLATE)
|
||||
response = client.create_stack(StackName=EC2_STACK_NAME, TemplateBody=EC2_TEMPLATE)
|
||||
self.assertIn('StackId', response)
|
||||
response = client.describe_stacks(StackName=response['StackId'])
|
||||
self.assertIn('Stacks', response)
|
||||
stack_info = response['Stacks']
|
||||
self.assertEqual(1, len(stack_info))
|
||||
self.assertIn('StackName', stack_info[0])
|
||||
self.assertEqual(EC2_STACK_NAME, stack_info[0]['StackName'])
|
||||
|
||||
def test_simple_instance_missing_export(self):
|
||||
"""Test that we get an exception if a CloudFormation stack tries to imports a non-existent export value"""
|
||||
with mock_cloudformation():
|
||||
client = boto3.client('cloudformation', region_name=AWS_REGION)
|
||||
with self.assertRaises(ClientError) as e:
|
||||
client.create_stack(StackName=EC2_STACK_NAME, TemplateBody=EC2_TEMPLATE)
|
||||
self.assertIn('Error', e.exception.response)
|
||||
self.assertIn('Code', e.exception.response['Error'])
|
||||
self.assertEqual('ExportNotFound', e.exception.response['Error']['Code'])
|
||||
# -*- coding: utf-8 -*-
|
||||
from __future__ import absolute_import, division, print_function, unicode_literals
|
||||
|
||||
# Standard library modules
|
||||
import unittest
|
||||
|
||||
# Third-party modules
|
||||
import boto3
|
||||
from botocore.exceptions import ClientError
|
||||
|
||||
# Package modules
|
||||
from moto import mock_cloudformation
|
||||
|
||||
AWS_REGION = 'us-west-1'
|
||||
|
||||
SG_STACK_NAME = 'simple-sg-stack'
|
||||
SG_TEMPLATE = """
|
||||
AWSTemplateFormatVersion: 2010-09-09
|
||||
Description: Simple test CF template for moto_cloudformation
|
||||
|
||||
|
||||
Resources:
|
||||
SimpleSecurityGroup:
|
||||
Type: AWS::EC2::SecurityGroup
|
||||
Description: "A simple security group"
|
||||
Properties:
|
||||
GroupName: simple-security-group
|
||||
GroupDescription: "A simple security group"
|
||||
SecurityGroupEgress:
|
||||
-
|
||||
Description: "Egress to remote HTTPS servers"
|
||||
CidrIp: 0.0.0.0/0
|
||||
IpProtocol: tcp
|
||||
FromPort: 443
|
||||
ToPort: 443
|
||||
|
||||
Outputs:
|
||||
SimpleSecurityGroupName:
|
||||
Value: !GetAtt SimpleSecurityGroup.GroupId
|
||||
Export:
|
||||
Name: "SimpleSecurityGroup"
|
||||
|
||||
"""
|
||||
|
||||
EC2_STACK_NAME = 'simple-ec2-stack'
|
||||
EC2_TEMPLATE = """
|
||||
---
|
||||
# The latest template format version is "2010-09-09" and as of 2018-04-09
|
||||
# is currently the only valid value.
|
||||
AWSTemplateFormatVersion: 2010-09-09
|
||||
Description: Simple test CF template for moto_cloudformation
|
||||
|
||||
|
||||
Resources:
|
||||
SimpleInstance:
|
||||
Type: AWS::EC2::Instance
|
||||
Properties:
|
||||
ImageId: ami-03cf127a
|
||||
InstanceType: t2.micro
|
||||
SecurityGroups: !Split [',', !ImportValue SimpleSecurityGroup]
|
||||
"""
|
||||
|
||||
|
||||
class TestSimpleInstance(unittest.TestCase):
|
||||
def test_simple_instance(self):
|
||||
"""Test that we can create a simple CloudFormation stack that imports values from an existing CloudFormation stack"""
|
||||
with mock_cloudformation():
|
||||
client = boto3.client('cloudformation', region_name=AWS_REGION)
|
||||
client.create_stack(StackName=SG_STACK_NAME, TemplateBody=SG_TEMPLATE)
|
||||
response = client.create_stack(StackName=EC2_STACK_NAME, TemplateBody=EC2_TEMPLATE)
|
||||
self.assertIn('StackId', response)
|
||||
response = client.describe_stacks(StackName=response['StackId'])
|
||||
self.assertIn('Stacks', response)
|
||||
stack_info = response['Stacks']
|
||||
self.assertEqual(1, len(stack_info))
|
||||
self.assertIn('StackName', stack_info[0])
|
||||
self.assertEqual(EC2_STACK_NAME, stack_info[0]['StackName'])
|
||||
|
||||
def test_simple_instance_missing_export(self):
|
||||
"""Test that we get an exception if a CloudFormation stack tries to imports a non-existent export value"""
|
||||
with mock_cloudformation():
|
||||
client = boto3.client('cloudformation', region_name=AWS_REGION)
|
||||
with self.assertRaises(ClientError) as e:
|
||||
client.create_stack(StackName=EC2_STACK_NAME, TemplateBody=EC2_TEMPLATE)
|
||||
self.assertIn('Error', e.exception.response)
|
||||
self.assertIn('Code', e.exception.response['Error'])
|
||||
self.assertEqual('ExportNotFound', e.exception.response['Error']['Code'])
|
||||
|
|
|
|||
|
|
@ -1,33 +1,33 @@
|
|||
from __future__ import unicode_literals
|
||||
|
||||
import json
|
||||
from six.moves.urllib.parse import urlencode
|
||||
import re
|
||||
import sure # noqa
|
||||
|
||||
import moto.server as server
|
||||
|
||||
'''
|
||||
Test the different server responses
|
||||
'''
|
||||
|
||||
|
||||
def test_cloudformation_server_get():
|
||||
backend = server.create_backend_app("cloudformation")
|
||||
stack_name = 'test stack'
|
||||
test_client = backend.test_client()
|
||||
template_body = {
|
||||
"Resources": {},
|
||||
}
|
||||
create_stack_resp = test_client.action_data("CreateStack", StackName=stack_name,
|
||||
TemplateBody=json.dumps(template_body))
|
||||
create_stack_resp.should.match(
|
||||
r"<CreateStackResponse>.*<CreateStackResult>.*<StackId>.*</StackId>.*</CreateStackResult>.*</CreateStackResponse>", re.DOTALL)
|
||||
stack_id_from_create_response = re.search(
|
||||
"<StackId>(.*)</StackId>", create_stack_resp).groups()[0]
|
||||
|
||||
list_stacks_resp = test_client.action_data("ListStacks")
|
||||
stack_id_from_list_response = re.search(
|
||||
"<StackId>(.*)</StackId>", list_stacks_resp).groups()[0]
|
||||
|
||||
stack_id_from_create_response.should.equal(stack_id_from_list_response)
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import json
|
||||
from six.moves.urllib.parse import urlencode
|
||||
import re
|
||||
import sure # noqa
|
||||
|
||||
import moto.server as server
|
||||
|
||||
'''
|
||||
Test the different server responses
|
||||
'''
|
||||
|
||||
|
||||
def test_cloudformation_server_get():
|
||||
backend = server.create_backend_app("cloudformation")
|
||||
stack_name = 'test stack'
|
||||
test_client = backend.test_client()
|
||||
template_body = {
|
||||
"Resources": {},
|
||||
}
|
||||
create_stack_resp = test_client.action_data("CreateStack", StackName=stack_name,
|
||||
TemplateBody=json.dumps(template_body))
|
||||
create_stack_resp.should.match(
|
||||
r"<CreateStackResponse>.*<CreateStackResult>.*<StackId>.*</StackId>.*</CreateStackResult>.*</CreateStackResponse>", re.DOTALL)
|
||||
stack_id_from_create_response = re.search(
|
||||
"<StackId>(.*)</StackId>", create_stack_resp).groups()[0]
|
||||
|
||||
list_stacks_resp = test_client.action_data("ListStacks")
|
||||
stack_id_from_list_response = re.search(
|
||||
"<StackId>(.*)</StackId>", list_stacks_resp).groups()[0]
|
||||
|
||||
stack_id_from_create_response.should.equal(stack_id_from_list_response)
|
||||
|
|
|
|||
|
|
@ -1,471 +1,471 @@
|
|||
from __future__ import unicode_literals
|
||||
import json
|
||||
import yaml
|
||||
|
||||
from mock import patch
|
||||
import sure # noqa
|
||||
|
||||
from moto.cloudformation.exceptions import ValidationError
|
||||
from moto.cloudformation.models import FakeStack
|
||||
from moto.cloudformation.parsing import resource_class_from_type, parse_condition, Export
|
||||
from moto.sqs.models import Queue
|
||||
from moto.s3.models import FakeBucket
|
||||
from moto.cloudformation.utils import yaml_tag_constructor
|
||||
from boto.cloudformation.stack import Output
|
||||
|
||||
|
||||
|
||||
dummy_template = {
|
||||
"AWSTemplateFormatVersion": "2010-09-09",
|
||||
|
||||
"Description": "Create a multi-az, load balanced, Auto Scaled sample web site. The Auto Scaling trigger is based on the CPU utilization of the web servers. The AMI is chosen based on the region in which the stack is run. This example creates a web service running across all availability zones in a region. The instances are load balanced with a simple health check. The web site is available on port 80, however, the instances can be configured to listen on any port (8888 by default). **WARNING** This template creates one or more Amazon EC2 instances. You will be billed for the AWS resources used if you create a stack from this template.",
|
||||
|
||||
"Resources": {
|
||||
"Queue": {
|
||||
"Type": "AWS::SQS::Queue",
|
||||
"Properties": {
|
||||
"QueueName": "my-queue",
|
||||
"VisibilityTimeout": 60,
|
||||
}
|
||||
},
|
||||
"S3Bucket": {
|
||||
"Type": "AWS::S3::Bucket",
|
||||
"DeletionPolicy": "Retain"
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
name_type_template = {
|
||||
"AWSTemplateFormatVersion": "2010-09-09",
|
||||
|
||||
"Description": "Create a multi-az, load balanced, Auto Scaled sample web site. The Auto Scaling trigger is based on the CPU utilization of the web servers. The AMI is chosen based on the region in which the stack is run. This example creates a web service running across all availability zones in a region. The instances are load balanced with a simple health check. The web site is available on port 80, however, the instances can be configured to listen on any port (8888 by default). **WARNING** This template creates one or more Amazon EC2 instances. You will be billed for the AWS resources used if you create a stack from this template.",
|
||||
|
||||
"Resources": {
|
||||
"Queue": {
|
||||
"Type": "AWS::SQS::Queue",
|
||||
"Properties": {
|
||||
"VisibilityTimeout": 60,
|
||||
}
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
output_dict = {
|
||||
"Outputs": {
|
||||
"Output1": {
|
||||
"Value": {"Ref": "Queue"},
|
||||
"Description": "This is a description."
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
bad_output = {
|
||||
"Outputs": {
|
||||
"Output1": {
|
||||
"Value": {"Fn::GetAtt": ["Queue", "InvalidAttribute"]}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
get_attribute_output = {
|
||||
"Outputs": {
|
||||
"Output1": {
|
||||
"Value": {"Fn::GetAtt": ["Queue", "QueueName"]}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
get_availability_zones_output = {
|
||||
"Outputs": {
|
||||
"Output1": {
|
||||
"Value": {"Fn::GetAZs": ""}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
split_select_template = {
|
||||
"AWSTemplateFormatVersion": "2010-09-09",
|
||||
"Resources": {
|
||||
"Queue": {
|
||||
"Type": "AWS::SQS::Queue",
|
||||
"Properties": {
|
||||
"QueueName": {"Fn::Select": [ "1", {"Fn::Split": [ "-", "123-myqueue" ] } ] },
|
||||
"VisibilityTimeout": 60,
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
sub_template = {
|
||||
"AWSTemplateFormatVersion": "2010-09-09",
|
||||
"Resources": {
|
||||
"Queue1": {
|
||||
"Type": "AWS::SQS::Queue",
|
||||
"Properties": {
|
||||
"QueueName": {"Fn::Sub": '${AWS::StackName}-queue-${!Literal}'},
|
||||
"VisibilityTimeout": 60,
|
||||
}
|
||||
},
|
||||
"Queue2": {
|
||||
"Type": "AWS::SQS::Queue",
|
||||
"Properties": {
|
||||
"QueueName": {"Fn::Sub": '${Queue1.QueueName}'},
|
||||
"VisibilityTimeout": 60,
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
export_value_template = {
|
||||
"AWSTemplateFormatVersion": "2010-09-09",
|
||||
"Resources": {
|
||||
"Queue": {
|
||||
"Type": "AWS::SQS::Queue",
|
||||
"Properties": {
|
||||
"QueueName": {"Fn::Sub": '${AWS::StackName}-queue'},
|
||||
"VisibilityTimeout": 60,
|
||||
}
|
||||
}
|
||||
},
|
||||
"Outputs": {
|
||||
"Output1": {
|
||||
"Value": "value",
|
||||
"Export": {"Name": 'queue-us-west-1'}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
import_value_template = {
|
||||
"AWSTemplateFormatVersion": "2010-09-09",
|
||||
"Resources": {
|
||||
"Queue": {
|
||||
"Type": "AWS::SQS::Queue",
|
||||
"Properties": {
|
||||
"QueueName": {"Fn::ImportValue": 'queue-us-west-1'},
|
||||
"VisibilityTimeout": 60,
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
outputs_template = dict(list(dummy_template.items()) +
|
||||
list(output_dict.items()))
|
||||
bad_outputs_template = dict(
|
||||
list(dummy_template.items()) + list(bad_output.items()))
|
||||
get_attribute_outputs_template = dict(
|
||||
list(dummy_template.items()) + list(get_attribute_output.items()))
|
||||
get_availability_zones_template = dict(
|
||||
list(dummy_template.items()) + list(get_availability_zones_output.items()))
|
||||
|
||||
dummy_template_json = json.dumps(dummy_template)
|
||||
name_type_template_json = json.dumps(name_type_template)
|
||||
output_type_template_json = json.dumps(outputs_template)
|
||||
bad_output_template_json = json.dumps(bad_outputs_template)
|
||||
get_attribute_outputs_template_json = json.dumps(
|
||||
get_attribute_outputs_template)
|
||||
get_availability_zones_template_json = json.dumps(
|
||||
get_availability_zones_template)
|
||||
split_select_template_json = json.dumps(split_select_template)
|
||||
sub_template_json = json.dumps(sub_template)
|
||||
export_value_template_json = json.dumps(export_value_template)
|
||||
import_value_template_json = json.dumps(import_value_template)
|
||||
|
||||
|
||||
def test_parse_stack_resources():
|
||||
stack = FakeStack(
|
||||
stack_id="test_id",
|
||||
name="test_stack",
|
||||
template=dummy_template_json,
|
||||
parameters={},
|
||||
region_name='us-west-1')
|
||||
|
||||
stack.resource_map.should.have.length_of(2)
|
||||
|
||||
queue = stack.resource_map['Queue']
|
||||
queue.should.be.a(Queue)
|
||||
queue.name.should.equal("my-queue")
|
||||
|
||||
bucket = stack.resource_map['S3Bucket']
|
||||
bucket.should.be.a(FakeBucket)
|
||||
bucket.physical_resource_id.should.equal(bucket.name)
|
||||
|
||||
|
||||
@patch("moto.cloudformation.parsing.logger")
|
||||
def test_missing_resource_logs(logger):
|
||||
resource_class_from_type("foobar")
|
||||
logger.warning.assert_called_with(
|
||||
'No Moto CloudFormation support for %s', 'foobar')
|
||||
|
||||
|
||||
def test_parse_stack_with_name_type_resource():
|
||||
stack = FakeStack(
|
||||
stack_id="test_id",
|
||||
name="test_stack",
|
||||
template=name_type_template_json,
|
||||
parameters={},
|
||||
region_name='us-west-1')
|
||||
|
||||
stack.resource_map.should.have.length_of(1)
|
||||
list(stack.resource_map.keys())[0].should.equal('Queue')
|
||||
queue = list(stack.resource_map.values())[0]
|
||||
queue.should.be.a(Queue)
|
||||
|
||||
|
||||
def test_parse_stack_with_yaml_template():
|
||||
stack = FakeStack(
|
||||
stack_id="test_id",
|
||||
name="test_stack",
|
||||
template=yaml.dump(name_type_template),
|
||||
parameters={},
|
||||
region_name='us-west-1')
|
||||
|
||||
stack.resource_map.should.have.length_of(1)
|
||||
list(stack.resource_map.keys())[0].should.equal('Queue')
|
||||
queue = list(stack.resource_map.values())[0]
|
||||
queue.should.be.a(Queue)
|
||||
|
||||
|
||||
def test_parse_stack_with_outputs():
|
||||
stack = FakeStack(
|
||||
stack_id="test_id",
|
||||
name="test_stack",
|
||||
template=output_type_template_json,
|
||||
parameters={},
|
||||
region_name='us-west-1')
|
||||
|
||||
stack.output_map.should.have.length_of(1)
|
||||
list(stack.output_map.keys())[0].should.equal('Output1')
|
||||
output = list(stack.output_map.values())[0]
|
||||
output.should.be.a(Output)
|
||||
output.description.should.equal("This is a description.")
|
||||
|
||||
|
||||
def test_parse_stack_with_get_attribute_outputs():
|
||||
stack = FakeStack(
|
||||
stack_id="test_id",
|
||||
name="test_stack",
|
||||
template=get_attribute_outputs_template_json,
|
||||
parameters={},
|
||||
region_name='us-west-1')
|
||||
|
||||
stack.output_map.should.have.length_of(1)
|
||||
list(stack.output_map.keys())[0].should.equal('Output1')
|
||||
output = list(stack.output_map.values())[0]
|
||||
output.should.be.a(Output)
|
||||
output.value.should.equal("my-queue")
|
||||
|
||||
def test_parse_stack_with_get_attribute_kms():
|
||||
from .fixtures.kms_key import template
|
||||
|
||||
template_json = json.dumps(template)
|
||||
stack = FakeStack(
|
||||
stack_id="test_id",
|
||||
name="test_stack",
|
||||
template=template_json,
|
||||
parameters={},
|
||||
region_name='us-west-1')
|
||||
|
||||
stack.output_map.should.have.length_of(1)
|
||||
list(stack.output_map.keys())[0].should.equal('KeyArn')
|
||||
output = list(stack.output_map.values())[0]
|
||||
output.should.be.a(Output)
|
||||
|
||||
def test_parse_stack_with_get_availability_zones():
|
||||
stack = FakeStack(
|
||||
stack_id="test_id",
|
||||
name="test_stack",
|
||||
template=get_availability_zones_template_json,
|
||||
parameters={},
|
||||
region_name='us-east-1')
|
||||
|
||||
stack.output_map.should.have.length_of(1)
|
||||
list(stack.output_map.keys())[0].should.equal('Output1')
|
||||
output = list(stack.output_map.values())[0]
|
||||
output.should.be.a(Output)
|
||||
output.value.should.equal([ "us-east-1a", "us-east-1b", "us-east-1c", "us-east-1d" ])
|
||||
|
||||
|
||||
def test_parse_stack_with_bad_get_attribute_outputs():
|
||||
FakeStack.when.called_with(
|
||||
"test_id", "test_stack", bad_output_template_json, {}, "us-west-1").should.throw(ValidationError)
|
||||
|
||||
|
||||
def test_parse_equals_condition():
|
||||
parse_condition(
|
||||
condition={"Fn::Equals": [{"Ref": "EnvType"}, "prod"]},
|
||||
resources_map={"EnvType": "prod"},
|
||||
condition_map={},
|
||||
).should.equal(True)
|
||||
|
||||
parse_condition(
|
||||
condition={"Fn::Equals": [{"Ref": "EnvType"}, "prod"]},
|
||||
resources_map={"EnvType": "staging"},
|
||||
condition_map={},
|
||||
).should.equal(False)
|
||||
|
||||
|
||||
def test_parse_not_condition():
|
||||
parse_condition(
|
||||
condition={
|
||||
"Fn::Not": [{
|
||||
"Fn::Equals": [{"Ref": "EnvType"}, "prod"]
|
||||
}]
|
||||
},
|
||||
resources_map={"EnvType": "prod"},
|
||||
condition_map={},
|
||||
).should.equal(False)
|
||||
|
||||
parse_condition(
|
||||
condition={
|
||||
"Fn::Not": [{
|
||||
"Fn::Equals": [{"Ref": "EnvType"}, "prod"]
|
||||
}]
|
||||
},
|
||||
resources_map={"EnvType": "staging"},
|
||||
condition_map={},
|
||||
).should.equal(True)
|
||||
|
||||
|
||||
def test_parse_and_condition():
|
||||
parse_condition(
|
||||
condition={
|
||||
"Fn::And": [
|
||||
{"Fn::Equals": [{"Ref": "EnvType"}, "prod"]},
|
||||
{"Fn::Equals": [{"Ref": "EnvType"}, "staging"]},
|
||||
]
|
||||
},
|
||||
resources_map={"EnvType": "prod"},
|
||||
condition_map={},
|
||||
).should.equal(False)
|
||||
|
||||
parse_condition(
|
||||
condition={
|
||||
"Fn::And": [
|
||||
{"Fn::Equals": [{"Ref": "EnvType"}, "prod"]},
|
||||
{"Fn::Equals": [{"Ref": "EnvType"}, "prod"]},
|
||||
]
|
||||
},
|
||||
resources_map={"EnvType": "prod"},
|
||||
condition_map={},
|
||||
).should.equal(True)
|
||||
|
||||
|
||||
def test_parse_or_condition():
|
||||
parse_condition(
|
||||
condition={
|
||||
"Fn::Or": [
|
||||
{"Fn::Equals": [{"Ref": "EnvType"}, "prod"]},
|
||||
{"Fn::Equals": [{"Ref": "EnvType"}, "staging"]},
|
||||
]
|
||||
},
|
||||
resources_map={"EnvType": "prod"},
|
||||
condition_map={},
|
||||
).should.equal(True)
|
||||
|
||||
parse_condition(
|
||||
condition={
|
||||
"Fn::Or": [
|
||||
{"Fn::Equals": [{"Ref": "EnvType"}, "staging"]},
|
||||
{"Fn::Equals": [{"Ref": "EnvType"}, "staging"]},
|
||||
]
|
||||
},
|
||||
resources_map={"EnvType": "prod"},
|
||||
condition_map={},
|
||||
).should.equal(False)
|
||||
|
||||
|
||||
def test_reference_other_conditions():
|
||||
parse_condition(
|
||||
condition={"Fn::Not": [{"Condition": "OtherCondition"}]},
|
||||
resources_map={},
|
||||
condition_map={"OtherCondition": True},
|
||||
).should.equal(False)
|
||||
|
||||
|
||||
def test_parse_split_and_select():
|
||||
stack = FakeStack(
|
||||
stack_id="test_id",
|
||||
name="test_stack",
|
||||
template=split_select_template_json,
|
||||
parameters={},
|
||||
region_name='us-west-1')
|
||||
|
||||
stack.resource_map.should.have.length_of(1)
|
||||
queue = stack.resource_map['Queue']
|
||||
queue.name.should.equal("myqueue")
|
||||
|
||||
|
||||
def test_sub():
|
||||
stack = FakeStack(
|
||||
stack_id="test_id",
|
||||
name="test_stack",
|
||||
template=sub_template_json,
|
||||
parameters={},
|
||||
region_name='us-west-1')
|
||||
|
||||
queue1 = stack.resource_map['Queue1']
|
||||
queue2 = stack.resource_map['Queue2']
|
||||
queue2.name.should.equal(queue1.name)
|
||||
|
||||
|
||||
def test_import():
|
||||
export_stack = FakeStack(
|
||||
stack_id="test_id",
|
||||
name="test_stack",
|
||||
template=export_value_template_json,
|
||||
parameters={},
|
||||
region_name='us-west-1')
|
||||
import_stack = FakeStack(
|
||||
stack_id="test_id",
|
||||
name="test_stack",
|
||||
template=import_value_template_json,
|
||||
parameters={},
|
||||
region_name='us-west-1',
|
||||
cross_stack_resources={export_stack.exports[0].value: export_stack.exports[0]})
|
||||
|
||||
queue = import_stack.resource_map['Queue']
|
||||
queue.name.should.equal("value")
|
||||
|
||||
|
||||
|
||||
def test_short_form_func_in_yaml_teamplate():
|
||||
template = """---
|
||||
KeyB64: !Base64 valueToEncode
|
||||
KeyRef: !Ref foo
|
||||
KeyAnd: !And
|
||||
- A
|
||||
- B
|
||||
KeyEquals: !Equals [A, B]
|
||||
KeyIf: !If [A, B, C]
|
||||
KeyNot: !Not [A]
|
||||
KeyOr: !Or [A, B]
|
||||
KeyFindInMap: !FindInMap [A, B, C]
|
||||
KeyGetAtt: !GetAtt A.B
|
||||
KeyGetAZs: !GetAZs A
|
||||
KeyImportValue: !ImportValue A
|
||||
KeyJoin: !Join [ ":", [A, B, C] ]
|
||||
KeySelect: !Select [A, B]
|
||||
KeySplit: !Split [A, B]
|
||||
KeySub: !Sub A
|
||||
"""
|
||||
yaml.add_multi_constructor('', yaml_tag_constructor)
|
||||
template_dict = yaml.load(template)
|
||||
key_and_expects = [
|
||||
['KeyRef', {'Ref': 'foo'}],
|
||||
['KeyB64', {'Fn::Base64': 'valueToEncode'}],
|
||||
['KeyAnd', {'Fn::And': ['A', 'B']}],
|
||||
['KeyEquals', {'Fn::Equals': ['A', 'B']}],
|
||||
['KeyIf', {'Fn::If': ['A', 'B', 'C']}],
|
||||
['KeyNot', {'Fn::Not': ['A']}],
|
||||
['KeyOr', {'Fn::Or': ['A', 'B']}],
|
||||
['KeyFindInMap', {'Fn::FindInMap': ['A', 'B', 'C']}],
|
||||
['KeyGetAtt', {'Fn::GetAtt': ['A', 'B']}],
|
||||
['KeyGetAZs', {'Fn::GetAZs': 'A'}],
|
||||
['KeyImportValue', {'Fn::ImportValue': 'A'}],
|
||||
['KeyJoin', {'Fn::Join': [ ":", [ 'A', 'B', 'C' ] ]}],
|
||||
['KeySelect', {'Fn::Select': ['A', 'B']}],
|
||||
['KeySplit', {'Fn::Split': ['A', 'B']}],
|
||||
['KeySub', {'Fn::Sub': 'A'}],
|
||||
]
|
||||
for k, v in key_and_expects:
|
||||
template_dict.should.have.key(k).which.should.be.equal(v)
|
||||
from __future__ import unicode_literals
|
||||
import json
|
||||
import yaml
|
||||
|
||||
from mock import patch
|
||||
import sure # noqa
|
||||
|
||||
from moto.cloudformation.exceptions import ValidationError
|
||||
from moto.cloudformation.models import FakeStack
|
||||
from moto.cloudformation.parsing import resource_class_from_type, parse_condition, Export
|
||||
from moto.sqs.models import Queue
|
||||
from moto.s3.models import FakeBucket
|
||||
from moto.cloudformation.utils import yaml_tag_constructor
|
||||
from boto.cloudformation.stack import Output
|
||||
|
||||
|
||||
|
||||
dummy_template = {
|
||||
"AWSTemplateFormatVersion": "2010-09-09",
|
||||
|
||||
"Description": "Create a multi-az, load balanced, Auto Scaled sample web site. The Auto Scaling trigger is based on the CPU utilization of the web servers. The AMI is chosen based on the region in which the stack is run. This example creates a web service running across all availability zones in a region. The instances are load balanced with a simple health check. The web site is available on port 80, however, the instances can be configured to listen on any port (8888 by default). **WARNING** This template creates one or more Amazon EC2 instances. You will be billed for the AWS resources used if you create a stack from this template.",
|
||||
|
||||
"Resources": {
|
||||
"Queue": {
|
||||
"Type": "AWS::SQS::Queue",
|
||||
"Properties": {
|
||||
"QueueName": "my-queue",
|
||||
"VisibilityTimeout": 60,
|
||||
}
|
||||
},
|
||||
"S3Bucket": {
|
||||
"Type": "AWS::S3::Bucket",
|
||||
"DeletionPolicy": "Retain"
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
name_type_template = {
|
||||
"AWSTemplateFormatVersion": "2010-09-09",
|
||||
|
||||
"Description": "Create a multi-az, load balanced, Auto Scaled sample web site. The Auto Scaling trigger is based on the CPU utilization of the web servers. The AMI is chosen based on the region in which the stack is run. This example creates a web service running across all availability zones in a region. The instances are load balanced with a simple health check. The web site is available on port 80, however, the instances can be configured to listen on any port (8888 by default). **WARNING** This template creates one or more Amazon EC2 instances. You will be billed for the AWS resources used if you create a stack from this template.",
|
||||
|
||||
"Resources": {
|
||||
"Queue": {
|
||||
"Type": "AWS::SQS::Queue",
|
||||
"Properties": {
|
||||
"VisibilityTimeout": 60,
|
||||
}
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
output_dict = {
|
||||
"Outputs": {
|
||||
"Output1": {
|
||||
"Value": {"Ref": "Queue"},
|
||||
"Description": "This is a description."
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
bad_output = {
|
||||
"Outputs": {
|
||||
"Output1": {
|
||||
"Value": {"Fn::GetAtt": ["Queue", "InvalidAttribute"]}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
get_attribute_output = {
|
||||
"Outputs": {
|
||||
"Output1": {
|
||||
"Value": {"Fn::GetAtt": ["Queue", "QueueName"]}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
get_availability_zones_output = {
|
||||
"Outputs": {
|
||||
"Output1": {
|
||||
"Value": {"Fn::GetAZs": ""}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
split_select_template = {
|
||||
"AWSTemplateFormatVersion": "2010-09-09",
|
||||
"Resources": {
|
||||
"Queue": {
|
||||
"Type": "AWS::SQS::Queue",
|
||||
"Properties": {
|
||||
"QueueName": {"Fn::Select": [ "1", {"Fn::Split": [ "-", "123-myqueue" ] } ] },
|
||||
"VisibilityTimeout": 60,
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
sub_template = {
|
||||
"AWSTemplateFormatVersion": "2010-09-09",
|
||||
"Resources": {
|
||||
"Queue1": {
|
||||
"Type": "AWS::SQS::Queue",
|
||||
"Properties": {
|
||||
"QueueName": {"Fn::Sub": '${AWS::StackName}-queue-${!Literal}'},
|
||||
"VisibilityTimeout": 60,
|
||||
}
|
||||
},
|
||||
"Queue2": {
|
||||
"Type": "AWS::SQS::Queue",
|
||||
"Properties": {
|
||||
"QueueName": {"Fn::Sub": '${Queue1.QueueName}'},
|
||||
"VisibilityTimeout": 60,
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
export_value_template = {
|
||||
"AWSTemplateFormatVersion": "2010-09-09",
|
||||
"Resources": {
|
||||
"Queue": {
|
||||
"Type": "AWS::SQS::Queue",
|
||||
"Properties": {
|
||||
"QueueName": {"Fn::Sub": '${AWS::StackName}-queue'},
|
||||
"VisibilityTimeout": 60,
|
||||
}
|
||||
}
|
||||
},
|
||||
"Outputs": {
|
||||
"Output1": {
|
||||
"Value": "value",
|
||||
"Export": {"Name": 'queue-us-west-1'}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
import_value_template = {
|
||||
"AWSTemplateFormatVersion": "2010-09-09",
|
||||
"Resources": {
|
||||
"Queue": {
|
||||
"Type": "AWS::SQS::Queue",
|
||||
"Properties": {
|
||||
"QueueName": {"Fn::ImportValue": 'queue-us-west-1'},
|
||||
"VisibilityTimeout": 60,
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
outputs_template = dict(list(dummy_template.items()) +
|
||||
list(output_dict.items()))
|
||||
bad_outputs_template = dict(
|
||||
list(dummy_template.items()) + list(bad_output.items()))
|
||||
get_attribute_outputs_template = dict(
|
||||
list(dummy_template.items()) + list(get_attribute_output.items()))
|
||||
get_availability_zones_template = dict(
|
||||
list(dummy_template.items()) + list(get_availability_zones_output.items()))
|
||||
|
||||
dummy_template_json = json.dumps(dummy_template)
|
||||
name_type_template_json = json.dumps(name_type_template)
|
||||
output_type_template_json = json.dumps(outputs_template)
|
||||
bad_output_template_json = json.dumps(bad_outputs_template)
|
||||
get_attribute_outputs_template_json = json.dumps(
|
||||
get_attribute_outputs_template)
|
||||
get_availability_zones_template_json = json.dumps(
|
||||
get_availability_zones_template)
|
||||
split_select_template_json = json.dumps(split_select_template)
|
||||
sub_template_json = json.dumps(sub_template)
|
||||
export_value_template_json = json.dumps(export_value_template)
|
||||
import_value_template_json = json.dumps(import_value_template)
|
||||
|
||||
|
||||
def test_parse_stack_resources():
|
||||
stack = FakeStack(
|
||||
stack_id="test_id",
|
||||
name="test_stack",
|
||||
template=dummy_template_json,
|
||||
parameters={},
|
||||
region_name='us-west-1')
|
||||
|
||||
stack.resource_map.should.have.length_of(2)
|
||||
|
||||
queue = stack.resource_map['Queue']
|
||||
queue.should.be.a(Queue)
|
||||
queue.name.should.equal("my-queue")
|
||||
|
||||
bucket = stack.resource_map['S3Bucket']
|
||||
bucket.should.be.a(FakeBucket)
|
||||
bucket.physical_resource_id.should.equal(bucket.name)
|
||||
|
||||
|
||||
@patch("moto.cloudformation.parsing.logger")
|
||||
def test_missing_resource_logs(logger):
|
||||
resource_class_from_type("foobar")
|
||||
logger.warning.assert_called_with(
|
||||
'No Moto CloudFormation support for %s', 'foobar')
|
||||
|
||||
|
||||
def test_parse_stack_with_name_type_resource():
|
||||
stack = FakeStack(
|
||||
stack_id="test_id",
|
||||
name="test_stack",
|
||||
template=name_type_template_json,
|
||||
parameters={},
|
||||
region_name='us-west-1')
|
||||
|
||||
stack.resource_map.should.have.length_of(1)
|
||||
list(stack.resource_map.keys())[0].should.equal('Queue')
|
||||
queue = list(stack.resource_map.values())[0]
|
||||
queue.should.be.a(Queue)
|
||||
|
||||
|
||||
def test_parse_stack_with_yaml_template():
|
||||
stack = FakeStack(
|
||||
stack_id="test_id",
|
||||
name="test_stack",
|
||||
template=yaml.dump(name_type_template),
|
||||
parameters={},
|
||||
region_name='us-west-1')
|
||||
|
||||
stack.resource_map.should.have.length_of(1)
|
||||
list(stack.resource_map.keys())[0].should.equal('Queue')
|
||||
queue = list(stack.resource_map.values())[0]
|
||||
queue.should.be.a(Queue)
|
||||
|
||||
|
||||
def test_parse_stack_with_outputs():
|
||||
stack = FakeStack(
|
||||
stack_id="test_id",
|
||||
name="test_stack",
|
||||
template=output_type_template_json,
|
||||
parameters={},
|
||||
region_name='us-west-1')
|
||||
|
||||
stack.output_map.should.have.length_of(1)
|
||||
list(stack.output_map.keys())[0].should.equal('Output1')
|
||||
output = list(stack.output_map.values())[0]
|
||||
output.should.be.a(Output)
|
||||
output.description.should.equal("This is a description.")
|
||||
|
||||
|
||||
def test_parse_stack_with_get_attribute_outputs():
|
||||
stack = FakeStack(
|
||||
stack_id="test_id",
|
||||
name="test_stack",
|
||||
template=get_attribute_outputs_template_json,
|
||||
parameters={},
|
||||
region_name='us-west-1')
|
||||
|
||||
stack.output_map.should.have.length_of(1)
|
||||
list(stack.output_map.keys())[0].should.equal('Output1')
|
||||
output = list(stack.output_map.values())[0]
|
||||
output.should.be.a(Output)
|
||||
output.value.should.equal("my-queue")
|
||||
|
||||
def test_parse_stack_with_get_attribute_kms():
|
||||
from .fixtures.kms_key import template
|
||||
|
||||
template_json = json.dumps(template)
|
||||
stack = FakeStack(
|
||||
stack_id="test_id",
|
||||
name="test_stack",
|
||||
template=template_json,
|
||||
parameters={},
|
||||
region_name='us-west-1')
|
||||
|
||||
stack.output_map.should.have.length_of(1)
|
||||
list(stack.output_map.keys())[0].should.equal('KeyArn')
|
||||
output = list(stack.output_map.values())[0]
|
||||
output.should.be.a(Output)
|
||||
|
||||
def test_parse_stack_with_get_availability_zones():
|
||||
stack = FakeStack(
|
||||
stack_id="test_id",
|
||||
name="test_stack",
|
||||
template=get_availability_zones_template_json,
|
||||
parameters={},
|
||||
region_name='us-east-1')
|
||||
|
||||
stack.output_map.should.have.length_of(1)
|
||||
list(stack.output_map.keys())[0].should.equal('Output1')
|
||||
output = list(stack.output_map.values())[0]
|
||||
output.should.be.a(Output)
|
||||
output.value.should.equal([ "us-east-1a", "us-east-1b", "us-east-1c", "us-east-1d" ])
|
||||
|
||||
|
||||
def test_parse_stack_with_bad_get_attribute_outputs():
|
||||
FakeStack.when.called_with(
|
||||
"test_id", "test_stack", bad_output_template_json, {}, "us-west-1").should.throw(ValidationError)
|
||||
|
||||
|
||||
def test_parse_equals_condition():
|
||||
parse_condition(
|
||||
condition={"Fn::Equals": [{"Ref": "EnvType"}, "prod"]},
|
||||
resources_map={"EnvType": "prod"},
|
||||
condition_map={},
|
||||
).should.equal(True)
|
||||
|
||||
parse_condition(
|
||||
condition={"Fn::Equals": [{"Ref": "EnvType"}, "prod"]},
|
||||
resources_map={"EnvType": "staging"},
|
||||
condition_map={},
|
||||
).should.equal(False)
|
||||
|
||||
|
||||
def test_parse_not_condition():
|
||||
parse_condition(
|
||||
condition={
|
||||
"Fn::Not": [{
|
||||
"Fn::Equals": [{"Ref": "EnvType"}, "prod"]
|
||||
}]
|
||||
},
|
||||
resources_map={"EnvType": "prod"},
|
||||
condition_map={},
|
||||
).should.equal(False)
|
||||
|
||||
parse_condition(
|
||||
condition={
|
||||
"Fn::Not": [{
|
||||
"Fn::Equals": [{"Ref": "EnvType"}, "prod"]
|
||||
}]
|
||||
},
|
||||
resources_map={"EnvType": "staging"},
|
||||
condition_map={},
|
||||
).should.equal(True)
|
||||
|
||||
|
||||
def test_parse_and_condition():
|
||||
parse_condition(
|
||||
condition={
|
||||
"Fn::And": [
|
||||
{"Fn::Equals": [{"Ref": "EnvType"}, "prod"]},
|
||||
{"Fn::Equals": [{"Ref": "EnvType"}, "staging"]},
|
||||
]
|
||||
},
|
||||
resources_map={"EnvType": "prod"},
|
||||
condition_map={},
|
||||
).should.equal(False)
|
||||
|
||||
parse_condition(
|
||||
condition={
|
||||
"Fn::And": [
|
||||
{"Fn::Equals": [{"Ref": "EnvType"}, "prod"]},
|
||||
{"Fn::Equals": [{"Ref": "EnvType"}, "prod"]},
|
||||
]
|
||||
},
|
||||
resources_map={"EnvType": "prod"},
|
||||
condition_map={},
|
||||
).should.equal(True)
|
||||
|
||||
|
||||
def test_parse_or_condition():
|
||||
parse_condition(
|
||||
condition={
|
||||
"Fn::Or": [
|
||||
{"Fn::Equals": [{"Ref": "EnvType"}, "prod"]},
|
||||
{"Fn::Equals": [{"Ref": "EnvType"}, "staging"]},
|
||||
]
|
||||
},
|
||||
resources_map={"EnvType": "prod"},
|
||||
condition_map={},
|
||||
).should.equal(True)
|
||||
|
||||
parse_condition(
|
||||
condition={
|
||||
"Fn::Or": [
|
||||
{"Fn::Equals": [{"Ref": "EnvType"}, "staging"]},
|
||||
{"Fn::Equals": [{"Ref": "EnvType"}, "staging"]},
|
||||
]
|
||||
},
|
||||
resources_map={"EnvType": "prod"},
|
||||
condition_map={},
|
||||
).should.equal(False)
|
||||
|
||||
|
||||
def test_reference_other_conditions():
|
||||
parse_condition(
|
||||
condition={"Fn::Not": [{"Condition": "OtherCondition"}]},
|
||||
resources_map={},
|
||||
condition_map={"OtherCondition": True},
|
||||
).should.equal(False)
|
||||
|
||||
|
||||
def test_parse_split_and_select():
|
||||
stack = FakeStack(
|
||||
stack_id="test_id",
|
||||
name="test_stack",
|
||||
template=split_select_template_json,
|
||||
parameters={},
|
||||
region_name='us-west-1')
|
||||
|
||||
stack.resource_map.should.have.length_of(1)
|
||||
queue = stack.resource_map['Queue']
|
||||
queue.name.should.equal("myqueue")
|
||||
|
||||
|
||||
def test_sub():
|
||||
stack = FakeStack(
|
||||
stack_id="test_id",
|
||||
name="test_stack",
|
||||
template=sub_template_json,
|
||||
parameters={},
|
||||
region_name='us-west-1')
|
||||
|
||||
queue1 = stack.resource_map['Queue1']
|
||||
queue2 = stack.resource_map['Queue2']
|
||||
queue2.name.should.equal(queue1.name)
|
||||
|
||||
|
||||
def test_import():
|
||||
export_stack = FakeStack(
|
||||
stack_id="test_id",
|
||||
name="test_stack",
|
||||
template=export_value_template_json,
|
||||
parameters={},
|
||||
region_name='us-west-1')
|
||||
import_stack = FakeStack(
|
||||
stack_id="test_id",
|
||||
name="test_stack",
|
||||
template=import_value_template_json,
|
||||
parameters={},
|
||||
region_name='us-west-1',
|
||||
cross_stack_resources={export_stack.exports[0].value: export_stack.exports[0]})
|
||||
|
||||
queue = import_stack.resource_map['Queue']
|
||||
queue.name.should.equal("value")
|
||||
|
||||
|
||||
|
||||
def test_short_form_func_in_yaml_teamplate():
|
||||
template = """---
|
||||
KeyB64: !Base64 valueToEncode
|
||||
KeyRef: !Ref foo
|
||||
KeyAnd: !And
|
||||
- A
|
||||
- B
|
||||
KeyEquals: !Equals [A, B]
|
||||
KeyIf: !If [A, B, C]
|
||||
KeyNot: !Not [A]
|
||||
KeyOr: !Or [A, B]
|
||||
KeyFindInMap: !FindInMap [A, B, C]
|
||||
KeyGetAtt: !GetAtt A.B
|
||||
KeyGetAZs: !GetAZs A
|
||||
KeyImportValue: !ImportValue A
|
||||
KeyJoin: !Join [ ":", [A, B, C] ]
|
||||
KeySelect: !Select [A, B]
|
||||
KeySplit: !Split [A, B]
|
||||
KeySub: !Sub A
|
||||
"""
|
||||
yaml.add_multi_constructor('', yaml_tag_constructor)
|
||||
template_dict = yaml.load(template)
|
||||
key_and_expects = [
|
||||
['KeyRef', {'Ref': 'foo'}],
|
||||
['KeyB64', {'Fn::Base64': 'valueToEncode'}],
|
||||
['KeyAnd', {'Fn::And': ['A', 'B']}],
|
||||
['KeyEquals', {'Fn::Equals': ['A', 'B']}],
|
||||
['KeyIf', {'Fn::If': ['A', 'B', 'C']}],
|
||||
['KeyNot', {'Fn::Not': ['A']}],
|
||||
['KeyOr', {'Fn::Or': ['A', 'B']}],
|
||||
['KeyFindInMap', {'Fn::FindInMap': ['A', 'B', 'C']}],
|
||||
['KeyGetAtt', {'Fn::GetAtt': ['A', 'B']}],
|
||||
['KeyGetAZs', {'Fn::GetAZs': 'A'}],
|
||||
['KeyImportValue', {'Fn::ImportValue': 'A'}],
|
||||
['KeyJoin', {'Fn::Join': [ ":", [ 'A', 'B', 'C' ] ]}],
|
||||
['KeySelect', {'Fn::Select': ['A', 'B']}],
|
||||
['KeySplit', {'Fn::Split': ['A', 'B']}],
|
||||
['KeySub', {'Fn::Sub': 'A'}],
|
||||
]
|
||||
for k, v in key_and_expects:
|
||||
template_dict.should.have.key(k).which.should.be.equal(v)
|
||||
|
|
|
|||
|
|
@ -1,123 +1,123 @@
|
|||
import boto
|
||||
from boto.ec2.cloudwatch.alarm import MetricAlarm
|
||||
import boto3
|
||||
from datetime import datetime, timedelta
|
||||
import pytz
|
||||
import sure # noqa
|
||||
|
||||
from moto import mock_cloudwatch_deprecated
|
||||
|
||||
|
||||
def alarm_fixture(name="tester", action=None):
|
||||
action = action or ['arn:alarm']
|
||||
return MetricAlarm(
|
||||
name=name,
|
||||
namespace="{0}_namespace".format(name),
|
||||
metric="{0}_metric".format(name),
|
||||
comparison='>=',
|
||||
threshold=2.0,
|
||||
period=60,
|
||||
evaluation_periods=5,
|
||||
statistic='Average',
|
||||
description='A test',
|
||||
dimensions={'InstanceId': ['i-0123456,i-0123457']},
|
||||
alarm_actions=action,
|
||||
ok_actions=['arn:ok'],
|
||||
insufficient_data_actions=['arn:insufficient'],
|
||||
unit='Seconds',
|
||||
)
|
||||
|
||||
|
||||
@mock_cloudwatch_deprecated
|
||||
def test_create_alarm():
|
||||
conn = boto.connect_cloudwatch()
|
||||
|
||||
alarm = alarm_fixture()
|
||||
conn.create_alarm(alarm)
|
||||
|
||||
alarms = conn.describe_alarms()
|
||||
alarms.should.have.length_of(1)
|
||||
alarm = alarms[0]
|
||||
alarm.name.should.equal('tester')
|
||||
alarm.namespace.should.equal('tester_namespace')
|
||||
alarm.metric.should.equal('tester_metric')
|
||||
alarm.comparison.should.equal('>=')
|
||||
alarm.threshold.should.equal(2.0)
|
||||
alarm.period.should.equal(60)
|
||||
alarm.evaluation_periods.should.equal(5)
|
||||
alarm.statistic.should.equal('Average')
|
||||
alarm.description.should.equal('A test')
|
||||
dict(alarm.dimensions).should.equal(
|
||||
{'InstanceId': ['i-0123456,i-0123457']})
|
||||
list(alarm.alarm_actions).should.equal(['arn:alarm'])
|
||||
list(alarm.ok_actions).should.equal(['arn:ok'])
|
||||
list(alarm.insufficient_data_actions).should.equal(['arn:insufficient'])
|
||||
alarm.unit.should.equal('Seconds')
|
||||
|
||||
|
||||
@mock_cloudwatch_deprecated
|
||||
def test_delete_alarm():
|
||||
conn = boto.connect_cloudwatch()
|
||||
|
||||
alarms = conn.describe_alarms()
|
||||
alarms.should.have.length_of(0)
|
||||
|
||||
alarm = alarm_fixture()
|
||||
conn.create_alarm(alarm)
|
||||
|
||||
alarms = conn.describe_alarms()
|
||||
alarms.should.have.length_of(1)
|
||||
|
||||
alarms[0].delete()
|
||||
|
||||
alarms = conn.describe_alarms()
|
||||
alarms.should.have.length_of(0)
|
||||
|
||||
|
||||
@mock_cloudwatch_deprecated
|
||||
def test_put_metric_data():
|
||||
conn = boto.connect_cloudwatch()
|
||||
|
||||
conn.put_metric_data(
|
||||
namespace='tester',
|
||||
name='metric',
|
||||
value=1.5,
|
||||
dimensions={'InstanceId': ['i-0123456,i-0123457']},
|
||||
)
|
||||
|
||||
metrics = conn.list_metrics()
|
||||
metrics.should.have.length_of(1)
|
||||
metric = metrics[0]
|
||||
metric.namespace.should.equal('tester')
|
||||
metric.name.should.equal('metric')
|
||||
dict(metric.dimensions).should.equal(
|
||||
{'InstanceId': ['i-0123456,i-0123457']})
|
||||
|
||||
|
||||
@mock_cloudwatch_deprecated
|
||||
def test_describe_alarms():
|
||||
conn = boto.connect_cloudwatch()
|
||||
|
||||
alarms = conn.describe_alarms()
|
||||
alarms.should.have.length_of(0)
|
||||
|
||||
conn.create_alarm(alarm_fixture(name="nfoobar", action="afoobar"))
|
||||
conn.create_alarm(alarm_fixture(name="nfoobaz", action="afoobaz"))
|
||||
conn.create_alarm(alarm_fixture(name="nbarfoo", action="abarfoo"))
|
||||
conn.create_alarm(alarm_fixture(name="nbazfoo", action="abazfoo"))
|
||||
|
||||
alarms = conn.describe_alarms()
|
||||
alarms.should.have.length_of(4)
|
||||
alarms = conn.describe_alarms(alarm_name_prefix="nfoo")
|
||||
alarms.should.have.length_of(2)
|
||||
alarms = conn.describe_alarms(
|
||||
alarm_names=["nfoobar", "nbarfoo", "nbazfoo"])
|
||||
alarms.should.have.length_of(3)
|
||||
alarms = conn.describe_alarms(action_prefix="afoo")
|
||||
alarms.should.have.length_of(2)
|
||||
|
||||
for alarm in conn.describe_alarms():
|
||||
alarm.delete()
|
||||
|
||||
alarms = conn.describe_alarms()
|
||||
alarms.should.have.length_of(0)
|
||||
import boto
|
||||
from boto.ec2.cloudwatch.alarm import MetricAlarm
|
||||
import boto3
|
||||
from datetime import datetime, timedelta
|
||||
import pytz
|
||||
import sure # noqa
|
||||
|
||||
from moto import mock_cloudwatch_deprecated
|
||||
|
||||
|
||||
def alarm_fixture(name="tester", action=None):
|
||||
action = action or ['arn:alarm']
|
||||
return MetricAlarm(
|
||||
name=name,
|
||||
namespace="{0}_namespace".format(name),
|
||||
metric="{0}_metric".format(name),
|
||||
comparison='>=',
|
||||
threshold=2.0,
|
||||
period=60,
|
||||
evaluation_periods=5,
|
||||
statistic='Average',
|
||||
description='A test',
|
||||
dimensions={'InstanceId': ['i-0123456,i-0123457']},
|
||||
alarm_actions=action,
|
||||
ok_actions=['arn:ok'],
|
||||
insufficient_data_actions=['arn:insufficient'],
|
||||
unit='Seconds',
|
||||
)
|
||||
|
||||
|
||||
@mock_cloudwatch_deprecated
|
||||
def test_create_alarm():
|
||||
conn = boto.connect_cloudwatch()
|
||||
|
||||
alarm = alarm_fixture()
|
||||
conn.create_alarm(alarm)
|
||||
|
||||
alarms = conn.describe_alarms()
|
||||
alarms.should.have.length_of(1)
|
||||
alarm = alarms[0]
|
||||
alarm.name.should.equal('tester')
|
||||
alarm.namespace.should.equal('tester_namespace')
|
||||
alarm.metric.should.equal('tester_metric')
|
||||
alarm.comparison.should.equal('>=')
|
||||
alarm.threshold.should.equal(2.0)
|
||||
alarm.period.should.equal(60)
|
||||
alarm.evaluation_periods.should.equal(5)
|
||||
alarm.statistic.should.equal('Average')
|
||||
alarm.description.should.equal('A test')
|
||||
dict(alarm.dimensions).should.equal(
|
||||
{'InstanceId': ['i-0123456,i-0123457']})
|
||||
list(alarm.alarm_actions).should.equal(['arn:alarm'])
|
||||
list(alarm.ok_actions).should.equal(['arn:ok'])
|
||||
list(alarm.insufficient_data_actions).should.equal(['arn:insufficient'])
|
||||
alarm.unit.should.equal('Seconds')
|
||||
|
||||
|
||||
@mock_cloudwatch_deprecated
|
||||
def test_delete_alarm():
|
||||
conn = boto.connect_cloudwatch()
|
||||
|
||||
alarms = conn.describe_alarms()
|
||||
alarms.should.have.length_of(0)
|
||||
|
||||
alarm = alarm_fixture()
|
||||
conn.create_alarm(alarm)
|
||||
|
||||
alarms = conn.describe_alarms()
|
||||
alarms.should.have.length_of(1)
|
||||
|
||||
alarms[0].delete()
|
||||
|
||||
alarms = conn.describe_alarms()
|
||||
alarms.should.have.length_of(0)
|
||||
|
||||
|
||||
@mock_cloudwatch_deprecated
|
||||
def test_put_metric_data():
|
||||
conn = boto.connect_cloudwatch()
|
||||
|
||||
conn.put_metric_data(
|
||||
namespace='tester',
|
||||
name='metric',
|
||||
value=1.5,
|
||||
dimensions={'InstanceId': ['i-0123456,i-0123457']},
|
||||
)
|
||||
|
||||
metrics = conn.list_metrics()
|
||||
metrics.should.have.length_of(1)
|
||||
metric = metrics[0]
|
||||
metric.namespace.should.equal('tester')
|
||||
metric.name.should.equal('metric')
|
||||
dict(metric.dimensions).should.equal(
|
||||
{'InstanceId': ['i-0123456,i-0123457']})
|
||||
|
||||
|
||||
@mock_cloudwatch_deprecated
|
||||
def test_describe_alarms():
|
||||
conn = boto.connect_cloudwatch()
|
||||
|
||||
alarms = conn.describe_alarms()
|
||||
alarms.should.have.length_of(0)
|
||||
|
||||
conn.create_alarm(alarm_fixture(name="nfoobar", action="afoobar"))
|
||||
conn.create_alarm(alarm_fixture(name="nfoobaz", action="afoobaz"))
|
||||
conn.create_alarm(alarm_fixture(name="nbarfoo", action="abarfoo"))
|
||||
conn.create_alarm(alarm_fixture(name="nbazfoo", action="abazfoo"))
|
||||
|
||||
alarms = conn.describe_alarms()
|
||||
alarms.should.have.length_of(4)
|
||||
alarms = conn.describe_alarms(alarm_name_prefix="nfoo")
|
||||
alarms.should.have.length_of(2)
|
||||
alarms = conn.describe_alarms(
|
||||
alarm_names=["nfoobar", "nbarfoo", "nbazfoo"])
|
||||
alarms.should.have.length_of(3)
|
||||
alarms = conn.describe_alarms(action_prefix="afoo")
|
||||
alarms.should.have.length_of(2)
|
||||
|
||||
for alarm in conn.describe_alarms():
|
||||
alarm.delete()
|
||||
|
||||
alarms = conn.describe_alarms()
|
||||
alarms.should.have.length_of(0)
|
||||
|
|
|
|||
|
|
@ -1,224 +1,224 @@
|
|||
from __future__ import unicode_literals
|
||||
|
||||
import boto3
|
||||
from botocore.exceptions import ClientError
|
||||
from datetime import datetime, timedelta
|
||||
import pytz
|
||||
import sure # noqa
|
||||
|
||||
from moto import mock_cloudwatch
|
||||
|
||||
|
||||
@mock_cloudwatch
|
||||
def test_put_list_dashboard():
|
||||
client = boto3.client('cloudwatch', region_name='eu-central-1')
|
||||
widget = '{"widgets": [{"type": "text", "x": 0, "y": 7, "width": 3, "height": 3, "properties": {"markdown": "Hello world"}}]}'
|
||||
|
||||
client.put_dashboard(DashboardName='test1', DashboardBody=widget)
|
||||
resp = client.list_dashboards()
|
||||
|
||||
len(resp['DashboardEntries']).should.equal(1)
|
||||
|
||||
|
||||
@mock_cloudwatch
|
||||
def test_put_list_prefix_nomatch_dashboard():
|
||||
client = boto3.client('cloudwatch', region_name='eu-central-1')
|
||||
widget = '{"widgets": [{"type": "text", "x": 0, "y": 7, "width": 3, "height": 3, "properties": {"markdown": "Hello world"}}]}'
|
||||
|
||||
client.put_dashboard(DashboardName='test1', DashboardBody=widget)
|
||||
resp = client.list_dashboards(DashboardNamePrefix='nomatch')
|
||||
|
||||
len(resp['DashboardEntries']).should.equal(0)
|
||||
|
||||
|
||||
@mock_cloudwatch
|
||||
def test_delete_dashboard():
|
||||
client = boto3.client('cloudwatch', region_name='eu-central-1')
|
||||
widget = '{"widgets": [{"type": "text", "x": 0, "y": 7, "width": 3, "height": 3, "properties": {"markdown": "Hello world"}}]}'
|
||||
|
||||
client.put_dashboard(DashboardName='test1', DashboardBody=widget)
|
||||
client.put_dashboard(DashboardName='test2', DashboardBody=widget)
|
||||
client.put_dashboard(DashboardName='test3', DashboardBody=widget)
|
||||
client.delete_dashboards(DashboardNames=['test2', 'test1'])
|
||||
|
||||
resp = client.list_dashboards(DashboardNamePrefix='test3')
|
||||
len(resp['DashboardEntries']).should.equal(1)
|
||||
|
||||
|
||||
@mock_cloudwatch
|
||||
def test_delete_dashboard_fail():
|
||||
client = boto3.client('cloudwatch', region_name='eu-central-1')
|
||||
widget = '{"widgets": [{"type": "text", "x": 0, "y": 7, "width": 3, "height": 3, "properties": {"markdown": "Hello world"}}]}'
|
||||
|
||||
client.put_dashboard(DashboardName='test1', DashboardBody=widget)
|
||||
client.put_dashboard(DashboardName='test2', DashboardBody=widget)
|
||||
client.put_dashboard(DashboardName='test3', DashboardBody=widget)
|
||||
# Doesnt delete anything if all dashboards to be deleted do not exist
|
||||
try:
|
||||
client.delete_dashboards(DashboardNames=['test2', 'test1', 'test_no_match'])
|
||||
except ClientError as err:
|
||||
err.response['Error']['Code'].should.equal('ResourceNotFound')
|
||||
else:
|
||||
raise RuntimeError('Should of raised error')
|
||||
|
||||
resp = client.list_dashboards()
|
||||
len(resp['DashboardEntries']).should.equal(3)
|
||||
|
||||
|
||||
@mock_cloudwatch
|
||||
def test_get_dashboard():
|
||||
client = boto3.client('cloudwatch', region_name='eu-central-1')
|
||||
widget = '{"widgets": [{"type": "text", "x": 0, "y": 7, "width": 3, "height": 3, "properties": {"markdown": "Hello world"}}]}'
|
||||
client.put_dashboard(DashboardName='test1', DashboardBody=widget)
|
||||
|
||||
resp = client.get_dashboard(DashboardName='test1')
|
||||
resp.should.contain('DashboardArn')
|
||||
resp.should.contain('DashboardBody')
|
||||
resp['DashboardName'].should.equal('test1')
|
||||
|
||||
|
||||
@mock_cloudwatch
|
||||
def test_get_dashboard_fail():
|
||||
client = boto3.client('cloudwatch', region_name='eu-central-1')
|
||||
|
||||
try:
|
||||
client.get_dashboard(DashboardName='test1')
|
||||
except ClientError as err:
|
||||
err.response['Error']['Code'].should.equal('ResourceNotFound')
|
||||
else:
|
||||
raise RuntimeError('Should of raised error')
|
||||
|
||||
|
||||
@mock_cloudwatch
|
||||
def test_alarm_state():
|
||||
client = boto3.client('cloudwatch', region_name='eu-central-1')
|
||||
|
||||
client.put_metric_alarm(
|
||||
AlarmName='testalarm1',
|
||||
MetricName='cpu',
|
||||
Namespace='blah',
|
||||
Period=10,
|
||||
EvaluationPeriods=5,
|
||||
Statistic='Average',
|
||||
Threshold=2,
|
||||
ComparisonOperator='GreaterThanThreshold',
|
||||
)
|
||||
client.put_metric_alarm(
|
||||
AlarmName='testalarm2',
|
||||
MetricName='cpu',
|
||||
Namespace='blah',
|
||||
Period=10,
|
||||
EvaluationPeriods=5,
|
||||
Statistic='Average',
|
||||
Threshold=2,
|
||||
ComparisonOperator='GreaterThanThreshold',
|
||||
)
|
||||
|
||||
# This is tested implicitly as if it doesnt work the rest will die
|
||||
client.set_alarm_state(
|
||||
AlarmName='testalarm1',
|
||||
StateValue='ALARM',
|
||||
StateReason='testreason',
|
||||
StateReasonData='{"some": "json_data"}'
|
||||
)
|
||||
|
||||
resp = client.describe_alarms(
|
||||
StateValue='ALARM'
|
||||
)
|
||||
len(resp['MetricAlarms']).should.equal(1)
|
||||
resp['MetricAlarms'][0]['AlarmName'].should.equal('testalarm1')
|
||||
resp['MetricAlarms'][0]['StateValue'].should.equal('ALARM')
|
||||
|
||||
resp = client.describe_alarms(
|
||||
StateValue='OK'
|
||||
)
|
||||
len(resp['MetricAlarms']).should.equal(1)
|
||||
resp['MetricAlarms'][0]['AlarmName'].should.equal('testalarm2')
|
||||
resp['MetricAlarms'][0]['StateValue'].should.equal('OK')
|
||||
|
||||
# Just for sanity
|
||||
resp = client.describe_alarms()
|
||||
len(resp['MetricAlarms']).should.equal(2)
|
||||
|
||||
|
||||
@mock_cloudwatch
|
||||
def test_put_metric_data_no_dimensions():
|
||||
conn = boto3.client('cloudwatch', region_name='us-east-1')
|
||||
|
||||
conn.put_metric_data(
|
||||
Namespace='tester',
|
||||
MetricData=[
|
||||
dict(
|
||||
MetricName='metric',
|
||||
Value=1.5,
|
||||
)
|
||||
]
|
||||
)
|
||||
|
||||
metrics = conn.list_metrics()['Metrics']
|
||||
metrics.should.have.length_of(1)
|
||||
metric = metrics[0]
|
||||
metric['Namespace'].should.equal('tester')
|
||||
metric['MetricName'].should.equal('metric')
|
||||
|
||||
|
||||
|
||||
@mock_cloudwatch
|
||||
def test_put_metric_data_with_statistics():
|
||||
conn = boto3.client('cloudwatch', region_name='us-east-1')
|
||||
|
||||
conn.put_metric_data(
|
||||
Namespace='tester',
|
||||
MetricData=[
|
||||
dict(
|
||||
MetricName='statmetric',
|
||||
Timestamp=datetime(2015, 1, 1),
|
||||
# no Value to test https://github.com/spulec/moto/issues/1615
|
||||
StatisticValues=dict(
|
||||
SampleCount=123.0,
|
||||
Sum=123.0,
|
||||
Minimum=123.0,
|
||||
Maximum=123.0
|
||||
),
|
||||
Unit='Milliseconds',
|
||||
StorageResolution=123
|
||||
)
|
||||
]
|
||||
)
|
||||
|
||||
metrics = conn.list_metrics()['Metrics']
|
||||
metrics.should.have.length_of(1)
|
||||
metric = metrics[0]
|
||||
metric['Namespace'].should.equal('tester')
|
||||
metric['MetricName'].should.equal('statmetric')
|
||||
# TODO: test statistics - https://github.com/spulec/moto/issues/1615
|
||||
|
||||
@mock_cloudwatch
|
||||
def test_get_metric_statistics():
|
||||
conn = boto3.client('cloudwatch', region_name='us-east-1')
|
||||
utc_now = datetime.now(tz=pytz.utc)
|
||||
|
||||
conn.put_metric_data(
|
||||
Namespace='tester',
|
||||
MetricData=[
|
||||
dict(
|
||||
MetricName='metric',
|
||||
Value=1.5,
|
||||
Timestamp=utc_now
|
||||
)
|
||||
]
|
||||
)
|
||||
|
||||
stats = conn.get_metric_statistics(
|
||||
Namespace='tester',
|
||||
MetricName='metric',
|
||||
StartTime=utc_now - timedelta(seconds=60),
|
||||
EndTime=utc_now + timedelta(seconds=60),
|
||||
Period=60,
|
||||
Statistics=['SampleCount', 'Sum']
|
||||
)
|
||||
|
||||
stats['Datapoints'].should.have.length_of(1)
|
||||
datapoint = stats['Datapoints'][0]
|
||||
datapoint['SampleCount'].should.equal(1.0)
|
||||
datapoint['Sum'].should.equal(1.5)
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import boto3
|
||||
from botocore.exceptions import ClientError
|
||||
from datetime import datetime, timedelta
|
||||
import pytz
|
||||
import sure # noqa
|
||||
|
||||
from moto import mock_cloudwatch
|
||||
|
||||
|
||||
@mock_cloudwatch
|
||||
def test_put_list_dashboard():
|
||||
client = boto3.client('cloudwatch', region_name='eu-central-1')
|
||||
widget = '{"widgets": [{"type": "text", "x": 0, "y": 7, "width": 3, "height": 3, "properties": {"markdown": "Hello world"}}]}'
|
||||
|
||||
client.put_dashboard(DashboardName='test1', DashboardBody=widget)
|
||||
resp = client.list_dashboards()
|
||||
|
||||
len(resp['DashboardEntries']).should.equal(1)
|
||||
|
||||
|
||||
@mock_cloudwatch
|
||||
def test_put_list_prefix_nomatch_dashboard():
|
||||
client = boto3.client('cloudwatch', region_name='eu-central-1')
|
||||
widget = '{"widgets": [{"type": "text", "x": 0, "y": 7, "width": 3, "height": 3, "properties": {"markdown": "Hello world"}}]}'
|
||||
|
||||
client.put_dashboard(DashboardName='test1', DashboardBody=widget)
|
||||
resp = client.list_dashboards(DashboardNamePrefix='nomatch')
|
||||
|
||||
len(resp['DashboardEntries']).should.equal(0)
|
||||
|
||||
|
||||
@mock_cloudwatch
|
||||
def test_delete_dashboard():
|
||||
client = boto3.client('cloudwatch', region_name='eu-central-1')
|
||||
widget = '{"widgets": [{"type": "text", "x": 0, "y": 7, "width": 3, "height": 3, "properties": {"markdown": "Hello world"}}]}'
|
||||
|
||||
client.put_dashboard(DashboardName='test1', DashboardBody=widget)
|
||||
client.put_dashboard(DashboardName='test2', DashboardBody=widget)
|
||||
client.put_dashboard(DashboardName='test3', DashboardBody=widget)
|
||||
client.delete_dashboards(DashboardNames=['test2', 'test1'])
|
||||
|
||||
resp = client.list_dashboards(DashboardNamePrefix='test3')
|
||||
len(resp['DashboardEntries']).should.equal(1)
|
||||
|
||||
|
||||
@mock_cloudwatch
|
||||
def test_delete_dashboard_fail():
|
||||
client = boto3.client('cloudwatch', region_name='eu-central-1')
|
||||
widget = '{"widgets": [{"type": "text", "x": 0, "y": 7, "width": 3, "height": 3, "properties": {"markdown": "Hello world"}}]}'
|
||||
|
||||
client.put_dashboard(DashboardName='test1', DashboardBody=widget)
|
||||
client.put_dashboard(DashboardName='test2', DashboardBody=widget)
|
||||
client.put_dashboard(DashboardName='test3', DashboardBody=widget)
|
||||
# Doesnt delete anything if all dashboards to be deleted do not exist
|
||||
try:
|
||||
client.delete_dashboards(DashboardNames=['test2', 'test1', 'test_no_match'])
|
||||
except ClientError as err:
|
||||
err.response['Error']['Code'].should.equal('ResourceNotFound')
|
||||
else:
|
||||
raise RuntimeError('Should of raised error')
|
||||
|
||||
resp = client.list_dashboards()
|
||||
len(resp['DashboardEntries']).should.equal(3)
|
||||
|
||||
|
||||
@mock_cloudwatch
|
||||
def test_get_dashboard():
|
||||
client = boto3.client('cloudwatch', region_name='eu-central-1')
|
||||
widget = '{"widgets": [{"type": "text", "x": 0, "y": 7, "width": 3, "height": 3, "properties": {"markdown": "Hello world"}}]}'
|
||||
client.put_dashboard(DashboardName='test1', DashboardBody=widget)
|
||||
|
||||
resp = client.get_dashboard(DashboardName='test1')
|
||||
resp.should.contain('DashboardArn')
|
||||
resp.should.contain('DashboardBody')
|
||||
resp['DashboardName'].should.equal('test1')
|
||||
|
||||
|
||||
@mock_cloudwatch
|
||||
def test_get_dashboard_fail():
|
||||
client = boto3.client('cloudwatch', region_name='eu-central-1')
|
||||
|
||||
try:
|
||||
client.get_dashboard(DashboardName='test1')
|
||||
except ClientError as err:
|
||||
err.response['Error']['Code'].should.equal('ResourceNotFound')
|
||||
else:
|
||||
raise RuntimeError('Should of raised error')
|
||||
|
||||
|
||||
@mock_cloudwatch
|
||||
def test_alarm_state():
|
||||
client = boto3.client('cloudwatch', region_name='eu-central-1')
|
||||
|
||||
client.put_metric_alarm(
|
||||
AlarmName='testalarm1',
|
||||
MetricName='cpu',
|
||||
Namespace='blah',
|
||||
Period=10,
|
||||
EvaluationPeriods=5,
|
||||
Statistic='Average',
|
||||
Threshold=2,
|
||||
ComparisonOperator='GreaterThanThreshold',
|
||||
)
|
||||
client.put_metric_alarm(
|
||||
AlarmName='testalarm2',
|
||||
MetricName='cpu',
|
||||
Namespace='blah',
|
||||
Period=10,
|
||||
EvaluationPeriods=5,
|
||||
Statistic='Average',
|
||||
Threshold=2,
|
||||
ComparisonOperator='GreaterThanThreshold',
|
||||
)
|
||||
|
||||
# This is tested implicitly as if it doesnt work the rest will die
|
||||
client.set_alarm_state(
|
||||
AlarmName='testalarm1',
|
||||
StateValue='ALARM',
|
||||
StateReason='testreason',
|
||||
StateReasonData='{"some": "json_data"}'
|
||||
)
|
||||
|
||||
resp = client.describe_alarms(
|
||||
StateValue='ALARM'
|
||||
)
|
||||
len(resp['MetricAlarms']).should.equal(1)
|
||||
resp['MetricAlarms'][0]['AlarmName'].should.equal('testalarm1')
|
||||
resp['MetricAlarms'][0]['StateValue'].should.equal('ALARM')
|
||||
|
||||
resp = client.describe_alarms(
|
||||
StateValue='OK'
|
||||
)
|
||||
len(resp['MetricAlarms']).should.equal(1)
|
||||
resp['MetricAlarms'][0]['AlarmName'].should.equal('testalarm2')
|
||||
resp['MetricAlarms'][0]['StateValue'].should.equal('OK')
|
||||
|
||||
# Just for sanity
|
||||
resp = client.describe_alarms()
|
||||
len(resp['MetricAlarms']).should.equal(2)
|
||||
|
||||
|
||||
@mock_cloudwatch
|
||||
def test_put_metric_data_no_dimensions():
|
||||
conn = boto3.client('cloudwatch', region_name='us-east-1')
|
||||
|
||||
conn.put_metric_data(
|
||||
Namespace='tester',
|
||||
MetricData=[
|
||||
dict(
|
||||
MetricName='metric',
|
||||
Value=1.5,
|
||||
)
|
||||
]
|
||||
)
|
||||
|
||||
metrics = conn.list_metrics()['Metrics']
|
||||
metrics.should.have.length_of(1)
|
||||
metric = metrics[0]
|
||||
metric['Namespace'].should.equal('tester')
|
||||
metric['MetricName'].should.equal('metric')
|
||||
|
||||
|
||||
|
||||
@mock_cloudwatch
|
||||
def test_put_metric_data_with_statistics():
|
||||
conn = boto3.client('cloudwatch', region_name='us-east-1')
|
||||
|
||||
conn.put_metric_data(
|
||||
Namespace='tester',
|
||||
MetricData=[
|
||||
dict(
|
||||
MetricName='statmetric',
|
||||
Timestamp=datetime(2015, 1, 1),
|
||||
# no Value to test https://github.com/spulec/moto/issues/1615
|
||||
StatisticValues=dict(
|
||||
SampleCount=123.0,
|
||||
Sum=123.0,
|
||||
Minimum=123.0,
|
||||
Maximum=123.0
|
||||
),
|
||||
Unit='Milliseconds',
|
||||
StorageResolution=123
|
||||
)
|
||||
]
|
||||
)
|
||||
|
||||
metrics = conn.list_metrics()['Metrics']
|
||||
metrics.should.have.length_of(1)
|
||||
metric = metrics[0]
|
||||
metric['Namespace'].should.equal('tester')
|
||||
metric['MetricName'].should.equal('statmetric')
|
||||
# TODO: test statistics - https://github.com/spulec/moto/issues/1615
|
||||
|
||||
@mock_cloudwatch
|
||||
def test_get_metric_statistics():
|
||||
conn = boto3.client('cloudwatch', region_name='us-east-1')
|
||||
utc_now = datetime.now(tz=pytz.utc)
|
||||
|
||||
conn.put_metric_data(
|
||||
Namespace='tester',
|
||||
MetricData=[
|
||||
dict(
|
||||
MetricName='metric',
|
||||
Value=1.5,
|
||||
Timestamp=utc_now
|
||||
)
|
||||
]
|
||||
)
|
||||
|
||||
stats = conn.get_metric_statistics(
|
||||
Namespace='tester',
|
||||
MetricName='metric',
|
||||
StartTime=utc_now - timedelta(seconds=60),
|
||||
EndTime=utc_now + timedelta(seconds=60),
|
||||
Period=60,
|
||||
Statistics=['SampleCount', 'Sum']
|
||||
)
|
||||
|
||||
stats['Datapoints'].should.have.length_of(1)
|
||||
datapoint = stats['Datapoints'][0]
|
||||
datapoint['SampleCount'].should.equal(1.0)
|
||||
datapoint['Sum'].should.equal(1.5)
|
||||
|
|
|
|||
|
|
@ -1,85 +1,85 @@
|
|||
from __future__ import unicode_literals
|
||||
|
||||
import boto3
|
||||
|
||||
from moto import mock_cognitoidentity
|
||||
import sure # noqa
|
||||
|
||||
from moto.cognitoidentity.utils import get_random_identity_id
|
||||
|
||||
|
||||
@mock_cognitoidentity
|
||||
def test_create_identity_pool():
|
||||
conn = boto3.client('cognito-identity', 'us-west-2')
|
||||
|
||||
result = conn.create_identity_pool(IdentityPoolName='TestPool',
|
||||
AllowUnauthenticatedIdentities=False,
|
||||
SupportedLoginProviders={'graph.facebook.com': '123456789012345'},
|
||||
DeveloperProviderName='devname',
|
||||
OpenIdConnectProviderARNs=['arn:aws:rds:eu-west-2:123456789012:db:mysql-db'],
|
||||
CognitoIdentityProviders=[
|
||||
{
|
||||
'ProviderName': 'testprovider',
|
||||
'ClientId': 'CLIENT12345',
|
||||
'ServerSideTokenCheck': True
|
||||
},
|
||||
],
|
||||
SamlProviderARNs=['arn:aws:rds:eu-west-2:123456789012:db:mysql-db'])
|
||||
assert result['IdentityPoolId'] != ''
|
||||
|
||||
|
||||
# testing a helper function
|
||||
def test_get_random_identity_id():
|
||||
assert len(get_random_identity_id('us-west-2')) > 0
|
||||
assert len(get_random_identity_id('us-west-2').split(':')[1]) == 19
|
||||
|
||||
|
||||
@mock_cognitoidentity
|
||||
def test_get_id():
|
||||
# These two do NOT work in server mode. They just don't return the data from the model.
|
||||
conn = boto3.client('cognito-identity', 'us-west-2')
|
||||
result = conn.get_id(AccountId='someaccount',
|
||||
IdentityPoolId='us-west-2:12345',
|
||||
Logins={
|
||||
'someurl': '12345'
|
||||
})
|
||||
print(result)
|
||||
assert result.get('IdentityId', "").startswith('us-west-2') or result.get('ResponseMetadata').get('HTTPStatusCode') == 200
|
||||
|
||||
|
||||
@mock_cognitoidentity
|
||||
def test_get_credentials_for_identity():
|
||||
# These two do NOT work in server mode. They just don't return the data from the model.
|
||||
conn = boto3.client('cognito-identity', 'us-west-2')
|
||||
result = conn.get_credentials_for_identity(IdentityId='12345')
|
||||
|
||||
assert result.get('Expiration', 0) > 0 or result.get('ResponseMetadata').get('HTTPStatusCode') == 200
|
||||
assert result.get('IdentityId') == '12345' or result.get('ResponseMetadata').get('HTTPStatusCode') == 200
|
||||
|
||||
|
||||
@mock_cognitoidentity
|
||||
def test_get_open_id_token_for_developer_identity():
|
||||
conn = boto3.client('cognito-identity', 'us-west-2')
|
||||
result = conn.get_open_id_token_for_developer_identity(
|
||||
IdentityPoolId='us-west-2:12345',
|
||||
IdentityId='12345',
|
||||
Logins={
|
||||
'someurl': '12345'
|
||||
},
|
||||
TokenDuration=123
|
||||
)
|
||||
assert len(result['Token'])
|
||||
assert result['IdentityId'] == '12345'
|
||||
|
||||
@mock_cognitoidentity
|
||||
def test_get_open_id_token_for_developer_identity_when_no_explicit_identity_id():
|
||||
conn = boto3.client('cognito-identity', 'us-west-2')
|
||||
result = conn.get_open_id_token_for_developer_identity(
|
||||
IdentityPoolId='us-west-2:12345',
|
||||
Logins={
|
||||
'someurl': '12345'
|
||||
},
|
||||
TokenDuration=123
|
||||
)
|
||||
assert len(result['Token']) > 0
|
||||
assert len(result['IdentityId']) > 0
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import boto3
|
||||
|
||||
from moto import mock_cognitoidentity
|
||||
import sure # noqa
|
||||
|
||||
from moto.cognitoidentity.utils import get_random_identity_id
|
||||
|
||||
|
||||
@mock_cognitoidentity
|
||||
def test_create_identity_pool():
|
||||
conn = boto3.client('cognito-identity', 'us-west-2')
|
||||
|
||||
result = conn.create_identity_pool(IdentityPoolName='TestPool',
|
||||
AllowUnauthenticatedIdentities=False,
|
||||
SupportedLoginProviders={'graph.facebook.com': '123456789012345'},
|
||||
DeveloperProviderName='devname',
|
||||
OpenIdConnectProviderARNs=['arn:aws:rds:eu-west-2:123456789012:db:mysql-db'],
|
||||
CognitoIdentityProviders=[
|
||||
{
|
||||
'ProviderName': 'testprovider',
|
||||
'ClientId': 'CLIENT12345',
|
||||
'ServerSideTokenCheck': True
|
||||
},
|
||||
],
|
||||
SamlProviderARNs=['arn:aws:rds:eu-west-2:123456789012:db:mysql-db'])
|
||||
assert result['IdentityPoolId'] != ''
|
||||
|
||||
|
||||
# testing a helper function
|
||||
def test_get_random_identity_id():
|
||||
assert len(get_random_identity_id('us-west-2')) > 0
|
||||
assert len(get_random_identity_id('us-west-2').split(':')[1]) == 19
|
||||
|
||||
|
||||
@mock_cognitoidentity
|
||||
def test_get_id():
|
||||
# These two do NOT work in server mode. They just don't return the data from the model.
|
||||
conn = boto3.client('cognito-identity', 'us-west-2')
|
||||
result = conn.get_id(AccountId='someaccount',
|
||||
IdentityPoolId='us-west-2:12345',
|
||||
Logins={
|
||||
'someurl': '12345'
|
||||
})
|
||||
print(result)
|
||||
assert result.get('IdentityId', "").startswith('us-west-2') or result.get('ResponseMetadata').get('HTTPStatusCode') == 200
|
||||
|
||||
|
||||
@mock_cognitoidentity
|
||||
def test_get_credentials_for_identity():
|
||||
# These two do NOT work in server mode. They just don't return the data from the model.
|
||||
conn = boto3.client('cognito-identity', 'us-west-2')
|
||||
result = conn.get_credentials_for_identity(IdentityId='12345')
|
||||
|
||||
assert result.get('Expiration', 0) > 0 or result.get('ResponseMetadata').get('HTTPStatusCode') == 200
|
||||
assert result.get('IdentityId') == '12345' or result.get('ResponseMetadata').get('HTTPStatusCode') == 200
|
||||
|
||||
|
||||
@mock_cognitoidentity
|
||||
def test_get_open_id_token_for_developer_identity():
|
||||
conn = boto3.client('cognito-identity', 'us-west-2')
|
||||
result = conn.get_open_id_token_for_developer_identity(
|
||||
IdentityPoolId='us-west-2:12345',
|
||||
IdentityId='12345',
|
||||
Logins={
|
||||
'someurl': '12345'
|
||||
},
|
||||
TokenDuration=123
|
||||
)
|
||||
assert len(result['Token'])
|
||||
assert result['IdentityId'] == '12345'
|
||||
|
||||
@mock_cognitoidentity
|
||||
def test_get_open_id_token_for_developer_identity_when_no_explicit_identity_id():
|
||||
conn = boto3.client('cognito-identity', 'us-west-2')
|
||||
result = conn.get_open_id_token_for_developer_identity(
|
||||
IdentityPoolId='us-west-2:12345',
|
||||
Logins={
|
||||
'someurl': '12345'
|
||||
},
|
||||
TokenDuration=123
|
||||
)
|
||||
assert len(result['Token']) > 0
|
||||
assert len(result['IdentityId']) > 0
|
||||
|
|
|
|||
|
|
@ -1,45 +1,45 @@
|
|||
from __future__ import unicode_literals
|
||||
|
||||
import json
|
||||
import sure # noqa
|
||||
|
||||
import moto.server as server
|
||||
from moto import mock_cognitoidentity
|
||||
|
||||
'''
|
||||
Test the different server responses
|
||||
'''
|
||||
|
||||
|
||||
@mock_cognitoidentity
|
||||
def test_create_identity_pool():
|
||||
|
||||
backend = server.create_backend_app("cognito-identity")
|
||||
test_client = backend.test_client()
|
||||
|
||||
res = test_client.post('/',
|
||||
data={"IdentityPoolName": "test", "AllowUnauthenticatedIdentities": True},
|
||||
headers={
|
||||
"X-Amz-Target": "com.amazonaws.cognito.identity.model.AWSCognitoIdentityService.CreateIdentityPool"},
|
||||
)
|
||||
|
||||
json_data = json.loads(res.data.decode("utf-8"))
|
||||
assert json_data['IdentityPoolName'] == "test"
|
||||
|
||||
|
||||
@mock_cognitoidentity
|
||||
def test_get_id():
|
||||
backend = server.create_backend_app("cognito-identity")
|
||||
test_client = backend.test_client()
|
||||
|
||||
res = test_client.post('/',
|
||||
data=json.dumps({'AccountId': 'someaccount',
|
||||
'IdentityPoolId': 'us-west-2:12345',
|
||||
'Logins': {'someurl': '12345'}}),
|
||||
headers={
|
||||
"X-Amz-Target": "com.amazonaws.cognito.identity.model.AWSCognitoIdentityService.GetId"},
|
||||
)
|
||||
|
||||
print(res.data)
|
||||
json_data = json.loads(res.data.decode("utf-8"))
|
||||
assert ':' in json_data['IdentityId']
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import json
|
||||
import sure # noqa
|
||||
|
||||
import moto.server as server
|
||||
from moto import mock_cognitoidentity
|
||||
|
||||
'''
|
||||
Test the different server responses
|
||||
'''
|
||||
|
||||
|
||||
@mock_cognitoidentity
|
||||
def test_create_identity_pool():
|
||||
|
||||
backend = server.create_backend_app("cognito-identity")
|
||||
test_client = backend.test_client()
|
||||
|
||||
res = test_client.post('/',
|
||||
data={"IdentityPoolName": "test", "AllowUnauthenticatedIdentities": True},
|
||||
headers={
|
||||
"X-Amz-Target": "com.amazonaws.cognito.identity.model.AWSCognitoIdentityService.CreateIdentityPool"},
|
||||
)
|
||||
|
||||
json_data = json.loads(res.data.decode("utf-8"))
|
||||
assert json_data['IdentityPoolName'] == "test"
|
||||
|
||||
|
||||
@mock_cognitoidentity
|
||||
def test_get_id():
|
||||
backend = server.create_backend_app("cognito-identity")
|
||||
test_client = backend.test_client()
|
||||
|
||||
res = test_client.post('/',
|
||||
data=json.dumps({'AccountId': 'someaccount',
|
||||
'IdentityPoolId': 'us-west-2:12345',
|
||||
'Logins': {'someurl': '12345'}}),
|
||||
headers={
|
||||
"X-Amz-Target": "com.amazonaws.cognito.identity.model.AWSCognitoIdentityService.GetId"},
|
||||
)
|
||||
|
||||
print(res.data)
|
||||
json_data = json.loads(res.data.decode("utf-8"))
|
||||
assert ':' in json_data['IdentityId']
|
||||
|
|
|
|||
File diff suppressed because it is too large
Load diff
|
|
@ -1,98 +1,98 @@
|
|||
from __future__ import unicode_literals
|
||||
import boto
|
||||
from boto.exception import EC2ResponseError
|
||||
import sure # noqa
|
||||
import unittest
|
||||
|
||||
import tests.backport_assert_raises # noqa
|
||||
from nose.tools import assert_raises
|
||||
|
||||
from moto import mock_ec2_deprecated, mock_s3_deprecated
|
||||
|
||||
'''
|
||||
Test the different ways that the decorator can be used
|
||||
'''
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_basic_connect():
|
||||
boto.connect_ec2()
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_basic_decorator():
|
||||
conn = boto.connect_ec2('the_key', 'the_secret')
|
||||
list(conn.get_all_instances()).should.equal([])
|
||||
|
||||
|
||||
def test_context_manager():
|
||||
conn = boto.connect_ec2('the_key', 'the_secret')
|
||||
with assert_raises(EC2ResponseError):
|
||||
conn.get_all_instances()
|
||||
|
||||
with mock_ec2_deprecated():
|
||||
conn = boto.connect_ec2('the_key', 'the_secret')
|
||||
list(conn.get_all_instances()).should.equal([])
|
||||
|
||||
with assert_raises(EC2ResponseError):
|
||||
conn = boto.connect_ec2('the_key', 'the_secret')
|
||||
conn.get_all_instances()
|
||||
|
||||
|
||||
def test_decorator_start_and_stop():
|
||||
conn = boto.connect_ec2('the_key', 'the_secret')
|
||||
with assert_raises(EC2ResponseError):
|
||||
conn.get_all_instances()
|
||||
|
||||
mock = mock_ec2_deprecated()
|
||||
mock.start()
|
||||
conn = boto.connect_ec2('the_key', 'the_secret')
|
||||
list(conn.get_all_instances()).should.equal([])
|
||||
mock.stop()
|
||||
|
||||
with assert_raises(EC2ResponseError):
|
||||
conn.get_all_instances()
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_decorater_wrapped_gets_set():
|
||||
"""
|
||||
Moto decorator's __wrapped__ should get set to the tests function
|
||||
"""
|
||||
test_decorater_wrapped_gets_set.__wrapped__.__name__.should.equal(
|
||||
'test_decorater_wrapped_gets_set')
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
class Tester(object):
|
||||
|
||||
def test_the_class(self):
|
||||
conn = boto.connect_ec2()
|
||||
list(conn.get_all_instances()).should.have.length_of(0)
|
||||
|
||||
def test_still_the_same(self):
|
||||
conn = boto.connect_ec2()
|
||||
list(conn.get_all_instances()).should.have.length_of(0)
|
||||
|
||||
|
||||
@mock_s3_deprecated
|
||||
class TesterWithSetup(unittest.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
self.conn = boto.connect_s3()
|
||||
self.conn.create_bucket('mybucket')
|
||||
|
||||
def test_still_the_same(self):
|
||||
bucket = self.conn.get_bucket('mybucket')
|
||||
bucket.name.should.equal("mybucket")
|
||||
|
||||
|
||||
@mock_s3_deprecated
|
||||
class TesterWithStaticmethod(object):
|
||||
|
||||
@staticmethod
|
||||
def static(*args):
|
||||
assert not args or not isinstance(args[0], TesterWithStaticmethod)
|
||||
|
||||
def test_no_instance_sent_to_staticmethod(self):
|
||||
self.static()
|
||||
from __future__ import unicode_literals
|
||||
import boto
|
||||
from boto.exception import EC2ResponseError
|
||||
import sure # noqa
|
||||
import unittest
|
||||
|
||||
import tests.backport_assert_raises # noqa
|
||||
from nose.tools import assert_raises
|
||||
|
||||
from moto import mock_ec2_deprecated, mock_s3_deprecated
|
||||
|
||||
'''
|
||||
Test the different ways that the decorator can be used
|
||||
'''
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_basic_connect():
|
||||
boto.connect_ec2()
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_basic_decorator():
|
||||
conn = boto.connect_ec2('the_key', 'the_secret')
|
||||
list(conn.get_all_instances()).should.equal([])
|
||||
|
||||
|
||||
def test_context_manager():
|
||||
conn = boto.connect_ec2('the_key', 'the_secret')
|
||||
with assert_raises(EC2ResponseError):
|
||||
conn.get_all_instances()
|
||||
|
||||
with mock_ec2_deprecated():
|
||||
conn = boto.connect_ec2('the_key', 'the_secret')
|
||||
list(conn.get_all_instances()).should.equal([])
|
||||
|
||||
with assert_raises(EC2ResponseError):
|
||||
conn = boto.connect_ec2('the_key', 'the_secret')
|
||||
conn.get_all_instances()
|
||||
|
||||
|
||||
def test_decorator_start_and_stop():
|
||||
conn = boto.connect_ec2('the_key', 'the_secret')
|
||||
with assert_raises(EC2ResponseError):
|
||||
conn.get_all_instances()
|
||||
|
||||
mock = mock_ec2_deprecated()
|
||||
mock.start()
|
||||
conn = boto.connect_ec2('the_key', 'the_secret')
|
||||
list(conn.get_all_instances()).should.equal([])
|
||||
mock.stop()
|
||||
|
||||
with assert_raises(EC2ResponseError):
|
||||
conn.get_all_instances()
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_decorater_wrapped_gets_set():
|
||||
"""
|
||||
Moto decorator's __wrapped__ should get set to the tests function
|
||||
"""
|
||||
test_decorater_wrapped_gets_set.__wrapped__.__name__.should.equal(
|
||||
'test_decorater_wrapped_gets_set')
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
class Tester(object):
|
||||
|
||||
def test_the_class(self):
|
||||
conn = boto.connect_ec2()
|
||||
list(conn.get_all_instances()).should.have.length_of(0)
|
||||
|
||||
def test_still_the_same(self):
|
||||
conn = boto.connect_ec2()
|
||||
list(conn.get_all_instances()).should.have.length_of(0)
|
||||
|
||||
|
||||
@mock_s3_deprecated
|
||||
class TesterWithSetup(unittest.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
self.conn = boto.connect_s3()
|
||||
self.conn.create_bucket('mybucket')
|
||||
|
||||
def test_still_the_same(self):
|
||||
bucket = self.conn.get_bucket('mybucket')
|
||||
bucket.name.should.equal("mybucket")
|
||||
|
||||
|
||||
@mock_s3_deprecated
|
||||
class TesterWithStaticmethod(object):
|
||||
|
||||
@staticmethod
|
||||
def static(*args):
|
||||
assert not args or not isinstance(args[0], TesterWithStaticmethod)
|
||||
|
||||
def test_no_instance_sent_to_staticmethod(self):
|
||||
self.static()
|
||||
|
|
|
|||
|
|
@ -1,46 +1,46 @@
|
|||
from __future__ import unicode_literals
|
||||
import sure # noqa
|
||||
from nose.tools import assert_raises
|
||||
import requests
|
||||
|
||||
from moto import mock_ec2, settings
|
||||
|
||||
# In server mode we talk to the local moto server; otherwise moto
# intercepts requests to the magic EC2 metadata address.
if settings.TEST_SERVER_MODE:
    BASE_URL = 'http://localhost:5000'
else:
    BASE_URL = 'http://169.254.169.254'


@mock_ec2
def test_latest_meta_data():
    """The metadata root lists only the 'iam' subtree."""
    response = requests.get("{0}/latest/meta-data/".format(BASE_URL))
    response.content.should.equal(b"iam")


@mock_ec2
def test_meta_data_iam():
    """The iam subtree exposes credentials for the default role."""
    response = requests.get("{0}/latest/meta-data/iam".format(BASE_URL))
    default_role = response.json()['security-credentials']['default-role']
    for expected_key in ('AccessKeyId', 'SecretAccessKey', 'Token', 'Expiration'):
        default_role.should.contain(expected_key)


@mock_ec2
def test_meta_data_security_credentials():
    """Listing security credentials yields the default role name."""
    response = requests.get(
        "{0}/latest/meta-data/iam/security-credentials/".format(BASE_URL))
    response.content.should.equal(b"default-role")


@mock_ec2
def test_meta_data_default_role():
    """The default role document carries the usual credential fields."""
    response = requests.get(
        "{0}/latest/meta-data/iam/security-credentials/default-role".format(BASE_URL))
    document = response.json()
    for expected_key in ('AccessKeyId', 'SecretAccessKey', 'Token', 'Expiration'):
        document.should.contain(expected_key)
||||
from __future__ import unicode_literals
|
||||
import sure # noqa
|
||||
from nose.tools import assert_raises
|
||||
import requests
|
||||
|
||||
from moto import mock_ec2, settings
|
||||
|
||||
# NOTE(review): duplicate copy of the instance-metadata tests (diff artifact).
BASE_URL = 'http://localhost:5000' if settings.TEST_SERVER_MODE else 'http://169.254.169.254'


@mock_ec2
def test_latest_meta_data():
    res = requests.get("{0}/latest/meta-data/".format(BASE_URL))
    res.content.should.equal(b"iam")


@mock_ec2
def test_meta_data_iam():
    res = requests.get("{0}/latest/meta-data/iam".format(BASE_URL))
    json_response = res.json()
    default_role = json_response['security-credentials']['default-role']
    default_role.should.contain('AccessKeyId')
    default_role.should.contain('SecretAccessKey')
    default_role.should.contain('Token')
    default_role.should.contain('Expiration')


@mock_ec2
def test_meta_data_security_credentials():
    res = requests.get(
        "{0}/latest/meta-data/iam/security-credentials/".format(BASE_URL))
    res.content.should.equal(b"default-role")


@mock_ec2
def test_meta_data_default_role():
    res = requests.get(
        "{0}/latest/meta-data/iam/security-credentials/default-role".format(BASE_URL))
    json_response = res.json()
    json_response.should.contain('AccessKeyId')
    json_response.should.contain('SecretAccessKey')
    json_response.should.contain('Token')
    json_response.should.contain('Expiration')
|
|
|
|||
|
|
@ -1,33 +1,33 @@
|
|||
from __future__ import unicode_literals
|
||||
import sure # noqa
|
||||
from nose.tools import assert_raises
|
||||
import requests
|
||||
|
||||
import boto3
|
||||
from moto import mock_sqs, settings
|
||||
|
||||
base_url = "http://localhost:5000" if settings.TEST_SERVER_MODE else "http://motoapi.amazonaws.com"


@mock_sqs
def test_reset_api():
    """POSTing to /moto-api/reset wipes all mocked backend state."""
    client = boto3.client("sqs", region_name='us-west-1')
    client.create_queue(QueueName="queue1")
    client.list_queues()['QueueUrls'].should.have.length_of(1)

    res = requests.post("{base_url}/moto-api/reset".format(base_url=base_url))
    res.content.should.equal(b'{"status": "ok"}')

    client.list_queues().shouldnt.contain('QueueUrls')  # No more queues


@mock_sqs
def test_data_api():
    """/moto-api/data.json exposes the mocked backends' internal models."""
    client = boto3.client("sqs", region_name='us-west-1')
    client.create_queue(QueueName="queue1")

    res = requests.post("{base_url}/moto-api/data.json".format(base_url=base_url))
    queues = res.json()['sqs']['Queue']
    len(queues).should.equal(1)
    queues[0]['name'].should.equal("queue1")
||||
from __future__ import unicode_literals
|
||||
import sure # noqa
|
||||
from nose.tools import assert_raises
|
||||
import requests
|
||||
|
||||
import boto3
|
||||
from moto import mock_sqs, settings
|
||||
|
||||
# NOTE(review): duplicate copy of the moto-api tests (diff artifact).
base_url = "http://localhost:5000" if settings.TEST_SERVER_MODE else "http://motoapi.amazonaws.com"


@mock_sqs
def test_reset_api():
    conn = boto3.client("sqs", region_name='us-west-1')
    conn.create_queue(QueueName="queue1")
    conn.list_queues()['QueueUrls'].should.have.length_of(1)

    res = requests.post("{base_url}/moto-api/reset".format(base_url=base_url))
    res.content.should.equal(b'{"status": "ok"}')

    conn.list_queues().shouldnt.contain('QueueUrls')  # No more queues


@mock_sqs
def test_data_api():
    conn = boto3.client("sqs", region_name='us-west-1')
    conn.create_queue(QueueName="queue1")

    res = requests.post("{base_url}/moto-api/data.json".format(base_url=base_url))
    queues = res.json()['sqs']['Queue']
    len(queues).should.equal(1)
    queue = queues[0]
    queue['name'].should.equal("queue1")
||||
|
|
|
|||
|
|
@ -1,29 +1,29 @@
|
|||
from __future__ import unicode_literals
|
||||
import unittest
|
||||
|
||||
from boto.sqs.connection import SQSConnection
|
||||
from boto.sqs.message import Message
|
||||
from boto.ec2 import EC2Connection
|
||||
|
||||
from moto import mock_sqs_deprecated, mock_ec2_deprecated
|
||||
|
||||
|
||||
class TestNestedDecorators(unittest.TestCase):
    """Entering one moto mock inside another must not reset the outer one."""

    @mock_sqs_deprecated
    def setup_sqs_queue(self):
        # Runs under mock_sqs while mock_ec2 is already active.
        conn = SQSConnection()
        queue = conn.create_queue('some-queue')

        message = Message()
        message.set_body('This is my first message.')
        queue.write(message)

        self.assertEqual(queue.count(), 1)

    @mock_ec2_deprecated
    def test_nested(self):
        self.setup_sqs_queue()

        # The EC2 mock is still usable after the nested SQS mock exits.
        conn = EC2Connection()
        conn.run_instances('ami-123456')
||||
from __future__ import unicode_literals
|
||||
import unittest
|
||||
|
||||
from boto.sqs.connection import SQSConnection
|
||||
from boto.sqs.message import Message
|
||||
from boto.ec2 import EC2Connection
|
||||
|
||||
from moto import mock_sqs_deprecated, mock_ec2_deprecated
|
||||
|
||||
|
||||
# NOTE(review): duplicate copy of TestNestedDecorators (diff artifact).
class TestNestedDecorators(unittest.TestCase):

    @mock_sqs_deprecated
    def setup_sqs_queue(self):
        conn = SQSConnection()
        q = conn.create_queue('some-queue')

        m = Message()
        m.set_body('This is my first message.')
        q.write(m)

        self.assertEqual(q.count(), 1)

    @mock_ec2_deprecated
    def test_nested(self):
        self.setup_sqs_queue()

        conn = EC2Connection()
        conn.run_instances('ami-123456')
||||
|
|
|
|||
|
|
@ -1,81 +1,81 @@
|
|||
from __future__ import unicode_literals
|
||||
|
||||
import sure # noqa
|
||||
|
||||
from moto.core.responses import AWSServiceSpec
|
||||
from moto.core.responses import flatten_json_request_body
|
||||
|
||||
|
||||
def test_flatten_json_request_body():
    """flatten_json_request_body expands nested EMR RunJobFlow JSON into the
    flat, dotted 'member'/'entry'-indexed query-parameter style."""
    spec = AWSServiceSpec(
        'data/emr/2009-03-31/service-2.json').input_spec('RunJobFlow')

    body = {
        'Name': 'cluster',
        'Instances': {
            'Ec2KeyName': 'ec2key',
            'InstanceGroups': [
                {'InstanceRole': 'MASTER',
                 'InstanceType': 'm1.small'},
                {'InstanceRole': 'CORE',
                 'InstanceType': 'm1.medium'},
            ],
            'Placement': {'AvailabilityZone': 'us-east-1'},
        },
        'Steps': [
            {'HadoopJarStep': {
                'Properties': [
                    {'Key': 'k1', 'Value': 'v1'},
                    {'Key': 'k2', 'Value': 'v2'}
                ],
                'Args': ['arg1', 'arg2']}},
        ],
        'Configurations': [
            {'Classification': 'class',
             'Properties': {'propkey1': 'propkey1',
                            'propkey2': 'propkey2'}},
            {'Classification': 'anotherclass',
             'Properties': {'propkey3': 'propkey3'}},
        ]
    }

    flat = flatten_json_request_body('', body, spec)

    # Scalars keep their (dotted) names.
    flat['Name'].should.equal(body['Name'])
    flat['Instances.Ec2KeyName'].should.equal(body['Instances']['Ec2KeyName'])

    # Lists flatten to 1-based '.member.N.' keys.
    for n, group in enumerate(body['Instances']['InstanceGroups'], start=1):
        flat['Instances.InstanceGroups.member.' + str(n) + '.InstanceRole'].should.equal(
            group['InstanceRole'])
        flat['Instances.InstanceGroups.member.' + str(n) + '.InstanceType'].should.equal(
            group['InstanceType'])
    flat['Instances.Placement.AvailabilityZone'].should.equal(
        body['Instances']['Placement']['AvailabilityZone'])

    for n, wrapper in enumerate(body['Steps'], start=1):
        prefix = 'Steps.member.' + str(n) + '.HadoopJarStep'
        step = wrapper['HadoopJarStep']
        i = 0
        while prefix + '.Properties.member.' + str(i + 1) + '.Key' in flat:
            flat[prefix + '.Properties.member.' +
                 str(i + 1) + '.Key'].should.equal(step['Properties'][i]['Key'])
            flat[prefix + '.Properties.member.' +
                 str(i + 1) + '.Value'].should.equal(step['Properties'][i]['Value'])
            i += 1
        i = 0
        while prefix + '.Args.member.' + str(i + 1) in flat:
            flat[prefix + '.Args.member.' +
                 str(i + 1)].should.equal(step['Args'][i])
            i += 1

    for n, config in enumerate(body['Configurations'], start=1):
        flat['Configurations.member.' + str(n) + '.Classification'].should.equal(
            config['Classification'])

        # Map values flatten to '.Properties.entry.N.key/.value' pairs.
        props = {}
        i = 1
        keyfmt = 'Configurations.member.{0}.Properties.entry.{1}'
        key = keyfmt.format(n, i)
        while key + '.key' in flat:
            props[flat[key + '.key']] = flat[key + '.value']
            i += 1
            key = keyfmt.format(n, i)
        props.should.equal(config['Properties'])
||||
from __future__ import unicode_literals
|
||||
|
||||
import sure # noqa
|
||||
|
||||
from moto.core.responses import AWSServiceSpec
|
||||
from moto.core.responses import flatten_json_request_body
|
||||
|
||||
|
||||
def test_flatten_json_request_body():
|
||||
spec = AWSServiceSpec(
|
||||
'data/emr/2009-03-31/service-2.json').input_spec('RunJobFlow')
|
||||
|
||||
body = {
|
||||
'Name': 'cluster',
|
||||
'Instances': {
|
||||
'Ec2KeyName': 'ec2key',
|
||||
'InstanceGroups': [
|
||||
{'InstanceRole': 'MASTER',
|
||||
'InstanceType': 'm1.small'},
|
||||
{'InstanceRole': 'CORE',
|
||||
'InstanceType': 'm1.medium'},
|
||||
],
|
||||
'Placement': {'AvailabilityZone': 'us-east-1'},
|
||||
},
|
||||
'Steps': [
|
||||
{'HadoopJarStep': {
|
||||
'Properties': [
|
||||
{'Key': 'k1', 'Value': 'v1'},
|
||||
{'Key': 'k2', 'Value': 'v2'}
|
||||
],
|
||||
'Args': ['arg1', 'arg2']}},
|
||||
],
|
||||
'Configurations': [
|
||||
{'Classification': 'class',
|
||||
'Properties': {'propkey1': 'propkey1',
|
||||
'propkey2': 'propkey2'}},
|
||||
{'Classification': 'anotherclass',
|
||||
'Properties': {'propkey3': 'propkey3'}},
|
||||
]
|
||||
}
|
||||
|
||||
flat = flatten_json_request_body('', body, spec)
|
||||
flat['Name'].should.equal(body['Name'])
|
||||
flat['Instances.Ec2KeyName'].should.equal(body['Instances']['Ec2KeyName'])
|
||||
for idx in range(2):
|
||||
flat['Instances.InstanceGroups.member.' + str(idx + 1) + '.InstanceRole'].should.equal(
|
||||
body['Instances']['InstanceGroups'][idx]['InstanceRole'])
|
||||
flat['Instances.InstanceGroups.member.' + str(idx + 1) + '.InstanceType'].should.equal(
|
||||
body['Instances']['InstanceGroups'][idx]['InstanceType'])
|
||||
flat['Instances.Placement.AvailabilityZone'].should.equal(
|
||||
body['Instances']['Placement']['AvailabilityZone'])
|
||||
|
||||
for idx in range(1):
|
||||
prefix = 'Steps.member.' + str(idx + 1) + '.HadoopJarStep'
|
||||
step = body['Steps'][idx]['HadoopJarStep']
|
||||
i = 0
|
||||
while prefix + '.Properties.member.' + str(i + 1) + '.Key' in flat:
|
||||
flat[prefix + '.Properties.member.' +
|
||||
str(i + 1) + '.Key'].should.equal(step['Properties'][i]['Key'])
|
||||
flat[prefix + '.Properties.member.' +
|
||||
str(i + 1) + '.Value'].should.equal(step['Properties'][i]['Value'])
|
||||
i += 1
|
||||
i = 0
|
||||
while prefix + '.Args.member.' + str(i + 1) in flat:
|
||||
flat[prefix + '.Args.member.' +
|
||||
str(i + 1)].should.equal(step['Args'][i])
|
||||
i += 1
|
||||
|
||||
for idx in range(2):
|
||||
flat['Configurations.member.' + str(idx + 1) + '.Classification'].should.equal(
|
||||
body['Configurations'][idx]['Classification'])
|
||||
|
||||
props = {}
|
||||
i = 1
|
||||
keyfmt = 'Configurations.member.{0}.Properties.entry.{1}'
|
||||
key = keyfmt.format(idx + 1, i)
|
||||
while key + '.key' in flat:
|
||||
props[flat[key + '.key']] = flat[key + '.value']
|
||||
i += 1
|
||||
key = keyfmt.format(idx + 1, i)
|
||||
props.should.equal(body['Configurations'][idx]['Properties'])
|
||||
|
|
|
|||
|
|
@ -1,53 +1,53 @@
|
|||
from __future__ import unicode_literals
|
||||
from mock import patch
|
||||
import sure # noqa
|
||||
|
||||
from moto.server import main, create_backend_app, DomainDispatcherApplication
|
||||
|
||||
|
||||
def test_wrong_arguments():
    """main() must exit when given an unexpected number of arguments."""
    try:
        main(["name", "test1", "test2", "test3"])
    except SystemExit:
        pass
    else:
        assert False, ("main() when called with the incorrect number of args"
                       " should raise a system exit")


@patch('moto.server.run_simple')
def test_right_arguments(run_simple):
    """A bare service name binds the server to the default host and port."""
    main(["s3"])
    host, port = run_simple.call_args[0][:2]
    host.should.equal("127.0.0.1")
    port.should.equal(5000)


@patch('moto.server.run_simple')
def test_port_argument(run_simple):
    """--port overrides the default listening port."""
    main(["s3", "--port", "8080"])
    host, port = run_simple.call_args[0][:2]
    host.should.equal("127.0.0.1")
    port.should.equal(8080)


def test_domain_dispatched():
    """Host-based dispatch picks the backend matching the request domain."""
    dispatcher = DomainDispatcherApplication(create_backend_app)
    backend_app = dispatcher.get_application(
        {"HTTP_HOST": "email.us-east1.amazonaws.com"})
    view_names = list(backend_app.view_functions.keys())
    view_names[0].should.equal('EmailResponse.dispatch')


def test_domain_without_matches():
    """An unrecognised host raises instead of silently picking a backend."""
    dispatcher = DomainDispatcherApplication(create_backend_app)
    dispatcher.get_application.when.called_with(
        {"HTTP_HOST": "not-matching-anything.com"}).should.throw(RuntimeError)


def test_domain_dispatched_with_service():
    # If we pass a particular service, always return that.
    dispatcher = DomainDispatcherApplication(create_backend_app, service="s3")
    backend_app = dispatcher.get_application(
        {"HTTP_HOST": "s3.us-east1.amazonaws.com"})
    set(backend_app.view_functions.keys()).should.contain(
        'ResponseObject.key_response')
||||
from __future__ import unicode_literals
|
||||
from mock import patch
|
||||
import sure # noqa
|
||||
|
||||
from moto.server import main, create_backend_app, DomainDispatcherApplication
|
||||
|
||||
|
||||
# NOTE(review): duplicate copy of the server tests (diff artifact).
def test_wrong_arguments():
    try:
        main(["name", "test1", "test2", "test3"])
        assert False, ("main() when called with the incorrect number of args"
                       " should raise a system exit")
    except SystemExit:
        pass


@patch('moto.server.run_simple')
def test_right_arguments(run_simple):
    main(["s3"])
    func_call = run_simple.call_args[0]
    func_call[0].should.equal("127.0.0.1")
    func_call[1].should.equal(5000)


@patch('moto.server.run_simple')
def test_port_argument(run_simple):
    main(["s3", "--port", "8080"])
    func_call = run_simple.call_args[0]
    func_call[0].should.equal("127.0.0.1")
    func_call[1].should.equal(8080)


def test_domain_dispatched():
    dispatcher = DomainDispatcherApplication(create_backend_app)
    backend_app = dispatcher.get_application(
        {"HTTP_HOST": "email.us-east1.amazonaws.com"})
    keys = list(backend_app.view_functions.keys())
    keys[0].should.equal('EmailResponse.dispatch')


def test_domain_without_matches():
    dispatcher = DomainDispatcherApplication(create_backend_app)
    dispatcher.get_application.when.called_with(
        {"HTTP_HOST": "not-matching-anything.com"}).should.throw(RuntimeError)


def test_domain_dispatched_with_service():
    # If we pass a particular service, always return that.
    dispatcher = DomainDispatcherApplication(create_backend_app, service="s3")
    backend_app = dispatcher.get_application(
        {"HTTP_HOST": "s3.us-east1.amazonaws.com"})
    keys = set(backend_app.view_functions.keys())
    keys.should.contain('ResponseObject.key_response')
||||
|
|
|
|||
|
|
@ -1,22 +1,22 @@
|
|||
from __future__ import unicode_literals
|
||||
import sure # noqa
|
||||
|
||||
from moto.core.utils import convert_regex_to_flask_path
|
||||
|
||||
|
||||
def test_flask_path_converting_simple():
    """Plain paths pass through convert_regex_to_flask_path unchanged;
    a trailing '$' anchor is dropped."""
    convert_regex_to_flask_path("/").should.equal("/")
    convert_regex_to_flask_path("/$").should.equal("/")

    convert_regex_to_flask_path("/foo").should.equal("/foo")

    convert_regex_to_flask_path("/foo/bar/").should.equal("/foo/bar/")


def test_flask_path_converting_regex():
    """Named regex groups become flask '<regex("..."):name>' converters."""
    # Raw strings fix the invalid escape sequences ('\-', '\d') that
    # trigger DeprecationWarning in ordinary literals; runtime values are
    # unchanged.
    convert_regex_to_flask_path(
        r"/(?P<key_name>[a-zA-Z0-9\-_]+)").should.equal(r'/<regex("[a-zA-Z0-9\-_]+"):key_name>')

    convert_regex_to_flask_path(r"(?P<account_id>\d+)/(?P<queue_name>.*)$").should.equal(
        r'<regex("\d+"):account_id>/<regex(".*"):queue_name>'
    )
||||
from __future__ import unicode_literals
|
||||
import sure # noqa
|
||||
|
||||
from moto.core.utils import convert_regex_to_flask_path
|
||||
|
||||
|
||||
# NOTE(review): duplicate copy of the flask-path tests (diff artifact).
def test_flask_path_converting_simple():
    """Plain paths are returned as-is (trailing '$' anchor dropped)."""
    convert_regex_to_flask_path("/").should.equal("/")
    convert_regex_to_flask_path("/$").should.equal("/")

    convert_regex_to_flask_path("/foo").should.equal("/foo")

    convert_regex_to_flask_path("/foo/bar/").should.equal("/foo/bar/")


def test_flask_path_converting_regex():
    """Named groups are rewritten as flask regex converters."""
    # Raw strings avoid invalid-escape DeprecationWarnings; values unchanged.
    convert_regex_to_flask_path(
        r"/(?P<key_name>[a-zA-Z0-9\-_]+)").should.equal(r'/<regex("[a-zA-Z0-9\-_]+"):key_name>')

    convert_regex_to_flask_path(r"(?P<account_id>\d+)/(?P<queue_name>.*)$").should.equal(
        r'<regex("\d+"):account_id>/<regex(".*"):queue_name>'
    )
|
|
|
|||
|
|
@ -1,30 +1,30 @@
|
|||
from __future__ import unicode_literals
|
||||
|
||||
import sure # noqa
|
||||
from freezegun import freeze_time
|
||||
|
||||
from moto.core.utils import camelcase_to_underscores, underscores_to_camelcase, unix_time
|
||||
|
||||
|
||||
def test_camelcase_to_underscores():
    """camelcase_to_underscores lower-cases and underscore-separates names,
    including acronym-aware cases like 'ListMFADevices'."""
    cases = {
        "theNewAttribute": "the_new_attribute",
        "attri bute With Space": "attribute_with_space",
        "FirstLetterCapital": "first_letter_capital",
        "ListMFADevices": "list_mfa_devices",
    }
    for camel, snake in cases.items():
        camelcase_to_underscores(camel).should.equal(snake)


def test_underscores_to_camelcase():
    """underscores_to_camelcase performs the reverse conversion."""
    cases = {
        "the_new_attribute": "theNewAttribute",
    }
    for snake, camel in cases.items():
        underscores_to_camelcase(snake).should.equal(camel)


@freeze_time("2015-01-01 12:00:00")
def test_unix_time():
    # 2015-01-01T12:00:00Z as a POSIX timestamp.
    unix_time().should.equal(1420113600.0)
||||
from __future__ import unicode_literals
|
||||
|
||||
import sure # noqa
|
||||
from freezegun import freeze_time
|
||||
|
||||
from moto.core.utils import camelcase_to_underscores, underscores_to_camelcase, unix_time
|
||||
|
||||
|
||||
# NOTE(review): duplicate copy of the core-utils tests (diff artifact).
def test_camelcase_to_underscores():
    cases = {
        "theNewAttribute": "the_new_attribute",
        "attri bute With Space": "attribute_with_space",
        "FirstLetterCapital": "first_letter_capital",
        "ListMFADevices": "list_mfa_devices",
    }
    for arg, expected in cases.items():
        camelcase_to_underscores(arg).should.equal(expected)


def test_underscores_to_camelcase():
    cases = {
        "the_new_attribute": "theNewAttribute",
    }
    for arg, expected in cases.items():
        underscores_to_camelcase(arg).should.equal(expected)


@freeze_time("2015-01-01 12:00:00")
def test_unix_time():
    unix_time().should.equal(1420113600.0)
||||
|
|
|
|||
|
|
@ -1,204 +1,204 @@
|
|||
from __future__ import unicode_literals
|
||||
|
||||
import boto.datapipeline
|
||||
import sure # noqa
|
||||
|
||||
from moto import mock_datapipeline_deprecated
|
||||
from moto.datapipeline.utils import remove_capitalization_of_dict_keys
|
||||
|
||||
|
||||
def get_value_from_fields(key, fields):
    """Return the 'stringValue' of the first field whose 'key' matches,
    or None when no field matches."""
    return next(
        (field['stringValue'] for field in fields if field['key'] == key),
        None,
    )
||||
|
||||
@mock_datapipeline_deprecated
def test_create_pipeline():
    """A freshly created pipeline is PENDING and retains its unique id."""
    conn = boto.datapipeline.connect_to_region("us-west-2")

    res = conn.create_pipeline("mypipeline", "some-unique-id")

    pipeline_id = res["pipelineId"]
    descriptions = conn.describe_pipelines(
        [pipeline_id])["pipelineDescriptionList"]
    descriptions.should.have.length_of(1)

    description = descriptions[0]
    description['name'].should.equal("mypipeline")
    description["pipelineId"].should.equal(pipeline_id)
    fields = description['fields']

    get_value_from_fields('@pipelineState', fields).should.equal("PENDING")
    get_value_from_fields('uniqueId', fields).should.equal("some-unique-id")
|
||||
|
||||
# Minimal pipeline definition: a default object, an hourly schedule, and a
# shell-command activity wired to both.
PIPELINE_OBJECTS = [
    {
        "id": "Default",
        "name": "Default",
        "fields": [{
            "key": "workerGroup",
            "stringValue": "workerGroup"
        }]
    },
    {
        "id": "Schedule",
        "name": "Schedule",
        "fields": [{
            "key": "startDateTime",
            "stringValue": "2012-12-12T00:00:00"
        }, {
            "key": "type",
            "stringValue": "Schedule"
        }, {
            "key": "period",
            "stringValue": "1 hour"
        }, {
            "key": "endDateTime",
            "stringValue": "2012-12-21T18:00:00"
        }]
    },
    {
        "id": "SayHello",
        "name": "SayHello",
        "fields": [{
            "key": "type",
            "stringValue": "ShellCommandActivity"
        }, {
            "key": "command",
            "stringValue": "echo hello"
        }, {
            "key": "parent",
            "refValue": "Default"
        }, {
            "key": "schedule",
            "refValue": "Schedule"
        }]
    }
]
||||
|
||||
@mock_datapipeline_deprecated
def test_creating_pipeline_definition():
    """Objects stored via put_pipeline_definition come back from
    get_pipeline_definition."""
    conn = boto.datapipeline.connect_to_region("us-west-2")
    res = conn.create_pipeline("mypipeline", "some-unique-id")
    pipeline_id = res["pipelineId"]

    conn.put_pipeline_definition(PIPELINE_OBJECTS, pipeline_id)

    definition = conn.get_pipeline_definition(pipeline_id)
    definition['pipelineObjects'].should.have.length_of(3)
    default_object = definition['pipelineObjects'][0]
    default_object['name'].should.equal("Default")
    default_object['id'].should.equal("Default")
    default_object['fields'].should.equal([{
        "key": "workerGroup",
        "stringValue": "workerGroup"
    }])
|
||||
@mock_datapipeline_deprecated
def test_describing_pipeline_objects():
    """describe_objects returns exactly the requested object ids."""
    conn = boto.datapipeline.connect_to_region("us-west-2")
    res = conn.create_pipeline("mypipeline", "some-unique-id")
    pipeline_id = res["pipelineId"]

    conn.put_pipeline_definition(PIPELINE_OBJECTS, pipeline_id)

    objects = conn.describe_objects(["Schedule", "Default"], pipeline_id)[
        'pipelineObjects']

    objects.should.have.length_of(2)
    default_object = [obj for obj in objects if obj['id'] == 'Default'][0]
    default_object['name'].should.equal("Default")
    default_object['fields'].should.equal([{
        "key": "workerGroup",
        "stringValue": "workerGroup"
    }])
|
||||
@mock_datapipeline_deprecated
def test_activate_pipeline():
    """activate_pipeline flips @pipelineState from PENDING to SCHEDULED."""
    conn = boto.datapipeline.connect_to_region("us-west-2")

    res = conn.create_pipeline("mypipeline", "some-unique-id")

    pipeline_id = res["pipelineId"]
    conn.activate_pipeline(pipeline_id)

    descriptions = conn.describe_pipelines(
        [pipeline_id])["pipelineDescriptionList"]
    descriptions.should.have.length_of(1)
    fields = descriptions[0]['fields']

    get_value_from_fields('@pipelineState', fields).should.equal("SCHEDULED")
|
||||
@mock_datapipeline_deprecated
def test_delete_pipeline():
    """Deleted pipelines no longer show up in list_pipelines."""
    conn = boto.datapipeline.connect_to_region("us-west-2")
    res = conn.create_pipeline("mypipeline", "some-unique-id")
    pipeline_id = res["pipelineId"]

    conn.delete_pipeline(pipeline_id)

    response = conn.list_pipelines()

    response["pipelineIdList"].should.have.length_of(0)
|
||||
@mock_datapipeline_deprecated
def test_listing_pipelines():
    """list_pipelines returns each pipeline's id/name pair, unpaginated."""
    conn = boto.datapipeline.connect_to_region("us-west-2")
    res1 = conn.create_pipeline("mypipeline1", "some-unique-id1")
    res2 = conn.create_pipeline("mypipeline2", "some-unique-id2")

    response = conn.list_pipelines()

    response["hasMoreResults"].should.be(False)
    response["marker"].should.be.none
    response["pipelineIdList"].should.have.length_of(2)
    response["pipelineIdList"].should.contain({
        "id": res1["pipelineId"],
        "name": "mypipeline1",
    })
    response["pipelineIdList"].should.contain({
        "id": res2["pipelineId"],
        "name": "mypipeline2"
    })
|
||||
@mock_datapipeline_deprecated
def test_listing_paginated_pipelines():
    """Listing caps at 50 results and hands back a continuation marker."""
    conn = boto.datapipeline.connect_to_region("us-west-2")
    for n in range(100):
        conn.create_pipeline("mypipeline%d" % n, "some-unique-id%d" % n)

    response = conn.list_pipelines()

    response["hasMoreResults"].should.be(True)
    # The marker is the id of the last pipeline on this page.
    response["marker"].should.equal(response["pipelineIdList"][-1]['id'])
    response["pipelineIdList"].should.have.length_of(50)
|
||||
# testing a helper function
def test_remove_capitalization_of_dict_keys():
    """Keys get a lower-cased first letter, recursively; values are untouched."""
    result = remove_capitalization_of_dict_keys(
        {
            "Id": "IdValue",
            "Fields": [{
                "Key": "KeyValue",
                "StringValue": "StringValueValue"
            }]
        }
    )

    expected = {
        "id": "IdValue",
        "fields": [{
            "key": "KeyValue",
            "stringValue": "StringValueValue"
        }],
    }
    result.should.equal(expected)
||||
from __future__ import unicode_literals
|
||||
|
||||
import boto.datapipeline
|
||||
import sure # noqa
|
||||
|
||||
from moto import mock_datapipeline_deprecated
|
||||
from moto.datapipeline.utils import remove_capitalization_of_dict_keys
|
||||
|
||||
|
||||
# NOTE(review): duplicate copy of get_value_from_fields (diff artifact).
def get_value_from_fields(key, fields):
    """Return the 'stringValue' of the first matching field (None if absent)."""
    for entry in fields:
        if entry['key'] == key:
            return entry['stringValue']
    return None
|
||||
@mock_datapipeline_deprecated
|
||||
def test_create_pipeline():
|
||||
conn = boto.datapipeline.connect_to_region("us-west-2")
|
||||
|
||||
res = conn.create_pipeline("mypipeline", "some-unique-id")
|
||||
|
||||
pipeline_id = res["pipelineId"]
|
||||
pipeline_descriptions = conn.describe_pipelines(
|
||||
[pipeline_id])["pipelineDescriptionList"]
|
||||
pipeline_descriptions.should.have.length_of(1)
|
||||
|
||||
pipeline_description = pipeline_descriptions[0]
|
||||
pipeline_description['name'].should.equal("mypipeline")
|
||||
pipeline_description["pipelineId"].should.equal(pipeline_id)
|
||||
fields = pipeline_description['fields']
|
||||
|
||||
get_value_from_fields('@pipelineState', fields).should.equal("PENDING")
|
||||
get_value_from_fields('uniqueId', fields).should.equal("some-unique-id")
|
||||
|
||||
|
||||
PIPELINE_OBJECTS = [
|
||||
{
|
||||
"id": "Default",
|
||||
"name": "Default",
|
||||
"fields": [{
|
||||
"key": "workerGroup",
|
||||
"stringValue": "workerGroup"
|
||||
}]
|
||||
},
|
||||
{
|
||||
"id": "Schedule",
|
||||
"name": "Schedule",
|
||||
"fields": [{
|
||||
"key": "startDateTime",
|
||||
"stringValue": "2012-12-12T00:00:00"
|
||||
}, {
|
||||
"key": "type",
|
||||
"stringValue": "Schedule"
|
||||
}, {
|
||||
"key": "period",
|
||||
"stringValue": "1 hour"
|
||||
}, {
|
||||
"key": "endDateTime",
|
||||
"stringValue": "2012-12-21T18:00:00"
|
||||
}]
|
||||
},
|
||||
{
|
||||
"id": "SayHello",
|
||||
"name": "SayHello",
|
||||
"fields": [{
|
||||
"key": "type",
|
||||
"stringValue": "ShellCommandActivity"
|
||||
}, {
|
||||
"key": "command",
|
||||
"stringValue": "echo hello"
|
||||
}, {
|
||||
"key": "parent",
|
||||
"refValue": "Default"
|
||||
}, {
|
||||
"key": "schedule",
|
||||
"refValue": "Schedule"
|
||||
}]
|
||||
}
|
||||
]
|
||||
|
||||
|
||||
@mock_datapipeline_deprecated
|
||||
def test_creating_pipeline_definition():
|
||||
conn = boto.datapipeline.connect_to_region("us-west-2")
|
||||
res = conn.create_pipeline("mypipeline", "some-unique-id")
|
||||
pipeline_id = res["pipelineId"]
|
||||
|
||||
conn.put_pipeline_definition(PIPELINE_OBJECTS, pipeline_id)
|
||||
|
||||
pipeline_definition = conn.get_pipeline_definition(pipeline_id)
|
||||
pipeline_definition['pipelineObjects'].should.have.length_of(3)
|
||||
default_object = pipeline_definition['pipelineObjects'][0]
|
||||
default_object['name'].should.equal("Default")
|
||||
default_object['id'].should.equal("Default")
|
||||
default_object['fields'].should.equal([{
|
||||
"key": "workerGroup",
|
||||
"stringValue": "workerGroup"
|
||||
}])
|
||||
|
||||
|
||||
@mock_datapipeline_deprecated
|
||||
def test_describing_pipeline_objects():
|
||||
conn = boto.datapipeline.connect_to_region("us-west-2")
|
||||
res = conn.create_pipeline("mypipeline", "some-unique-id")
|
||||
pipeline_id = res["pipelineId"]
|
||||
|
||||
conn.put_pipeline_definition(PIPELINE_OBJECTS, pipeline_id)
|
||||
|
||||
objects = conn.describe_objects(["Schedule", "Default"], pipeline_id)[
|
||||
'pipelineObjects']
|
||||
|
||||
objects.should.have.length_of(2)
|
||||
default_object = [x for x in objects if x['id'] == 'Default'][0]
|
||||
default_object['name'].should.equal("Default")
|
||||
default_object['fields'].should.equal([{
|
||||
"key": "workerGroup",
|
||||
"stringValue": "workerGroup"
|
||||
}])
|
||||
|
||||
|
||||
@mock_datapipeline_deprecated
|
||||
def test_activate_pipeline():
|
||||
conn = boto.datapipeline.connect_to_region("us-west-2")
|
||||
|
||||
res = conn.create_pipeline("mypipeline", "some-unique-id")
|
||||
|
||||
pipeline_id = res["pipelineId"]
|
||||
conn.activate_pipeline(pipeline_id)
|
||||
|
||||
pipeline_descriptions = conn.describe_pipelines(
|
||||
[pipeline_id])["pipelineDescriptionList"]
|
||||
pipeline_descriptions.should.have.length_of(1)
|
||||
pipeline_description = pipeline_descriptions[0]
|
||||
fields = pipeline_description['fields']
|
||||
|
||||
get_value_from_fields('@pipelineState', fields).should.equal("SCHEDULED")
|
||||
|
||||
|
||||
@mock_datapipeline_deprecated
|
||||
def test_delete_pipeline():
|
||||
conn = boto.datapipeline.connect_to_region("us-west-2")
|
||||
res = conn.create_pipeline("mypipeline", "some-unique-id")
|
||||
pipeline_id = res["pipelineId"]
|
||||
|
||||
conn.delete_pipeline(pipeline_id)
|
||||
|
||||
response = conn.list_pipelines()
|
||||
|
||||
response["pipelineIdList"].should.have.length_of(0)
|
||||
|
||||
|
||||
@mock_datapipeline_deprecated
|
||||
def test_listing_pipelines():
|
||||
conn = boto.datapipeline.connect_to_region("us-west-2")
|
||||
res1 = conn.create_pipeline("mypipeline1", "some-unique-id1")
|
||||
res2 = conn.create_pipeline("mypipeline2", "some-unique-id2")
|
||||
|
||||
response = conn.list_pipelines()
|
||||
|
||||
response["hasMoreResults"].should.be(False)
|
||||
response["marker"].should.be.none
|
||||
response["pipelineIdList"].should.have.length_of(2)
|
||||
response["pipelineIdList"].should.contain({
|
||||
"id": res1["pipelineId"],
|
||||
"name": "mypipeline1",
|
||||
})
|
||||
response["pipelineIdList"].should.contain({
|
||||
"id": res2["pipelineId"],
|
||||
"name": "mypipeline2"
|
||||
})
|
||||
|
||||
|
||||
@mock_datapipeline_deprecated
|
||||
def test_listing_paginated_pipelines():
|
||||
conn = boto.datapipeline.connect_to_region("us-west-2")
|
||||
for i in range(100):
|
||||
conn.create_pipeline("mypipeline%d" % i, "some-unique-id%d" % i)
|
||||
|
||||
response = conn.list_pipelines()
|
||||
|
||||
response["hasMoreResults"].should.be(True)
|
||||
response["marker"].should.equal(response["pipelineIdList"][-1]['id'])
|
||||
response["pipelineIdList"].should.have.length_of(50)
|
||||
|
||||
|
||||
# testing a helper function
|
||||
def test_remove_capitalization_of_dict_keys():
|
||||
result = remove_capitalization_of_dict_keys(
|
||||
{
|
||||
"Id": "IdValue",
|
||||
"Fields": [{
|
||||
"Key": "KeyValue",
|
||||
"StringValue": "StringValueValue"
|
||||
}]
|
||||
}
|
||||
)
|
||||
|
||||
result.should.equal({
|
||||
"id": "IdValue",
|
||||
"fields": [{
|
||||
"key": "KeyValue",
|
||||
"stringValue": "StringValueValue"
|
||||
}],
|
||||
})
|
||||
|
|
|
|||
|
|
@ -1,28 +1,28 @@
|
|||
from __future__ import unicode_literals
|
||||
|
||||
import json
|
||||
import sure # noqa
|
||||
|
||||
import moto.server as server
|
||||
from moto import mock_datapipeline
|
||||
|
||||
'''
|
||||
Test the different server responses
|
||||
'''
|
||||
|
||||
|
||||
@mock_datapipeline
|
||||
def test_list_streams():
|
||||
backend = server.create_backend_app("datapipeline")
|
||||
test_client = backend.test_client()
|
||||
|
||||
res = test_client.post('/',
|
||||
data={"pipelineIds": ["ASdf"]},
|
||||
headers={
|
||||
"X-Amz-Target": "DataPipeline.DescribePipelines"},
|
||||
)
|
||||
|
||||
json_data = json.loads(res.data.decode("utf-8"))
|
||||
json_data.should.equal({
|
||||
'pipelineDescriptionList': []
|
||||
})
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import json
|
||||
import sure # noqa
|
||||
|
||||
import moto.server as server
|
||||
from moto import mock_datapipeline
|
||||
|
||||
'''
|
||||
Test the different server responses
|
||||
'''
|
||||
|
||||
|
||||
@mock_datapipeline
|
||||
def test_list_streams():
|
||||
backend = server.create_backend_app("datapipeline")
|
||||
test_client = backend.test_client()
|
||||
|
||||
res = test_client.post('/',
|
||||
data={"pipelineIds": ["ASdf"]},
|
||||
headers={
|
||||
"X-Amz-Target": "DataPipeline.DescribePipelines"},
|
||||
)
|
||||
|
||||
json_data = json.loads(res.data.decode("utf-8"))
|
||||
json_data.should.equal({
|
||||
'pipelineDescriptionList': []
|
||||
})
|
||||
|
|
|
|||
|
|
@ -1,54 +1,54 @@
|
|||
from __future__ import unicode_literals
|
||||
import six
|
||||
import boto
|
||||
import boto.dynamodb
|
||||
import sure # noqa
|
||||
import requests
|
||||
import tests.backport_assert_raises
|
||||
from nose.tools import assert_raises
|
||||
|
||||
from moto import mock_dynamodb, mock_dynamodb_deprecated
|
||||
from moto.dynamodb import dynamodb_backend
|
||||
|
||||
from boto.exception import DynamoDBResponseError
|
||||
|
||||
|
||||
@mock_dynamodb_deprecated
|
||||
def test_list_tables():
|
||||
name = 'TestTable'
|
||||
dynamodb_backend.create_table(
|
||||
name, hash_key_attr="name", hash_key_type="S")
|
||||
conn = boto.connect_dynamodb('the_key', 'the_secret')
|
||||
assert conn.list_tables() == ['TestTable']
|
||||
|
||||
|
||||
@mock_dynamodb_deprecated
|
||||
def test_list_tables_layer_1():
|
||||
dynamodb_backend.create_table(
|
||||
"test_1", hash_key_attr="name", hash_key_type="S")
|
||||
dynamodb_backend.create_table(
|
||||
"test_2", hash_key_attr="name", hash_key_type="S")
|
||||
conn = boto.connect_dynamodb('the_key', 'the_secret')
|
||||
res = conn.layer1.list_tables(limit=1)
|
||||
expected = {"TableNames": ["test_1"], "LastEvaluatedTableName": "test_1"}
|
||||
res.should.equal(expected)
|
||||
|
||||
res = conn.layer1.list_tables(limit=1, start_table="test_1")
|
||||
expected = {"TableNames": ["test_2"]}
|
||||
res.should.equal(expected)
|
||||
|
||||
|
||||
@mock_dynamodb_deprecated
|
||||
def test_describe_missing_table():
|
||||
conn = boto.connect_dynamodb('the_key', 'the_secret')
|
||||
with assert_raises(DynamoDBResponseError):
|
||||
conn.describe_table('messages')
|
||||
|
||||
|
||||
@mock_dynamodb_deprecated
|
||||
def test_dynamodb_with_connect_to_region():
|
||||
# this will work if connected with boto.connect_dynamodb()
|
||||
dynamodb = boto.dynamodb.connect_to_region('us-west-2')
|
||||
|
||||
schema = dynamodb.create_schema('column1', str(), 'column2', int())
|
||||
dynamodb.create_table('table1', schema, 200, 200)
|
||||
from __future__ import unicode_literals
|
||||
import six
|
||||
import boto
|
||||
import boto.dynamodb
|
||||
import sure # noqa
|
||||
import requests
|
||||
import tests.backport_assert_raises
|
||||
from nose.tools import assert_raises
|
||||
|
||||
from moto import mock_dynamodb, mock_dynamodb_deprecated
|
||||
from moto.dynamodb import dynamodb_backend
|
||||
|
||||
from boto.exception import DynamoDBResponseError
|
||||
|
||||
|
||||
@mock_dynamodb_deprecated
|
||||
def test_list_tables():
|
||||
name = 'TestTable'
|
||||
dynamodb_backend.create_table(
|
||||
name, hash_key_attr="name", hash_key_type="S")
|
||||
conn = boto.connect_dynamodb('the_key', 'the_secret')
|
||||
assert conn.list_tables() == ['TestTable']
|
||||
|
||||
|
||||
@mock_dynamodb_deprecated
|
||||
def test_list_tables_layer_1():
|
||||
dynamodb_backend.create_table(
|
||||
"test_1", hash_key_attr="name", hash_key_type="S")
|
||||
dynamodb_backend.create_table(
|
||||
"test_2", hash_key_attr="name", hash_key_type="S")
|
||||
conn = boto.connect_dynamodb('the_key', 'the_secret')
|
||||
res = conn.layer1.list_tables(limit=1)
|
||||
expected = {"TableNames": ["test_1"], "LastEvaluatedTableName": "test_1"}
|
||||
res.should.equal(expected)
|
||||
|
||||
res = conn.layer1.list_tables(limit=1, start_table="test_1")
|
||||
expected = {"TableNames": ["test_2"]}
|
||||
res.should.equal(expected)
|
||||
|
||||
|
||||
@mock_dynamodb_deprecated
|
||||
def test_describe_missing_table():
|
||||
conn = boto.connect_dynamodb('the_key', 'the_secret')
|
||||
with assert_raises(DynamoDBResponseError):
|
||||
conn.describe_table('messages')
|
||||
|
||||
|
||||
@mock_dynamodb_deprecated
|
||||
def test_dynamodb_with_connect_to_region():
|
||||
# this will work if connected with boto.connect_dynamodb()
|
||||
dynamodb = boto.dynamodb.connect_to_region('us-west-2')
|
||||
|
||||
schema = dynamodb.create_schema('column1', str(), 'column2', int())
|
||||
dynamodb.create_table('table1', schema, 200, 200)
|
||||
|
|
|
|||
File diff suppressed because it is too large
Load diff
|
|
@ -1,430 +1,430 @@
|
|||
from __future__ import unicode_literals
|
||||
|
||||
import boto
|
||||
import sure # noqa
|
||||
from freezegun import freeze_time
|
||||
|
||||
from moto import mock_dynamodb_deprecated
|
||||
|
||||
from boto.dynamodb import condition
|
||||
from boto.dynamodb.exceptions import DynamoDBKeyNotFoundError
|
||||
from boto.exception import DynamoDBResponseError
|
||||
|
||||
|
||||
def create_table(conn):
|
||||
message_table_schema = conn.create_schema(
|
||||
hash_key_name='forum_name',
|
||||
hash_key_proto_value=str,
|
||||
)
|
||||
|
||||
table = conn.create_table(
|
||||
name='messages',
|
||||
schema=message_table_schema,
|
||||
read_units=10,
|
||||
write_units=10
|
||||
)
|
||||
return table
|
||||
|
||||
|
||||
@freeze_time("2012-01-14")
|
||||
@mock_dynamodb_deprecated
|
||||
def test_create_table():
|
||||
conn = boto.connect_dynamodb()
|
||||
create_table(conn)
|
||||
|
||||
expected = {
|
||||
'Table': {
|
||||
'CreationDateTime': 1326499200.0,
|
||||
'ItemCount': 0,
|
||||
'KeySchema': {
|
||||
'HashKeyElement': {
|
||||
'AttributeName': 'forum_name',
|
||||
'AttributeType': 'S'
|
||||
},
|
||||
},
|
||||
'ProvisionedThroughput': {
|
||||
'ReadCapacityUnits': 10,
|
||||
'WriteCapacityUnits': 10
|
||||
},
|
||||
'TableName': 'messages',
|
||||
'TableSizeBytes': 0,
|
||||
'TableStatus': 'ACTIVE',
|
||||
}
|
||||
}
|
||||
conn.describe_table('messages').should.equal(expected)
|
||||
|
||||
|
||||
@mock_dynamodb_deprecated
|
||||
def test_delete_table():
|
||||
conn = boto.connect_dynamodb()
|
||||
create_table(conn)
|
||||
conn.list_tables().should.have.length_of(1)
|
||||
|
||||
conn.layer1.delete_table('messages')
|
||||
conn.list_tables().should.have.length_of(0)
|
||||
|
||||
conn.layer1.delete_table.when.called_with(
|
||||
'messages').should.throw(DynamoDBResponseError)
|
||||
|
||||
|
||||
@mock_dynamodb_deprecated
|
||||
def test_update_table_throughput():
|
||||
conn = boto.connect_dynamodb()
|
||||
table = create_table(conn)
|
||||
table.read_units.should.equal(10)
|
||||
table.write_units.should.equal(10)
|
||||
|
||||
table.update_throughput(5, 6)
|
||||
table.refresh()
|
||||
|
||||
table.read_units.should.equal(5)
|
||||
table.write_units.should.equal(6)
|
||||
|
||||
|
||||
@mock_dynamodb_deprecated
|
||||
def test_item_add_and_describe_and_update():
|
||||
conn = boto.connect_dynamodb()
|
||||
table = create_table(conn)
|
||||
|
||||
item_data = {
|
||||
'Body': 'http://url_to_lolcat.gif',
|
||||
'SentBy': 'User A',
|
||||
'ReceivedTime': '12/9/2011 11:36:03 PM',
|
||||
}
|
||||
item = table.new_item(
|
||||
hash_key='LOLCat Forum',
|
||||
attrs=item_data,
|
||||
)
|
||||
item.put()
|
||||
|
||||
returned_item = table.get_item(
|
||||
hash_key='LOLCat Forum',
|
||||
attributes_to_get=['Body', 'SentBy']
|
||||
)
|
||||
dict(returned_item).should.equal({
|
||||
'forum_name': 'LOLCat Forum',
|
||||
'Body': 'http://url_to_lolcat.gif',
|
||||
'SentBy': 'User A',
|
||||
})
|
||||
|
||||
item['SentBy'] = 'User B'
|
||||
item.put()
|
||||
|
||||
returned_item = table.get_item(
|
||||
hash_key='LOLCat Forum',
|
||||
attributes_to_get=['Body', 'SentBy']
|
||||
)
|
||||
dict(returned_item).should.equal({
|
||||
'forum_name': 'LOLCat Forum',
|
||||
'Body': 'http://url_to_lolcat.gif',
|
||||
'SentBy': 'User B',
|
||||
})
|
||||
|
||||
|
||||
@mock_dynamodb_deprecated
|
||||
def test_item_put_without_table():
|
||||
conn = boto.connect_dynamodb()
|
||||
|
||||
conn.layer1.put_item.when.called_with(
|
||||
table_name='undeclared-table',
|
||||
item=dict(
|
||||
hash_key='LOLCat Forum',
|
||||
),
|
||||
).should.throw(DynamoDBResponseError)
|
||||
|
||||
|
||||
@mock_dynamodb_deprecated
|
||||
def test_get_missing_item():
|
||||
conn = boto.connect_dynamodb()
|
||||
table = create_table(conn)
|
||||
|
||||
table.get_item.when.called_with(
|
||||
hash_key='tester',
|
||||
).should.throw(DynamoDBKeyNotFoundError)
|
||||
|
||||
|
||||
@mock_dynamodb_deprecated
|
||||
def test_get_item_with_undeclared_table():
|
||||
conn = boto.connect_dynamodb()
|
||||
|
||||
conn.layer1.get_item.when.called_with(
|
||||
table_name='undeclared-table',
|
||||
key={
|
||||
'HashKeyElement': {'S': 'tester'},
|
||||
},
|
||||
).should.throw(DynamoDBKeyNotFoundError)
|
||||
|
||||
|
||||
@mock_dynamodb_deprecated
|
||||
def test_delete_item():
|
||||
conn = boto.connect_dynamodb()
|
||||
table = create_table(conn)
|
||||
|
||||
item_data = {
|
||||
'Body': 'http://url_to_lolcat.gif',
|
||||
'SentBy': 'User A',
|
||||
'ReceivedTime': '12/9/2011 11:36:03 PM',
|
||||
}
|
||||
item = table.new_item(
|
||||
hash_key='LOLCat Forum',
|
||||
attrs=item_data,
|
||||
)
|
||||
item.put()
|
||||
|
||||
table.refresh()
|
||||
table.item_count.should.equal(1)
|
||||
|
||||
response = item.delete()
|
||||
response.should.equal({u'Attributes': [], u'ConsumedCapacityUnits': 0.5})
|
||||
table.refresh()
|
||||
table.item_count.should.equal(0)
|
||||
|
||||
item.delete.when.called_with().should.throw(DynamoDBResponseError)
|
||||
|
||||
|
||||
@mock_dynamodb_deprecated
|
||||
def test_delete_item_with_attribute_response():
|
||||
conn = boto.connect_dynamodb()
|
||||
table = create_table(conn)
|
||||
|
||||
item_data = {
|
||||
'Body': 'http://url_to_lolcat.gif',
|
||||
'SentBy': 'User A',
|
||||
'ReceivedTime': '12/9/2011 11:36:03 PM',
|
||||
}
|
||||
item = table.new_item(
|
||||
hash_key='LOLCat Forum',
|
||||
attrs=item_data,
|
||||
)
|
||||
item.put()
|
||||
|
||||
table.refresh()
|
||||
table.item_count.should.equal(1)
|
||||
|
||||
response = item.delete(return_values='ALL_OLD')
|
||||
response.should.equal({
|
||||
u'Attributes': {
|
||||
u'Body': u'http://url_to_lolcat.gif',
|
||||
u'forum_name': u'LOLCat Forum',
|
||||
u'ReceivedTime': u'12/9/2011 11:36:03 PM',
|
||||
u'SentBy': u'User A',
|
||||
},
|
||||
u'ConsumedCapacityUnits': 0.5
|
||||
})
|
||||
table.refresh()
|
||||
table.item_count.should.equal(0)
|
||||
|
||||
item.delete.when.called_with().should.throw(DynamoDBResponseError)
|
||||
|
||||
|
||||
@mock_dynamodb_deprecated
|
||||
def test_delete_item_with_undeclared_table():
|
||||
conn = boto.connect_dynamodb()
|
||||
|
||||
conn.layer1.delete_item.when.called_with(
|
||||
table_name='undeclared-table',
|
||||
key={
|
||||
'HashKeyElement': {'S': 'tester'},
|
||||
},
|
||||
).should.throw(DynamoDBResponseError)
|
||||
|
||||
|
||||
@mock_dynamodb_deprecated
|
||||
def test_query():
|
||||
conn = boto.connect_dynamodb()
|
||||
table = create_table(conn)
|
||||
|
||||
item_data = {
|
||||
'Body': 'http://url_to_lolcat.gif',
|
||||
'SentBy': 'User A',
|
||||
'ReceivedTime': '12/9/2011 11:36:03 PM',
|
||||
}
|
||||
item = table.new_item(
|
||||
hash_key='the-key',
|
||||
attrs=item_data,
|
||||
)
|
||||
item.put()
|
||||
|
||||
results = table.query(hash_key='the-key')
|
||||
results.response['Items'].should.have.length_of(1)
|
||||
|
||||
|
||||
@mock_dynamodb_deprecated
|
||||
def test_query_with_undeclared_table():
|
||||
conn = boto.connect_dynamodb()
|
||||
|
||||
conn.layer1.query.when.called_with(
|
||||
table_name='undeclared-table',
|
||||
hash_key_value={'S': 'the-key'},
|
||||
).should.throw(DynamoDBResponseError)
|
||||
|
||||
|
||||
@mock_dynamodb_deprecated
|
||||
def test_scan():
|
||||
conn = boto.connect_dynamodb()
|
||||
table = create_table(conn)
|
||||
|
||||
item_data = {
|
||||
'Body': 'http://url_to_lolcat.gif',
|
||||
'SentBy': 'User A',
|
||||
'ReceivedTime': '12/9/2011 11:36:03 PM',
|
||||
}
|
||||
item = table.new_item(
|
||||
hash_key='the-key',
|
||||
attrs=item_data,
|
||||
)
|
||||
item.put()
|
||||
|
||||
item = table.new_item(
|
||||
hash_key='the-key2',
|
||||
attrs=item_data,
|
||||
)
|
||||
item.put()
|
||||
|
||||
item_data = {
|
||||
'Body': 'http://url_to_lolcat.gif',
|
||||
'SentBy': 'User B',
|
||||
'ReceivedTime': '12/9/2011 11:36:03 PM',
|
||||
'Ids': set([1, 2, 3]),
|
||||
'PK': 7,
|
||||
}
|
||||
item = table.new_item(
|
||||
hash_key='the-key3',
|
||||
attrs=item_data,
|
||||
)
|
||||
item.put()
|
||||
|
||||
results = table.scan()
|
||||
results.response['Items'].should.have.length_of(3)
|
||||
|
||||
results = table.scan(scan_filter={'SentBy': condition.EQ('User B')})
|
||||
results.response['Items'].should.have.length_of(1)
|
||||
|
||||
results = table.scan(scan_filter={'Body': condition.BEGINS_WITH('http')})
|
||||
results.response['Items'].should.have.length_of(3)
|
||||
|
||||
results = table.scan(scan_filter={'Ids': condition.CONTAINS(2)})
|
||||
results.response['Items'].should.have.length_of(1)
|
||||
|
||||
results = table.scan(scan_filter={'Ids': condition.NOT_NULL()})
|
||||
results.response['Items'].should.have.length_of(1)
|
||||
|
||||
results = table.scan(scan_filter={'Ids': condition.NULL()})
|
||||
results.response['Items'].should.have.length_of(2)
|
||||
|
||||
results = table.scan(scan_filter={'PK': condition.BETWEEN(8, 9)})
|
||||
results.response['Items'].should.have.length_of(0)
|
||||
|
||||
results = table.scan(scan_filter={'PK': condition.BETWEEN(5, 8)})
|
||||
results.response['Items'].should.have.length_of(1)
|
||||
|
||||
|
||||
@mock_dynamodb_deprecated
|
||||
def test_scan_with_undeclared_table():
|
||||
conn = boto.connect_dynamodb()
|
||||
|
||||
conn.layer1.scan.when.called_with(
|
||||
table_name='undeclared-table',
|
||||
scan_filter={
|
||||
"SentBy": {
|
||||
"AttributeValueList": [{
|
||||
"S": "User B"}
|
||||
],
|
||||
"ComparisonOperator": "EQ"
|
||||
}
|
||||
},
|
||||
).should.throw(DynamoDBResponseError)
|
||||
|
||||
|
||||
@mock_dynamodb_deprecated
|
||||
def test_scan_after_has_item():
|
||||
conn = boto.connect_dynamodb()
|
||||
table = create_table(conn)
|
||||
list(table.scan()).should.equal([])
|
||||
|
||||
table.has_item('the-key')
|
||||
|
||||
list(table.scan()).should.equal([])
|
||||
|
||||
|
||||
@mock_dynamodb_deprecated
|
||||
def test_write_batch():
|
||||
conn = boto.connect_dynamodb()
|
||||
table = create_table(conn)
|
||||
|
||||
batch_list = conn.new_batch_write_list()
|
||||
|
||||
items = []
|
||||
items.append(table.new_item(
|
||||
hash_key='the-key',
|
||||
attrs={
|
||||
'Body': 'http://url_to_lolcat.gif',
|
||||
'SentBy': 'User A',
|
||||
'ReceivedTime': '12/9/2011 11:36:03 PM',
|
||||
},
|
||||
))
|
||||
|
||||
items.append(table.new_item(
|
||||
hash_key='the-key2',
|
||||
attrs={
|
||||
'Body': 'http://url_to_lolcat.gif',
|
||||
'SentBy': 'User B',
|
||||
'ReceivedTime': '12/9/2011 11:36:03 PM',
|
||||
'Ids': set([1, 2, 3]),
|
||||
'PK': 7,
|
||||
},
|
||||
))
|
||||
|
||||
batch_list.add_batch(table, puts=items)
|
||||
conn.batch_write_item(batch_list)
|
||||
|
||||
table.refresh()
|
||||
table.item_count.should.equal(2)
|
||||
|
||||
batch_list = conn.new_batch_write_list()
|
||||
batch_list.add_batch(table, deletes=[('the-key')])
|
||||
conn.batch_write_item(batch_list)
|
||||
|
||||
table.refresh()
|
||||
table.item_count.should.equal(1)
|
||||
|
||||
|
||||
@mock_dynamodb_deprecated
|
||||
def test_batch_read():
|
||||
conn = boto.connect_dynamodb()
|
||||
table = create_table(conn)
|
||||
|
||||
item_data = {
|
||||
'Body': 'http://url_to_lolcat.gif',
|
||||
'SentBy': 'User A',
|
||||
'ReceivedTime': '12/9/2011 11:36:03 PM',
|
||||
}
|
||||
item = table.new_item(
|
||||
hash_key='the-key1',
|
||||
attrs=item_data,
|
||||
)
|
||||
item.put()
|
||||
|
||||
item = table.new_item(
|
||||
hash_key='the-key2',
|
||||
attrs=item_data,
|
||||
)
|
||||
item.put()
|
||||
|
||||
item_data = {
|
||||
'Body': 'http://url_to_lolcat.gif',
|
||||
'SentBy': 'User B',
|
||||
'ReceivedTime': '12/9/2011 11:36:03 PM',
|
||||
'Ids': set([1, 2, 3]),
|
||||
'PK': 7,
|
||||
}
|
||||
item = table.new_item(
|
||||
hash_key='another-key',
|
||||
attrs=item_data,
|
||||
)
|
||||
item.put()
|
||||
|
||||
items = table.batch_get_item([('the-key1'), ('another-key')])
|
||||
# Iterate through so that batch_item gets called
|
||||
count = len([x for x in items])
|
||||
count.should.have.equal(2)
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import boto
|
||||
import sure # noqa
|
||||
from freezegun import freeze_time
|
||||
|
||||
from moto import mock_dynamodb_deprecated
|
||||
|
||||
from boto.dynamodb import condition
|
||||
from boto.dynamodb.exceptions import DynamoDBKeyNotFoundError
|
||||
from boto.exception import DynamoDBResponseError
|
||||
|
||||
|
||||
def create_table(conn):
|
||||
message_table_schema = conn.create_schema(
|
||||
hash_key_name='forum_name',
|
||||
hash_key_proto_value=str,
|
||||
)
|
||||
|
||||
table = conn.create_table(
|
||||
name='messages',
|
||||
schema=message_table_schema,
|
||||
read_units=10,
|
||||
write_units=10
|
||||
)
|
||||
return table
|
||||
|
||||
|
||||
@freeze_time("2012-01-14")
|
||||
@mock_dynamodb_deprecated
|
||||
def test_create_table():
|
||||
conn = boto.connect_dynamodb()
|
||||
create_table(conn)
|
||||
|
||||
expected = {
|
||||
'Table': {
|
||||
'CreationDateTime': 1326499200.0,
|
||||
'ItemCount': 0,
|
||||
'KeySchema': {
|
||||
'HashKeyElement': {
|
||||
'AttributeName': 'forum_name',
|
||||
'AttributeType': 'S'
|
||||
},
|
||||
},
|
||||
'ProvisionedThroughput': {
|
||||
'ReadCapacityUnits': 10,
|
||||
'WriteCapacityUnits': 10
|
||||
},
|
||||
'TableName': 'messages',
|
||||
'TableSizeBytes': 0,
|
||||
'TableStatus': 'ACTIVE',
|
||||
}
|
||||
}
|
||||
conn.describe_table('messages').should.equal(expected)
|
||||
|
||||
|
||||
@mock_dynamodb_deprecated
|
||||
def test_delete_table():
|
||||
conn = boto.connect_dynamodb()
|
||||
create_table(conn)
|
||||
conn.list_tables().should.have.length_of(1)
|
||||
|
||||
conn.layer1.delete_table('messages')
|
||||
conn.list_tables().should.have.length_of(0)
|
||||
|
||||
conn.layer1.delete_table.when.called_with(
|
||||
'messages').should.throw(DynamoDBResponseError)
|
||||
|
||||
|
||||
@mock_dynamodb_deprecated
|
||||
def test_update_table_throughput():
|
||||
conn = boto.connect_dynamodb()
|
||||
table = create_table(conn)
|
||||
table.read_units.should.equal(10)
|
||||
table.write_units.should.equal(10)
|
||||
|
||||
table.update_throughput(5, 6)
|
||||
table.refresh()
|
||||
|
||||
table.read_units.should.equal(5)
|
||||
table.write_units.should.equal(6)
|
||||
|
||||
|
||||
@mock_dynamodb_deprecated
|
||||
def test_item_add_and_describe_and_update():
|
||||
conn = boto.connect_dynamodb()
|
||||
table = create_table(conn)
|
||||
|
||||
item_data = {
|
||||
'Body': 'http://url_to_lolcat.gif',
|
||||
'SentBy': 'User A',
|
||||
'ReceivedTime': '12/9/2011 11:36:03 PM',
|
||||
}
|
||||
item = table.new_item(
|
||||
hash_key='LOLCat Forum',
|
||||
attrs=item_data,
|
||||
)
|
||||
item.put()
|
||||
|
||||
returned_item = table.get_item(
|
||||
hash_key='LOLCat Forum',
|
||||
attributes_to_get=['Body', 'SentBy']
|
||||
)
|
||||
dict(returned_item).should.equal({
|
||||
'forum_name': 'LOLCat Forum',
|
||||
'Body': 'http://url_to_lolcat.gif',
|
||||
'SentBy': 'User A',
|
||||
})
|
||||
|
||||
item['SentBy'] = 'User B'
|
||||
item.put()
|
||||
|
||||
returned_item = table.get_item(
|
||||
hash_key='LOLCat Forum',
|
||||
attributes_to_get=['Body', 'SentBy']
|
||||
)
|
||||
dict(returned_item).should.equal({
|
||||
'forum_name': 'LOLCat Forum',
|
||||
'Body': 'http://url_to_lolcat.gif',
|
||||
'SentBy': 'User B',
|
||||
})
|
||||
|
||||
|
||||
@mock_dynamodb_deprecated
|
||||
def test_item_put_without_table():
|
||||
conn = boto.connect_dynamodb()
|
||||
|
||||
conn.layer1.put_item.when.called_with(
|
||||
table_name='undeclared-table',
|
||||
item=dict(
|
||||
hash_key='LOLCat Forum',
|
||||
),
|
||||
).should.throw(DynamoDBResponseError)
|
||||
|
||||
|
||||
@mock_dynamodb_deprecated
|
||||
def test_get_missing_item():
|
||||
conn = boto.connect_dynamodb()
|
||||
table = create_table(conn)
|
||||
|
||||
table.get_item.when.called_with(
|
||||
hash_key='tester',
|
||||
).should.throw(DynamoDBKeyNotFoundError)
|
||||
|
||||
|
||||
@mock_dynamodb_deprecated
|
||||
def test_get_item_with_undeclared_table():
|
||||
conn = boto.connect_dynamodb()
|
||||
|
||||
conn.layer1.get_item.when.called_with(
|
||||
table_name='undeclared-table',
|
||||
key={
|
||||
'HashKeyElement': {'S': 'tester'},
|
||||
},
|
||||
).should.throw(DynamoDBKeyNotFoundError)
|
||||
|
||||
|
||||
@mock_dynamodb_deprecated
|
||||
def test_delete_item():
|
||||
conn = boto.connect_dynamodb()
|
||||
table = create_table(conn)
|
||||
|
||||
item_data = {
|
||||
'Body': 'http://url_to_lolcat.gif',
|
||||
'SentBy': 'User A',
|
||||
'ReceivedTime': '12/9/2011 11:36:03 PM',
|
||||
}
|
||||
item = table.new_item(
|
||||
hash_key='LOLCat Forum',
|
||||
attrs=item_data,
|
||||
)
|
||||
item.put()
|
||||
|
||||
table.refresh()
|
||||
table.item_count.should.equal(1)
|
||||
|
||||
response = item.delete()
|
||||
response.should.equal({u'Attributes': [], u'ConsumedCapacityUnits': 0.5})
|
||||
table.refresh()
|
||||
table.item_count.should.equal(0)
|
||||
|
||||
item.delete.when.called_with().should.throw(DynamoDBResponseError)
|
||||
|
||||
|
||||
@mock_dynamodb_deprecated
|
||||
def test_delete_item_with_attribute_response():
|
||||
conn = boto.connect_dynamodb()
|
||||
table = create_table(conn)
|
||||
|
||||
item_data = {
|
||||
'Body': 'http://url_to_lolcat.gif',
|
||||
'SentBy': 'User A',
|
||||
'ReceivedTime': '12/9/2011 11:36:03 PM',
|
||||
}
|
||||
item = table.new_item(
|
||||
hash_key='LOLCat Forum',
|
||||
attrs=item_data,
|
||||
)
|
||||
item.put()
|
||||
|
||||
table.refresh()
|
||||
table.item_count.should.equal(1)
|
||||
|
||||
response = item.delete(return_values='ALL_OLD')
|
||||
response.should.equal({
|
||||
u'Attributes': {
|
||||
u'Body': u'http://url_to_lolcat.gif',
|
||||
u'forum_name': u'LOLCat Forum',
|
||||
u'ReceivedTime': u'12/9/2011 11:36:03 PM',
|
||||
u'SentBy': u'User A',
|
||||
},
|
||||
u'ConsumedCapacityUnits': 0.5
|
||||
})
|
||||
table.refresh()
|
||||
table.item_count.should.equal(0)
|
||||
|
||||
item.delete.when.called_with().should.throw(DynamoDBResponseError)
|
||||
|
||||
|
||||
@mock_dynamodb_deprecated
|
||||
def test_delete_item_with_undeclared_table():
|
||||
conn = boto.connect_dynamodb()
|
||||
|
||||
conn.layer1.delete_item.when.called_with(
|
||||
table_name='undeclared-table',
|
||||
key={
|
||||
'HashKeyElement': {'S': 'tester'},
|
||||
},
|
||||
).should.throw(DynamoDBResponseError)
|
||||
|
||||
|
||||
@mock_dynamodb_deprecated
|
||||
def test_query():
|
||||
conn = boto.connect_dynamodb()
|
||||
table = create_table(conn)
|
||||
|
||||
item_data = {
|
||||
'Body': 'http://url_to_lolcat.gif',
|
||||
'SentBy': 'User A',
|
||||
'ReceivedTime': '12/9/2011 11:36:03 PM',
|
||||
}
|
||||
item = table.new_item(
|
||||
hash_key='the-key',
|
||||
attrs=item_data,
|
||||
)
|
||||
item.put()
|
||||
|
||||
results = table.query(hash_key='the-key')
|
||||
results.response['Items'].should.have.length_of(1)
|
||||
|
||||
|
||||
@mock_dynamodb_deprecated
|
||||
def test_query_with_undeclared_table():
|
||||
conn = boto.connect_dynamodb()
|
||||
|
||||
conn.layer1.query.when.called_with(
|
||||
table_name='undeclared-table',
|
||||
hash_key_value={'S': 'the-key'},
|
||||
).should.throw(DynamoDBResponseError)
|
||||
|
||||
|
||||
@mock_dynamodb_deprecated
|
||||
def test_scan():
|
||||
conn = boto.connect_dynamodb()
|
||||
table = create_table(conn)
|
||||
|
||||
item_data = {
|
||||
'Body': 'http://url_to_lolcat.gif',
|
||||
'SentBy': 'User A',
|
||||
'ReceivedTime': '12/9/2011 11:36:03 PM',
|
||||
}
|
||||
item = table.new_item(
|
||||
hash_key='the-key',
|
||||
attrs=item_data,
|
||||
)
|
||||
item.put()
|
||||
|
||||
item = table.new_item(
|
||||
hash_key='the-key2',
|
||||
attrs=item_data,
|
||||
)
|
||||
item.put()
|
||||
|
||||
item_data = {
|
||||
'Body': 'http://url_to_lolcat.gif',
|
||||
'SentBy': 'User B',
|
||||
'ReceivedTime': '12/9/2011 11:36:03 PM',
|
||||
'Ids': set([1, 2, 3]),
|
||||
'PK': 7,
|
||||
}
|
||||
item = table.new_item(
|
||||
hash_key='the-key3',
|
||||
attrs=item_data,
|
||||
)
|
||||
item.put()
|
||||
|
||||
results = table.scan()
|
||||
results.response['Items'].should.have.length_of(3)
|
||||
|
||||
results = table.scan(scan_filter={'SentBy': condition.EQ('User B')})
|
||||
results.response['Items'].should.have.length_of(1)
|
||||
|
||||
results = table.scan(scan_filter={'Body': condition.BEGINS_WITH('http')})
|
||||
results.response['Items'].should.have.length_of(3)
|
||||
|
||||
results = table.scan(scan_filter={'Ids': condition.CONTAINS(2)})
|
||||
results.response['Items'].should.have.length_of(1)
|
||||
|
||||
results = table.scan(scan_filter={'Ids': condition.NOT_NULL()})
|
||||
results.response['Items'].should.have.length_of(1)
|
||||
|
||||
results = table.scan(scan_filter={'Ids': condition.NULL()})
|
||||
results.response['Items'].should.have.length_of(2)
|
||||
|
||||
results = table.scan(scan_filter={'PK': condition.BETWEEN(8, 9)})
|
||||
results.response['Items'].should.have.length_of(0)
|
||||
|
||||
results = table.scan(scan_filter={'PK': condition.BETWEEN(5, 8)})
|
||||
results.response['Items'].should.have.length_of(1)
|
||||
|
||||
|
||||
@mock_dynamodb_deprecated
|
||||
def test_scan_with_undeclared_table():
|
||||
conn = boto.connect_dynamodb()
|
||||
|
||||
conn.layer1.scan.when.called_with(
|
||||
table_name='undeclared-table',
|
||||
scan_filter={
|
||||
"SentBy": {
|
||||
"AttributeValueList": [{
|
||||
"S": "User B"}
|
||||
],
|
||||
"ComparisonOperator": "EQ"
|
||||
}
|
||||
},
|
||||
).should.throw(DynamoDBResponseError)
|
||||
|
||||
|
||||
@mock_dynamodb_deprecated
|
||||
def test_scan_after_has_item():
|
||||
conn = boto.connect_dynamodb()
|
||||
table = create_table(conn)
|
||||
list(table.scan()).should.equal([])
|
||||
|
||||
table.has_item('the-key')
|
||||
|
||||
list(table.scan()).should.equal([])
|
||||
|
||||
|
||||
@mock_dynamodb_deprecated
|
||||
def test_write_batch():
|
||||
conn = boto.connect_dynamodb()
|
||||
table = create_table(conn)
|
||||
|
||||
batch_list = conn.new_batch_write_list()
|
||||
|
||||
items = []
|
||||
items.append(table.new_item(
|
||||
hash_key='the-key',
|
||||
attrs={
|
||||
'Body': 'http://url_to_lolcat.gif',
|
||||
'SentBy': 'User A',
|
||||
'ReceivedTime': '12/9/2011 11:36:03 PM',
|
||||
},
|
||||
))
|
||||
|
||||
items.append(table.new_item(
|
||||
hash_key='the-key2',
|
||||
attrs={
|
||||
'Body': 'http://url_to_lolcat.gif',
|
||||
'SentBy': 'User B',
|
||||
'ReceivedTime': '12/9/2011 11:36:03 PM',
|
||||
'Ids': set([1, 2, 3]),
|
||||
'PK': 7,
|
||||
},
|
||||
))
|
||||
|
||||
batch_list.add_batch(table, puts=items)
|
||||
conn.batch_write_item(batch_list)
|
||||
|
||||
table.refresh()
|
||||
table.item_count.should.equal(2)
|
||||
|
||||
batch_list = conn.new_batch_write_list()
|
||||
batch_list.add_batch(table, deletes=[('the-key')])
|
||||
conn.batch_write_item(batch_list)
|
||||
|
||||
table.refresh()
|
||||
table.item_count.should.equal(1)
|
||||
|
||||
|
||||
@mock_dynamodb_deprecated
|
||||
def test_batch_read():
|
||||
conn = boto.connect_dynamodb()
|
||||
table = create_table(conn)
|
||||
|
||||
item_data = {
|
||||
'Body': 'http://url_to_lolcat.gif',
|
||||
'SentBy': 'User A',
|
||||
'ReceivedTime': '12/9/2011 11:36:03 PM',
|
||||
}
|
||||
item = table.new_item(
|
||||
hash_key='the-key1',
|
||||
attrs=item_data,
|
||||
)
|
||||
item.put()
|
||||
|
||||
item = table.new_item(
|
||||
hash_key='the-key2',
|
||||
attrs=item_data,
|
||||
)
|
||||
item.put()
|
||||
|
||||
item_data = {
|
||||
'Body': 'http://url_to_lolcat.gif',
|
||||
'SentBy': 'User B',
|
||||
'ReceivedTime': '12/9/2011 11:36:03 PM',
|
||||
'Ids': set([1, 2, 3]),
|
||||
'PK': 7,
|
||||
}
|
||||
item = table.new_item(
|
||||
hash_key='another-key',
|
||||
attrs=item_data,
|
||||
)
|
||||
item.put()
|
||||
|
||||
items = table.batch_get_item([('the-key1'), ('another-key')])
|
||||
# Iterate through so that batch_item gets called
|
||||
count = len([x for x in items])
|
||||
count.should.have.equal(2)
|
||||
|
|
|
|||
|
|
@ -1,20 +1,20 @@
|
|||
from __future__ import unicode_literals
|
||||
import sure # noqa
|
||||
|
||||
import moto.server as server
|
||||
|
||||
'''
|
||||
Test the different server responses
|
||||
'''
|
||||
|
||||
|
||||
def test_table_list():
|
||||
backend = server.create_backend_app("dynamodb")
|
||||
test_client = backend.test_client()
|
||||
|
||||
res = test_client.get('/')
|
||||
res.status_code.should.equal(404)
|
||||
|
||||
headers = {'X-Amz-Target': 'TestTable.ListTables'}
|
||||
res = test_client.get('/', headers=headers)
|
||||
res.data.should.contain(b'TableNames')
|
||||
from __future__ import unicode_literals
|
||||
import sure # noqa
|
||||
|
||||
import moto.server as server
|
||||
|
||||
'''
|
||||
Test the different server responses
|
||||
'''
|
||||
|
||||
|
||||
def test_table_list():
|
||||
backend = server.create_backend_app("dynamodb")
|
||||
test_client = backend.test_client()
|
||||
|
||||
res = test_client.get('/')
|
||||
res.status_code.should.equal(404)
|
||||
|
||||
headers = {'X-Amz-Target': 'TestTable.ListTables'}
|
||||
res = test_client.get('/', headers=headers)
|
||||
res.data.should.contain(b'TableNames')
|
||||
|
|
|
|||
File diff suppressed because it is too large
Load diff
File diff suppressed because it is too large
Load diff
File diff suppressed because it is too large
Load diff
|
|
@ -1,19 +1,19 @@
|
|||
from __future__ import unicode_literals
|
||||
import sure # noqa
|
||||
|
||||
import moto.server as server
|
||||
|
||||
'''
|
||||
Test the different server responses
|
||||
'''
|
||||
|
||||
|
||||
def test_table_list():
|
||||
backend = server.create_backend_app("dynamodb2")
|
||||
test_client = backend.test_client()
|
||||
res = test_client.get('/')
|
||||
res.status_code.should.equal(404)
|
||||
|
||||
headers = {'X-Amz-Target': 'TestTable.ListTables'}
|
||||
res = test_client.get('/', headers=headers)
|
||||
res.data.should.contain(b'TableNames')
|
||||
from __future__ import unicode_literals
|
||||
import sure # noqa
|
||||
|
||||
import moto.server as server
|
||||
|
||||
'''
|
||||
Test the different server responses
|
||||
'''
|
||||
|
||||
|
||||
def test_table_list():
|
||||
backend = server.create_backend_app("dynamodb2")
|
||||
test_client = backend.test_client()
|
||||
res = test_client.get('/')
|
||||
res.status_code.should.equal(404)
|
||||
|
||||
headers = {'X-Amz-Target': 'TestTable.ListTables'}
|
||||
res = test_client.get('/', headers=headers)
|
||||
res.data.should.contain(b'TableNames')
|
||||
|
|
|
|||
|
|
@ -1,44 +1,44 @@
|
|||
from __future__ import unicode_literals
|
||||
import boto3
|
||||
from moto import mock_ec2
|
||||
import sure # noqa
|
||||
|
||||
|
||||
@mock_ec2
|
||||
def test_describe_account_attributes():
|
||||
conn = boto3.client('ec2', region_name='us-east-1')
|
||||
response = conn.describe_account_attributes()
|
||||
expected_attribute_values = [{
|
||||
'AttributeValues': [{
|
||||
'AttributeValue': '5'
|
||||
}],
|
||||
'AttributeName': 'vpc-max-security-groups-per-interface'
|
||||
}, {
|
||||
'AttributeValues': [{
|
||||
'AttributeValue': '20'
|
||||
}],
|
||||
'AttributeName': 'max-instances'
|
||||
}, {
|
||||
'AttributeValues': [{
|
||||
'AttributeValue': 'EC2'
|
||||
}, {
|
||||
'AttributeValue': 'VPC'
|
||||
}],
|
||||
'AttributeName': 'supported-platforms'
|
||||
}, {
|
||||
'AttributeValues': [{
|
||||
'AttributeValue': 'none'
|
||||
}],
|
||||
'AttributeName': 'default-vpc'
|
||||
}, {
|
||||
'AttributeValues': [{
|
||||
'AttributeValue': '5'
|
||||
}],
|
||||
'AttributeName': 'max-elastic-ips'
|
||||
}, {
|
||||
'AttributeValues': [{
|
||||
'AttributeValue': '5'
|
||||
}],
|
||||
'AttributeName': 'vpc-max-elastic-ips'
|
||||
}]
|
||||
response['AccountAttributes'].should.equal(expected_attribute_values)
|
||||
from __future__ import unicode_literals
|
||||
import boto3
|
||||
from moto import mock_ec2
|
||||
import sure # noqa
|
||||
|
||||
|
||||
@mock_ec2
|
||||
def test_describe_account_attributes():
|
||||
conn = boto3.client('ec2', region_name='us-east-1')
|
||||
response = conn.describe_account_attributes()
|
||||
expected_attribute_values = [{
|
||||
'AttributeValues': [{
|
||||
'AttributeValue': '5'
|
||||
}],
|
||||
'AttributeName': 'vpc-max-security-groups-per-interface'
|
||||
}, {
|
||||
'AttributeValues': [{
|
||||
'AttributeValue': '20'
|
||||
}],
|
||||
'AttributeName': 'max-instances'
|
||||
}, {
|
||||
'AttributeValues': [{
|
||||
'AttributeValue': 'EC2'
|
||||
}, {
|
||||
'AttributeValue': 'VPC'
|
||||
}],
|
||||
'AttributeName': 'supported-platforms'
|
||||
}, {
|
||||
'AttributeValues': [{
|
||||
'AttributeValue': 'none'
|
||||
}],
|
||||
'AttributeName': 'default-vpc'
|
||||
}, {
|
||||
'AttributeValues': [{
|
||||
'AttributeValue': '5'
|
||||
}],
|
||||
'AttributeName': 'max-elastic-ips'
|
||||
}, {
|
||||
'AttributeValues': [{
|
||||
'AttributeValue': '5'
|
||||
}],
|
||||
'AttributeName': 'vpc-max-elastic-ips'
|
||||
}]
|
||||
response['AccountAttributes'].should.equal(expected_attribute_values)
|
||||
|
|
|
|||
|
|
@ -1,10 +1,10 @@
|
|||
from __future__ import unicode_literals
|
||||
import boto
|
||||
import sure # noqa
|
||||
|
||||
from moto import mock_ec2
|
||||
|
||||
|
||||
@mock_ec2
|
||||
def test_amazon_dev_pay():
|
||||
pass
|
||||
from __future__ import unicode_literals
|
||||
import boto
|
||||
import sure # noqa
|
||||
|
||||
from moto import mock_ec2
|
||||
|
||||
|
||||
@mock_ec2
|
||||
def test_amazon_dev_pay():
|
||||
pass
|
||||
|
|
|
|||
File diff suppressed because it is too large
Load diff
|
|
@ -1,54 +1,54 @@
|
|||
from __future__ import unicode_literals
|
||||
import boto
|
||||
import boto.ec2
|
||||
import boto3
|
||||
import sure # noqa
|
||||
|
||||
from moto import mock_ec2, mock_ec2_deprecated
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_describe_regions():
|
||||
conn = boto.connect_ec2('the_key', 'the_secret')
|
||||
regions = conn.get_all_regions()
|
||||
regions.should.have.length_of(16)
|
||||
for region in regions:
|
||||
region.endpoint.should.contain(region.name)
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_availability_zones():
|
||||
conn = boto.connect_ec2('the_key', 'the_secret')
|
||||
regions = conn.get_all_regions()
|
||||
for region in regions:
|
||||
conn = boto.ec2.connect_to_region(region.name)
|
||||
if conn is None:
|
||||
continue
|
||||
for zone in conn.get_all_zones():
|
||||
zone.name.should.contain(region.name)
|
||||
|
||||
|
||||
@mock_ec2
|
||||
def test_boto3_describe_regions():
|
||||
ec2 = boto3.client('ec2', 'us-east-1')
|
||||
resp = ec2.describe_regions()
|
||||
resp['Regions'].should.have.length_of(16)
|
||||
for rec in resp['Regions']:
|
||||
rec['Endpoint'].should.contain(rec['RegionName'])
|
||||
|
||||
test_region = 'us-east-1'
|
||||
resp = ec2.describe_regions(RegionNames=[test_region])
|
||||
resp['Regions'].should.have.length_of(1)
|
||||
resp['Regions'][0].should.have.key('RegionName').which.should.equal(test_region)
|
||||
|
||||
|
||||
@mock_ec2
|
||||
def test_boto3_availability_zones():
|
||||
ec2 = boto3.client('ec2', 'us-east-1')
|
||||
resp = ec2.describe_regions()
|
||||
regions = [r['RegionName'] for r in resp['Regions']]
|
||||
for region in regions:
|
||||
conn = boto3.client('ec2', region)
|
||||
resp = conn.describe_availability_zones()
|
||||
for rec in resp['AvailabilityZones']:
|
||||
rec['ZoneName'].should.contain(region)
|
||||
from __future__ import unicode_literals
|
||||
import boto
|
||||
import boto.ec2
|
||||
import boto3
|
||||
import sure # noqa
|
||||
|
||||
from moto import mock_ec2, mock_ec2_deprecated
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_describe_regions():
|
||||
conn = boto.connect_ec2('the_key', 'the_secret')
|
||||
regions = conn.get_all_regions()
|
||||
regions.should.have.length_of(16)
|
||||
for region in regions:
|
||||
region.endpoint.should.contain(region.name)
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_availability_zones():
|
||||
conn = boto.connect_ec2('the_key', 'the_secret')
|
||||
regions = conn.get_all_regions()
|
||||
for region in regions:
|
||||
conn = boto.ec2.connect_to_region(region.name)
|
||||
if conn is None:
|
||||
continue
|
||||
for zone in conn.get_all_zones():
|
||||
zone.name.should.contain(region.name)
|
||||
|
||||
|
||||
@mock_ec2
|
||||
def test_boto3_describe_regions():
|
||||
ec2 = boto3.client('ec2', 'us-east-1')
|
||||
resp = ec2.describe_regions()
|
||||
resp['Regions'].should.have.length_of(16)
|
||||
for rec in resp['Regions']:
|
||||
rec['Endpoint'].should.contain(rec['RegionName'])
|
||||
|
||||
test_region = 'us-east-1'
|
||||
resp = ec2.describe_regions(RegionNames=[test_region])
|
||||
resp['Regions'].should.have.length_of(1)
|
||||
resp['Regions'][0].should.have.key('RegionName').which.should.equal(test_region)
|
||||
|
||||
|
||||
@mock_ec2
|
||||
def test_boto3_availability_zones():
|
||||
ec2 = boto3.client('ec2', 'us-east-1')
|
||||
resp = ec2.describe_regions()
|
||||
regions = [r['RegionName'] for r in resp['Regions']]
|
||||
for region in regions:
|
||||
conn = boto3.client('ec2', region)
|
||||
resp = conn.describe_availability_zones()
|
||||
for rec in resp['AvailabilityZones']:
|
||||
rec['ZoneName'].should.contain(region)
|
||||
|
|
|
|||
|
|
@ -1,52 +1,52 @@
|
|||
from __future__ import unicode_literals
|
||||
import boto
|
||||
import sure # noqa
|
||||
from nose.tools import assert_raises
|
||||
from nose.tools import assert_false
|
||||
from boto.exception import EC2ResponseError
|
||||
|
||||
from moto import mock_ec2_deprecated
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_create_customer_gateways():
|
||||
conn = boto.connect_vpc('the_key', 'the_secret')
|
||||
|
||||
customer_gateway = conn.create_customer_gateway(
|
||||
'ipsec.1', '205.251.242.54', 65534)
|
||||
customer_gateway.should_not.be.none
|
||||
customer_gateway.id.should.match(r'cgw-\w+')
|
||||
customer_gateway.type.should.equal('ipsec.1')
|
||||
customer_gateway.bgp_asn.should.equal(65534)
|
||||
customer_gateway.ip_address.should.equal('205.251.242.54')
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_describe_customer_gateways():
|
||||
conn = boto.connect_vpc('the_key', 'the_secret')
|
||||
customer_gateway = conn.create_customer_gateway(
|
||||
'ipsec.1', '205.251.242.54', 65534)
|
||||
cgws = conn.get_all_customer_gateways()
|
||||
cgws.should.have.length_of(1)
|
||||
cgws[0].id.should.match(customer_gateway.id)
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_delete_customer_gateways():
|
||||
conn = boto.connect_vpc('the_key', 'the_secret')
|
||||
|
||||
customer_gateway = conn.create_customer_gateway(
|
||||
'ipsec.1', '205.251.242.54', 65534)
|
||||
customer_gateway.should_not.be.none
|
||||
cgws = conn.get_all_customer_gateways()
|
||||
cgws[0].id.should.match(customer_gateway.id)
|
||||
deleted = conn.delete_customer_gateway(customer_gateway.id)
|
||||
cgws = conn.get_all_customer_gateways()
|
||||
cgws.should.have.length_of(0)
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_delete_customer_gateways_bad_id():
|
||||
conn = boto.connect_vpc('the_key', 'the_secret')
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
conn.delete_customer_gateway('cgw-0123abcd')
|
||||
from __future__ import unicode_literals
|
||||
import boto
|
||||
import sure # noqa
|
||||
from nose.tools import assert_raises
|
||||
from nose.tools import assert_false
|
||||
from boto.exception import EC2ResponseError
|
||||
|
||||
from moto import mock_ec2_deprecated
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_create_customer_gateways():
|
||||
conn = boto.connect_vpc('the_key', 'the_secret')
|
||||
|
||||
customer_gateway = conn.create_customer_gateway(
|
||||
'ipsec.1', '205.251.242.54', 65534)
|
||||
customer_gateway.should_not.be.none
|
||||
customer_gateway.id.should.match(r'cgw-\w+')
|
||||
customer_gateway.type.should.equal('ipsec.1')
|
||||
customer_gateway.bgp_asn.should.equal(65534)
|
||||
customer_gateway.ip_address.should.equal('205.251.242.54')
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_describe_customer_gateways():
|
||||
conn = boto.connect_vpc('the_key', 'the_secret')
|
||||
customer_gateway = conn.create_customer_gateway(
|
||||
'ipsec.1', '205.251.242.54', 65534)
|
||||
cgws = conn.get_all_customer_gateways()
|
||||
cgws.should.have.length_of(1)
|
||||
cgws[0].id.should.match(customer_gateway.id)
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_delete_customer_gateways():
|
||||
conn = boto.connect_vpc('the_key', 'the_secret')
|
||||
|
||||
customer_gateway = conn.create_customer_gateway(
|
||||
'ipsec.1', '205.251.242.54', 65534)
|
||||
customer_gateway.should_not.be.none
|
||||
cgws = conn.get_all_customer_gateways()
|
||||
cgws[0].id.should.match(customer_gateway.id)
|
||||
deleted = conn.delete_customer_gateway(customer_gateway.id)
|
||||
cgws = conn.get_all_customer_gateways()
|
||||
cgws.should.have.length_of(0)
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_delete_customer_gateways_bad_id():
|
||||
conn = boto.connect_vpc('the_key', 'the_secret')
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
conn.delete_customer_gateway('cgw-0123abcd')
|
||||
|
|
|
|||
|
|
@ -1,333 +1,333 @@
|
|||
from __future__ import unicode_literals
|
||||
# Ensure 'assert_raises' context manager support for Python 2.6
|
||||
import tests.backport_assert_raises
|
||||
from nose.tools import assert_raises
|
||||
|
||||
import boto3
|
||||
import boto
|
||||
from boto.exception import EC2ResponseError
|
||||
|
||||
import sure # noqa
|
||||
|
||||
from moto import mock_ec2, mock_ec2_deprecated
|
||||
|
||||
SAMPLE_DOMAIN_NAME = u'example.com'
|
||||
SAMPLE_NAME_SERVERS = [u'10.0.0.6', u'10.0.0.7']
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_dhcp_options_associate():
|
||||
""" associate dhcp option """
|
||||
conn = boto.connect_vpc('the_key', 'the_secret')
|
||||
dhcp_options = conn.create_dhcp_options(
|
||||
SAMPLE_DOMAIN_NAME, SAMPLE_NAME_SERVERS)
|
||||
vpc = conn.create_vpc("10.0.0.0/16")
|
||||
|
||||
rval = conn.associate_dhcp_options(dhcp_options.id, vpc.id)
|
||||
rval.should.be.equal(True)
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_dhcp_options_associate_invalid_dhcp_id():
|
||||
""" associate dhcp option bad dhcp options id """
|
||||
conn = boto.connect_vpc('the_key', 'the_secret')
|
||||
vpc = conn.create_vpc("10.0.0.0/16")
|
||||
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
conn.associate_dhcp_options("foo", vpc.id)
|
||||
cm.exception.code.should.equal('InvalidDhcpOptionID.NotFound')
|
||||
cm.exception.status.should.equal(400)
|
||||
cm.exception.request_id.should_not.be.none
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_dhcp_options_associate_invalid_vpc_id():
|
||||
""" associate dhcp option invalid vpc id """
|
||||
conn = boto.connect_vpc('the_key', 'the_secret')
|
||||
dhcp_options = conn.create_dhcp_options(
|
||||
SAMPLE_DOMAIN_NAME, SAMPLE_NAME_SERVERS)
|
||||
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
conn.associate_dhcp_options(dhcp_options.id, "foo")
|
||||
cm.exception.code.should.equal('InvalidVpcID.NotFound')
|
||||
cm.exception.status.should.equal(400)
|
||||
cm.exception.request_id.should_not.be.none
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_dhcp_options_delete_with_vpc():
|
||||
"""Test deletion of dhcp options with vpc"""
|
||||
conn = boto.connect_vpc('the_key', 'the_secret')
|
||||
dhcp_options = conn.create_dhcp_options(
|
||||
SAMPLE_DOMAIN_NAME, SAMPLE_NAME_SERVERS)
|
||||
dhcp_options_id = dhcp_options.id
|
||||
vpc = conn.create_vpc("10.0.0.0/16")
|
||||
|
||||
rval = conn.associate_dhcp_options(dhcp_options_id, vpc.id)
|
||||
rval.should.be.equal(True)
|
||||
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
conn.delete_dhcp_options(dhcp_options_id)
|
||||
cm.exception.code.should.equal('DependencyViolation')
|
||||
cm.exception.status.should.equal(400)
|
||||
cm.exception.request_id.should_not.be.none
|
||||
|
||||
vpc.delete()
|
||||
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
conn.get_all_dhcp_options([dhcp_options_id])
|
||||
cm.exception.code.should.equal('InvalidDhcpOptionID.NotFound')
|
||||
cm.exception.status.should.equal(400)
|
||||
cm.exception.request_id.should_not.be.none
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_create_dhcp_options():
|
||||
"""Create most basic dhcp option"""
|
||||
conn = boto.connect_vpc('the_key', 'the_secret')
|
||||
|
||||
dhcp_option = conn.create_dhcp_options(
|
||||
SAMPLE_DOMAIN_NAME, SAMPLE_NAME_SERVERS)
|
||||
dhcp_option.options[u'domain-name'][0].should.be.equal(SAMPLE_DOMAIN_NAME)
|
||||
dhcp_option.options[
|
||||
u'domain-name-servers'][0].should.be.equal(SAMPLE_NAME_SERVERS[0])
|
||||
dhcp_option.options[
|
||||
u'domain-name-servers'][1].should.be.equal(SAMPLE_NAME_SERVERS[1])
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_create_dhcp_options_invalid_options():
|
||||
"""Create invalid dhcp options"""
|
||||
conn = boto.connect_vpc('the_key', 'the_secret')
|
||||
servers = ["f", "f", "f", "f", "f"]
|
||||
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
conn.create_dhcp_options(ntp_servers=servers)
|
||||
cm.exception.code.should.equal('InvalidParameterValue')
|
||||
cm.exception.status.should.equal(400)
|
||||
cm.exception.request_id.should_not.be.none
|
||||
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
conn.create_dhcp_options(netbios_node_type="0")
|
||||
cm.exception.code.should.equal('InvalidParameterValue')
|
||||
cm.exception.status.should.equal(400)
|
||||
cm.exception.request_id.should_not.be.none
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_describe_dhcp_options():
|
||||
"""Test dhcp options lookup by id"""
|
||||
conn = boto.connect_vpc('the_key', 'the_secret')
|
||||
|
||||
dhcp_option = conn.create_dhcp_options()
|
||||
dhcp_options = conn.get_all_dhcp_options([dhcp_option.id])
|
||||
dhcp_options.should.be.length_of(1)
|
||||
|
||||
dhcp_options = conn.get_all_dhcp_options()
|
||||
dhcp_options.should.be.length_of(1)
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_describe_dhcp_options_invalid_id():
|
||||
"""get error on invalid dhcp_option_id lookup"""
|
||||
conn = boto.connect_vpc('the_key', 'the_secret')
|
||||
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
conn.get_all_dhcp_options(["1"])
|
||||
cm.exception.code.should.equal('InvalidDhcpOptionID.NotFound')
|
||||
cm.exception.status.should.equal(400)
|
||||
cm.exception.request_id.should_not.be.none
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_delete_dhcp_options():
|
||||
"""delete dhcp option"""
|
||||
conn = boto.connect_vpc('the_key', 'the_secret')
|
||||
|
||||
dhcp_option = conn.create_dhcp_options()
|
||||
dhcp_options = conn.get_all_dhcp_options([dhcp_option.id])
|
||||
dhcp_options.should.be.length_of(1)
|
||||
|
||||
conn.delete_dhcp_options(dhcp_option.id) # .should.be.equal(True)
|
||||
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
conn.get_all_dhcp_options([dhcp_option.id])
|
||||
cm.exception.code.should.equal('InvalidDhcpOptionID.NotFound')
|
||||
cm.exception.status.should.equal(400)
|
||||
cm.exception.request_id.should_not.be.none
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_delete_dhcp_options_invalid_id():
|
||||
conn = boto.connect_vpc('the_key', 'the_secret')
|
||||
|
||||
conn.create_dhcp_options()
|
||||
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
conn.delete_dhcp_options("dopt-abcd1234")
|
||||
cm.exception.code.should.equal('InvalidDhcpOptionID.NotFound')
|
||||
cm.exception.status.should.equal(400)
|
||||
cm.exception.request_id.should_not.be.none
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_delete_dhcp_options_malformed_id():
|
||||
conn = boto.connect_vpc('the_key', 'the_secret')
|
||||
|
||||
conn.create_dhcp_options()
|
||||
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
conn.delete_dhcp_options("foo-abcd1234")
|
||||
cm.exception.code.should.equal('InvalidDhcpOptionsId.Malformed')
|
||||
cm.exception.status.should.equal(400)
|
||||
cm.exception.request_id.should_not.be.none
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_dhcp_tagging():
|
||||
conn = boto.connect_vpc('the_key', 'the_secret')
|
||||
dhcp_option = conn.create_dhcp_options()
|
||||
|
||||
dhcp_option.add_tag("a key", "some value")
|
||||
|
||||
tag = conn.get_all_tags()[0]
|
||||
tag.name.should.equal("a key")
|
||||
tag.value.should.equal("some value")
|
||||
|
||||
# Refresh the DHCP options
|
||||
dhcp_option = conn.get_all_dhcp_options()[0]
|
||||
dhcp_option.tags.should.have.length_of(1)
|
||||
dhcp_option.tags["a key"].should.equal("some value")
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_dhcp_options_get_by_tag():
|
||||
conn = boto.connect_vpc('the_key', 'the_secret')
|
||||
|
||||
dhcp1 = conn.create_dhcp_options('example.com', ['10.0.10.2'])
|
||||
dhcp1.add_tag('Name', 'TestDhcpOptions1')
|
||||
dhcp1.add_tag('test-tag', 'test-value')
|
||||
|
||||
dhcp2 = conn.create_dhcp_options('example.com', ['10.0.20.2'])
|
||||
dhcp2.add_tag('Name', 'TestDhcpOptions2')
|
||||
dhcp2.add_tag('test-tag', 'test-value')
|
||||
|
||||
filters = {'tag:Name': 'TestDhcpOptions1', 'tag:test-tag': 'test-value'}
|
||||
dhcp_options_sets = conn.get_all_dhcp_options(filters=filters)
|
||||
|
||||
dhcp_options_sets.should.have.length_of(1)
|
||||
dhcp_options_sets[0].options[
|
||||
'domain-name'][0].should.be.equal('example.com')
|
||||
dhcp_options_sets[0].options[
|
||||
'domain-name-servers'][0].should.be.equal('10.0.10.2')
|
||||
dhcp_options_sets[0].tags['Name'].should.equal('TestDhcpOptions1')
|
||||
dhcp_options_sets[0].tags['test-tag'].should.equal('test-value')
|
||||
|
||||
filters = {'tag:Name': 'TestDhcpOptions2', 'tag:test-tag': 'test-value'}
|
||||
dhcp_options_sets = conn.get_all_dhcp_options(filters=filters)
|
||||
|
||||
dhcp_options_sets.should.have.length_of(1)
|
||||
dhcp_options_sets[0].options[
|
||||
'domain-name'][0].should.be.equal('example.com')
|
||||
dhcp_options_sets[0].options[
|
||||
'domain-name-servers'][0].should.be.equal('10.0.20.2')
|
||||
dhcp_options_sets[0].tags['Name'].should.equal('TestDhcpOptions2')
|
||||
dhcp_options_sets[0].tags['test-tag'].should.equal('test-value')
|
||||
|
||||
filters = {'tag:test-tag': 'test-value'}
|
||||
dhcp_options_sets = conn.get_all_dhcp_options(filters=filters)
|
||||
|
||||
dhcp_options_sets.should.have.length_of(2)
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_dhcp_options_get_by_id():
|
||||
conn = boto.connect_vpc('the_key', 'the_secret')
|
||||
|
||||
dhcp1 = conn.create_dhcp_options('test1.com', ['10.0.10.2'])
|
||||
dhcp1.add_tag('Name', 'TestDhcpOptions1')
|
||||
dhcp1.add_tag('test-tag', 'test-value')
|
||||
dhcp1_id = dhcp1.id
|
||||
|
||||
dhcp2 = conn.create_dhcp_options('test2.com', ['10.0.20.2'])
|
||||
dhcp2.add_tag('Name', 'TestDhcpOptions2')
|
||||
dhcp2.add_tag('test-tag', 'test-value')
|
||||
dhcp2_id = dhcp2.id
|
||||
|
||||
dhcp_options_sets = conn.get_all_dhcp_options()
|
||||
dhcp_options_sets.should.have.length_of(2)
|
||||
|
||||
dhcp_options_sets = conn.get_all_dhcp_options(
|
||||
filters={'dhcp-options-id': dhcp1_id})
|
||||
|
||||
dhcp_options_sets.should.have.length_of(1)
|
||||
dhcp_options_sets[0].options['domain-name'][0].should.be.equal('test1.com')
|
||||
dhcp_options_sets[0].options[
|
||||
'domain-name-servers'][0].should.be.equal('10.0.10.2')
|
||||
|
||||
dhcp_options_sets = conn.get_all_dhcp_options(
|
||||
filters={'dhcp-options-id': dhcp2_id})
|
||||
|
||||
dhcp_options_sets.should.have.length_of(1)
|
||||
dhcp_options_sets[0].options['domain-name'][0].should.be.equal('test2.com')
|
||||
dhcp_options_sets[0].options[
|
||||
'domain-name-servers'][0].should.be.equal('10.0.20.2')
|
||||
|
||||
|
||||
@mock_ec2
|
||||
def test_dhcp_options_get_by_value_filter():
|
||||
ec2 = boto3.resource('ec2', region_name='us-west-1')
|
||||
|
||||
ec2.create_dhcp_options(DhcpConfigurations=[
|
||||
{'Key': 'domain-name', 'Values': ['example.com']},
|
||||
{'Key': 'domain-name-servers', 'Values': ['10.0.10.2']}
|
||||
])
|
||||
|
||||
ec2.create_dhcp_options(DhcpConfigurations=[
|
||||
{'Key': 'domain-name', 'Values': ['example.com']},
|
||||
{'Key': 'domain-name-servers', 'Values': ['10.0.20.2']}
|
||||
])
|
||||
|
||||
ec2.create_dhcp_options(DhcpConfigurations=[
|
||||
{'Key': 'domain-name', 'Values': ['example.com']},
|
||||
{'Key': 'domain-name-servers', 'Values': ['10.0.30.2']}
|
||||
])
|
||||
|
||||
filters = [{'Name': 'value', 'Values': ['10.0.10.2']}]
|
||||
dhcp_options_sets = list(ec2.dhcp_options_sets.filter(Filters=filters))
|
||||
dhcp_options_sets.should.have.length_of(1)
|
||||
|
||||
|
||||
@mock_ec2
|
||||
def test_dhcp_options_get_by_key_filter():
|
||||
ec2 = boto3.resource('ec2', region_name='us-west-1')
|
||||
|
||||
ec2.create_dhcp_options(DhcpConfigurations=[
|
||||
{'Key': 'domain-name', 'Values': ['example.com']},
|
||||
{'Key': 'domain-name-servers', 'Values': ['10.0.10.2']}
|
||||
])
|
||||
|
||||
ec2.create_dhcp_options(DhcpConfigurations=[
|
||||
{'Key': 'domain-name', 'Values': ['example.com']},
|
||||
{'Key': 'domain-name-servers', 'Values': ['10.0.20.2']}
|
||||
])
|
||||
|
||||
ec2.create_dhcp_options(DhcpConfigurations=[
|
||||
{'Key': 'domain-name', 'Values': ['example.com']},
|
||||
{'Key': 'domain-name-servers', 'Values': ['10.0.30.2']}
|
||||
])
|
||||
|
||||
filters = [{'Name': 'key', 'Values': ['domain-name']}]
|
||||
dhcp_options_sets = list(ec2.dhcp_options_sets.filter(Filters=filters))
|
||||
dhcp_options_sets.should.have.length_of(3)
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_dhcp_options_get_by_invalid_filter():
|
||||
conn = boto.connect_vpc('the_key', 'the_secret')
|
||||
|
||||
conn.create_dhcp_options(SAMPLE_DOMAIN_NAME, SAMPLE_NAME_SERVERS)
|
||||
filters = {'invalid-filter': 'invalid-value'}
|
||||
|
||||
conn.get_all_dhcp_options.when.called_with(
|
||||
filters=filters).should.throw(NotImplementedError)
|
||||
from __future__ import unicode_literals
|
||||
# Ensure 'assert_raises' context manager support for Python 2.6
|
||||
import tests.backport_assert_raises
|
||||
from nose.tools import assert_raises
|
||||
|
||||
import boto3
|
||||
import boto
|
||||
from boto.exception import EC2ResponseError
|
||||
|
||||
import sure # noqa
|
||||
|
||||
from moto import mock_ec2, mock_ec2_deprecated
|
||||
|
||||
SAMPLE_DOMAIN_NAME = u'example.com'
|
||||
SAMPLE_NAME_SERVERS = [u'10.0.0.6', u'10.0.0.7']
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_dhcp_options_associate():
|
||||
""" associate dhcp option """
|
||||
conn = boto.connect_vpc('the_key', 'the_secret')
|
||||
dhcp_options = conn.create_dhcp_options(
|
||||
SAMPLE_DOMAIN_NAME, SAMPLE_NAME_SERVERS)
|
||||
vpc = conn.create_vpc("10.0.0.0/16")
|
||||
|
||||
rval = conn.associate_dhcp_options(dhcp_options.id, vpc.id)
|
||||
rval.should.be.equal(True)
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_dhcp_options_associate_invalid_dhcp_id():
|
||||
""" associate dhcp option bad dhcp options id """
|
||||
conn = boto.connect_vpc('the_key', 'the_secret')
|
||||
vpc = conn.create_vpc("10.0.0.0/16")
|
||||
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
conn.associate_dhcp_options("foo", vpc.id)
|
||||
cm.exception.code.should.equal('InvalidDhcpOptionID.NotFound')
|
||||
cm.exception.status.should.equal(400)
|
||||
cm.exception.request_id.should_not.be.none
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_dhcp_options_associate_invalid_vpc_id():
|
||||
""" associate dhcp option invalid vpc id """
|
||||
conn = boto.connect_vpc('the_key', 'the_secret')
|
||||
dhcp_options = conn.create_dhcp_options(
|
||||
SAMPLE_DOMAIN_NAME, SAMPLE_NAME_SERVERS)
|
||||
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
conn.associate_dhcp_options(dhcp_options.id, "foo")
|
||||
cm.exception.code.should.equal('InvalidVpcID.NotFound')
|
||||
cm.exception.status.should.equal(400)
|
||||
cm.exception.request_id.should_not.be.none
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_dhcp_options_delete_with_vpc():
|
||||
"""Test deletion of dhcp options with vpc"""
|
||||
conn = boto.connect_vpc('the_key', 'the_secret')
|
||||
dhcp_options = conn.create_dhcp_options(
|
||||
SAMPLE_DOMAIN_NAME, SAMPLE_NAME_SERVERS)
|
||||
dhcp_options_id = dhcp_options.id
|
||||
vpc = conn.create_vpc("10.0.0.0/16")
|
||||
|
||||
rval = conn.associate_dhcp_options(dhcp_options_id, vpc.id)
|
||||
rval.should.be.equal(True)
|
||||
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
conn.delete_dhcp_options(dhcp_options_id)
|
||||
cm.exception.code.should.equal('DependencyViolation')
|
||||
cm.exception.status.should.equal(400)
|
||||
cm.exception.request_id.should_not.be.none
|
||||
|
||||
vpc.delete()
|
||||
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
conn.get_all_dhcp_options([dhcp_options_id])
|
||||
cm.exception.code.should.equal('InvalidDhcpOptionID.NotFound')
|
||||
cm.exception.status.should.equal(400)
|
||||
cm.exception.request_id.should_not.be.none
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_create_dhcp_options():
|
||||
"""Create most basic dhcp option"""
|
||||
conn = boto.connect_vpc('the_key', 'the_secret')
|
||||
|
||||
dhcp_option = conn.create_dhcp_options(
|
||||
SAMPLE_DOMAIN_NAME, SAMPLE_NAME_SERVERS)
|
||||
dhcp_option.options[u'domain-name'][0].should.be.equal(SAMPLE_DOMAIN_NAME)
|
||||
dhcp_option.options[
|
||||
u'domain-name-servers'][0].should.be.equal(SAMPLE_NAME_SERVERS[0])
|
||||
dhcp_option.options[
|
||||
u'domain-name-servers'][1].should.be.equal(SAMPLE_NAME_SERVERS[1])
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_create_dhcp_options_invalid_options():
|
||||
"""Create invalid dhcp options"""
|
||||
conn = boto.connect_vpc('the_key', 'the_secret')
|
||||
servers = ["f", "f", "f", "f", "f"]
|
||||
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
conn.create_dhcp_options(ntp_servers=servers)
|
||||
cm.exception.code.should.equal('InvalidParameterValue')
|
||||
cm.exception.status.should.equal(400)
|
||||
cm.exception.request_id.should_not.be.none
|
||||
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
conn.create_dhcp_options(netbios_node_type="0")
|
||||
cm.exception.code.should.equal('InvalidParameterValue')
|
||||
cm.exception.status.should.equal(400)
|
||||
cm.exception.request_id.should_not.be.none
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_describe_dhcp_options():
|
||||
"""Test dhcp options lookup by id"""
|
||||
conn = boto.connect_vpc('the_key', 'the_secret')
|
||||
|
||||
dhcp_option = conn.create_dhcp_options()
|
||||
dhcp_options = conn.get_all_dhcp_options([dhcp_option.id])
|
||||
dhcp_options.should.be.length_of(1)
|
||||
|
||||
dhcp_options = conn.get_all_dhcp_options()
|
||||
dhcp_options.should.be.length_of(1)
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_describe_dhcp_options_invalid_id():
|
||||
"""get error on invalid dhcp_option_id lookup"""
|
||||
conn = boto.connect_vpc('the_key', 'the_secret')
|
||||
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
conn.get_all_dhcp_options(["1"])
|
||||
cm.exception.code.should.equal('InvalidDhcpOptionID.NotFound')
|
||||
cm.exception.status.should.equal(400)
|
||||
cm.exception.request_id.should_not.be.none
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_delete_dhcp_options():
|
||||
"""delete dhcp option"""
|
||||
conn = boto.connect_vpc('the_key', 'the_secret')
|
||||
|
||||
dhcp_option = conn.create_dhcp_options()
|
||||
dhcp_options = conn.get_all_dhcp_options([dhcp_option.id])
|
||||
dhcp_options.should.be.length_of(1)
|
||||
|
||||
conn.delete_dhcp_options(dhcp_option.id) # .should.be.equal(True)
|
||||
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
conn.get_all_dhcp_options([dhcp_option.id])
|
||||
cm.exception.code.should.equal('InvalidDhcpOptionID.NotFound')
|
||||
cm.exception.status.should.equal(400)
|
||||
cm.exception.request_id.should_not.be.none
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_delete_dhcp_options_invalid_id():
|
||||
conn = boto.connect_vpc('the_key', 'the_secret')
|
||||
|
||||
conn.create_dhcp_options()
|
||||
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
conn.delete_dhcp_options("dopt-abcd1234")
|
||||
cm.exception.code.should.equal('InvalidDhcpOptionID.NotFound')
|
||||
cm.exception.status.should.equal(400)
|
||||
cm.exception.request_id.should_not.be.none
|
||||
|
||||
|
||||
@mock_ec2_deprecated
def test_delete_dhcp_options_malformed_id():
    """Deleting an id that is not of the 'dopt-' form raises Malformed."""
    vpc_conn = boto.connect_vpc('the_key', 'the_secret')
    vpc_conn.create_dhcp_options()

    with assert_raises(EC2ResponseError) as err:
        vpc_conn.delete_dhcp_options("foo-abcd1234")

    raised = err.exception
    raised.code.should.equal('InvalidDhcpOptionsId.Malformed')
    raised.status.should.equal(400)
    raised.request_id.should_not.be.none
|
||||
|
||||
|
||||
@mock_ec2_deprecated
def test_dhcp_tagging():
    """Tags added to a DHCP options set show up in get_all_tags and on refresh."""
    vpc_conn = boto.connect_vpc('the_key', 'the_secret')
    options = vpc_conn.create_dhcp_options()

    options.add_tag("a key", "some value")

    first_tag = vpc_conn.get_all_tags()[0]
    first_tag.name.should.equal("a key")
    first_tag.value.should.equal("some value")

    # Re-fetch the options set so tags come from the backend, not the local object.
    refreshed = vpc_conn.get_all_dhcp_options()[0]
    refreshed.tags.should.have.length_of(1)
    refreshed.tags["a key"].should.equal("some value")
|
||||
|
||||
|
||||
@mock_ec2_deprecated
def test_dhcp_options_get_by_tag():
    """Filter DHCP options sets by tag combinations."""
    vpc_conn = boto.connect_vpc('the_key', 'the_secret')

    first = vpc_conn.create_dhcp_options('example.com', ['10.0.10.2'])
    first.add_tag('Name', 'TestDhcpOptions1')
    first.add_tag('test-tag', 'test-value')

    second = vpc_conn.create_dhcp_options('example.com', ['10.0.20.2'])
    second.add_tag('Name', 'TestDhcpOptions2')
    second.add_tag('test-tag', 'test-value')

    # Name tag plus shared tag matches only the first set.
    matches = vpc_conn.get_all_dhcp_options(
        filters={'tag:Name': 'TestDhcpOptions1', 'tag:test-tag': 'test-value'})
    matches.should.have.length_of(1)
    matches[0].options['domain-name'][0].should.be.equal('example.com')
    matches[0].options['domain-name-servers'][0].should.be.equal('10.0.10.2')
    matches[0].tags['Name'].should.equal('TestDhcpOptions1')
    matches[0].tags['test-tag'].should.equal('test-value')

    # Name tag plus shared tag matches only the second set.
    matches = vpc_conn.get_all_dhcp_options(
        filters={'tag:Name': 'TestDhcpOptions2', 'tag:test-tag': 'test-value'})
    matches.should.have.length_of(1)
    matches[0].options['domain-name'][0].should.be.equal('example.com')
    matches[0].options['domain-name-servers'][0].should.be.equal('10.0.20.2')
    matches[0].tags['Name'].should.equal('TestDhcpOptions2')
    matches[0].tags['test-tag'].should.equal('test-value')

    # The shared tag alone matches both sets.
    matches = vpc_conn.get_all_dhcp_options(
        filters={'tag:test-tag': 'test-value'})
    matches.should.have.length_of(2)
|
||||
|
||||
|
||||
@mock_ec2_deprecated
def test_dhcp_options_get_by_id():
    """Filter DHCP options sets with the 'dhcp-options-id' filter."""
    vpc_conn = boto.connect_vpc('the_key', 'the_secret')

    first = vpc_conn.create_dhcp_options('test1.com', ['10.0.10.2'])
    first.add_tag('Name', 'TestDhcpOptions1')
    first.add_tag('test-tag', 'test-value')

    second = vpc_conn.create_dhcp_options('test2.com', ['10.0.20.2'])
    second.add_tag('Name', 'TestDhcpOptions2')
    second.add_tag('test-tag', 'test-value')

    # Unfiltered listing sees both sets.
    vpc_conn.get_all_dhcp_options().should.have.length_of(2)

    matches = vpc_conn.get_all_dhcp_options(
        filters={'dhcp-options-id': first.id})
    matches.should.have.length_of(1)
    matches[0].options['domain-name'][0].should.be.equal('test1.com')
    matches[0].options['domain-name-servers'][0].should.be.equal('10.0.10.2')

    matches = vpc_conn.get_all_dhcp_options(
        filters={'dhcp-options-id': second.id})
    matches.should.have.length_of(1)
    matches[0].options['domain-name'][0].should.be.equal('test2.com')
    matches[0].options['domain-name-servers'][0].should.be.equal('10.0.20.2')
|
||||
|
||||
|
||||
@mock_ec2
def test_dhcp_options_get_by_value_filter():
    """The 'value' filter matches option sets by any configured option value."""
    ec2 = boto3.resource('ec2', region_name='us-west-1')

    # Three sets share a domain name but differ in their DNS server value.
    for dns_server in ('10.0.10.2', '10.0.20.2', '10.0.30.2'):
        ec2.create_dhcp_options(DhcpConfigurations=[
            {'Key': 'domain-name', 'Values': ['example.com']},
            {'Key': 'domain-name-servers', 'Values': [dns_server]},
        ])

    matched = list(ec2.dhcp_options_sets.filter(
        Filters=[{'Name': 'value', 'Values': ['10.0.10.2']}]))
    matched.should.have.length_of(1)
|
||||
|
||||
|
||||
@mock_ec2
def test_dhcp_options_get_by_key_filter():
    """The 'key' filter matches every option set configuring that option key."""
    ec2 = boto3.resource('ec2', region_name='us-west-1')

    # Three sets share a domain name but differ in their DNS server value.
    for dns_server in ('10.0.10.2', '10.0.20.2', '10.0.30.2'):
        ec2.create_dhcp_options(DhcpConfigurations=[
            {'Key': 'domain-name', 'Values': ['example.com']},
            {'Key': 'domain-name-servers', 'Values': [dns_server]},
        ])

    matched = list(ec2.dhcp_options_sets.filter(
        Filters=[{'Name': 'key', 'Values': ['domain-name']}]))
    matched.should.have.length_of(3)
|
||||
|
||||
|
||||
@mock_ec2_deprecated
def test_dhcp_options_get_by_invalid_filter():
    """An unsupported filter name raises NotImplementedError."""
    vpc_conn = boto.connect_vpc('the_key', 'the_secret')
    vpc_conn.create_dhcp_options(SAMPLE_DOMAIN_NAME, SAMPLE_NAME_SERVERS)

    bad_filters = {'invalid-filter': 'invalid-value'}
    vpc_conn.get_all_dhcp_options.when.called_with(
        filters=bad_filters).should.throw(NotImplementedError)
|
||||
|
|
|
|||
|
|
@ -1 +1 @@
|
|||
from __future__ import unicode_literals
|
||||
from __future__ import unicode_literals
|
||||
|
|
|
|||
File diff suppressed because it is too large
Load diff
File diff suppressed because it is too large
Load diff
|
|
@ -1,362 +1,362 @@
|
|||
from __future__ import unicode_literals
|
||||
# Ensure 'assert_raises' context manager support for Python 2.6
|
||||
import tests.backport_assert_raises
|
||||
from nose.tools import assert_raises
|
||||
|
||||
import boto3
|
||||
from botocore.exceptions import ClientError
|
||||
import boto
|
||||
import boto.cloudformation
|
||||
import boto.ec2
|
||||
from boto.exception import EC2ResponseError
|
||||
import sure # noqa
|
||||
|
||||
from moto import mock_ec2, mock_cloudformation_deprecated, mock_ec2_deprecated
|
||||
from tests.helpers import requires_boto_gte
|
||||
from tests.test_cloudformation.fixtures import vpc_eni
|
||||
import json
|
||||
|
||||
|
||||
@mock_ec2_deprecated
def test_elastic_network_interfaces():
    """Create, list and delete an ENI, covering dry-run and double-delete paths."""
    vpc_conn = boto.connect_vpc('the_key', 'the_secret')
    vpc = vpc_conn.create_vpc("10.0.0.0/16")
    subnet = vpc_conn.create_subnet(vpc.id, "10.0.0.0/18")

    # Dry-run creation must fail without creating anything.
    with assert_raises(EC2ResponseError) as ex:
        vpc_conn.create_network_interface(subnet.id, dry_run=True)
    ex.exception.error_code.should.equal('DryRunOperation')
    ex.exception.status.should.equal(400)
    ex.exception.message.should.equal(
        'An error occurred (DryRunOperation) when calling the CreateNetworkInterface operation: Request would have succeeded, but DryRun flag is set')

    vpc_conn.create_network_interface(subnet.id)

    interfaces = vpc_conn.get_all_network_interfaces()
    interfaces.should.have.length_of(1)
    created = interfaces[0]
    created.groups.should.have.length_of(0)
    created.private_ip_addresses.should.have.length_of(0)

    # Dry-run deletion must fail without deleting anything.
    with assert_raises(EC2ResponseError) as ex:
        vpc_conn.delete_network_interface(created.id, dry_run=True)
    ex.exception.error_code.should.equal('DryRunOperation')
    ex.exception.status.should.equal(400)
    ex.exception.message.should.equal(
        'An error occurred (DryRunOperation) when calling the DeleteNetworkInterface operation: Request would have succeeded, but DryRun flag is set')

    vpc_conn.delete_network_interface(created.id)
    vpc_conn.get_all_network_interfaces().should.have.length_of(0)

    # Deleting an already-deleted ENI raises NotFound.
    with assert_raises(EC2ResponseError) as cm:
        vpc_conn.delete_network_interface(created.id)
    cm.exception.error_code.should.equal('InvalidNetworkInterfaceID.NotFound')
    cm.exception.status.should.equal(400)
    cm.exception.request_id.should_not.be.none
|
||||
|
||||
|
||||
@mock_ec2_deprecated
def test_elastic_network_interfaces_subnet_validation():
    """Creating an ENI in a nonexistent subnet raises InvalidSubnetID.NotFound."""
    vpc_conn = boto.connect_vpc('the_key', 'the_secret')

    with assert_raises(EC2ResponseError) as err:
        vpc_conn.create_network_interface("subnet-abcd1234")

    raised = err.exception
    raised.error_code.should.equal('InvalidSubnetID.NotFound')
    raised.status.should.equal(400)
    raised.request_id.should_not.be.none
|
||||
|
||||
|
||||
@mock_ec2_deprecated
def test_elastic_network_interfaces_with_private_ip():
    """An ENI created with an explicit private IP reports exactly that address."""
    vpc_conn = boto.connect_vpc('the_key', 'the_secret')
    vpc = vpc_conn.create_vpc("10.0.0.0/16")
    subnet = vpc_conn.create_subnet(vpc.id, "10.0.0.0/18")
    requested_ip = "54.0.0.1"
    vpc_conn.create_network_interface(subnet.id, requested_ip)

    interfaces = vpc_conn.get_all_network_interfaces()
    interfaces.should.have.length_of(1)

    created = interfaces[0]
    created.groups.should.have.length_of(0)
    created.private_ip_addresses.should.have.length_of(1)
    created.private_ip_addresses[0].private_ip_address.should.equal(requested_ip)
|
||||
|
||||
|
||||
@mock_ec2_deprecated
def test_elastic_network_interfaces_with_groups():
    """An ENI created with two security groups reports both of them."""
    vpc_conn = boto.connect_vpc('the_key', 'the_secret')
    vpc = vpc_conn.create_vpc("10.0.0.0/16")
    subnet = vpc_conn.create_subnet(vpc.id, "10.0.0.0/18")
    sg_one = vpc_conn.create_security_group(
        'test security group #1', 'this is a test security group')
    sg_two = vpc_conn.create_security_group(
        'test security group #2', 'this is a test security group')
    vpc_conn.create_network_interface(
        subnet.id, groups=[sg_one.id, sg_two.id])

    interfaces = vpc_conn.get_all_network_interfaces()
    interfaces.should.have.length_of(1)

    created = interfaces[0]
    created.groups.should.have.length_of(2)
    set(group.id for group in created.groups).should.equal(
        set([sg_one.id, sg_two.id]))
|
||||
|
||||
|
||||
@requires_boto_gte("2.12.0")
@mock_ec2_deprecated
def test_elastic_network_interfaces_modify_attribute():
    """Replacing an ENI's group set via modify_network_interface_attribute."""
    vpc_conn = boto.connect_vpc('the_key', 'the_secret')
    vpc = vpc_conn.create_vpc("10.0.0.0/16")
    subnet = vpc_conn.create_subnet(vpc.id, "10.0.0.0/18")
    sg_one = vpc_conn.create_security_group(
        'test security group #1', 'this is a test security group')
    sg_two = vpc_conn.create_security_group(
        'test security group #2', 'this is a test security group')
    vpc_conn.create_network_interface(subnet.id, groups=[sg_one.id])

    interfaces = vpc_conn.get_all_network_interfaces()
    interfaces.should.have.length_of(1)
    created = interfaces[0]
    created.groups.should.have.length_of(1)
    created.groups[0].id.should.equal(sg_one.id)

    # Dry-run modification must fail without changing the group set.
    with assert_raises(EC2ResponseError) as ex:
        vpc_conn.modify_network_interface_attribute(
            created.id, 'groupset', [sg_two.id], dry_run=True)
    ex.exception.error_code.should.equal('DryRunOperation')
    ex.exception.status.should.equal(400)
    ex.exception.message.should.equal(
        'An error occurred (DryRunOperation) when calling the ModifyNetworkInterface operation: Request would have succeeded, but DryRun flag is set')

    vpc_conn.modify_network_interface_attribute(
        created.id, 'groupset', [sg_two.id])

    interfaces = vpc_conn.get_all_network_interfaces()
    interfaces.should.have.length_of(1)
    updated = interfaces[0]
    updated.groups.should.have.length_of(1)
    updated.groups[0].id.should.equal(sg_two.id)
|
||||
|
||||
|
||||
@mock_ec2_deprecated
def test_elastic_network_interfaces_filtering():
    """Filter ENIs by id, by security group, by both, and reject unknown filters."""
    vpc_conn = boto.connect_vpc('the_key', 'the_secret')
    vpc = vpc_conn.create_vpc("10.0.0.0/16")
    subnet = vpc_conn.create_subnet(vpc.id, "10.0.0.0/18")

    sg_one = vpc_conn.create_security_group(
        'test security group #1', 'this is a test security group')
    sg_two = vpc_conn.create_security_group(
        'test security group #2', 'this is a test security group')

    eni_both_groups = vpc_conn.create_network_interface(
        subnet.id, groups=[sg_one.id, sg_two.id])
    eni_one_group = vpc_conn.create_network_interface(
        subnet.id, groups=[sg_one.id])
    vpc_conn.create_network_interface(subnet.id)  # ENI with no groups

    vpc_conn.get_all_network_interfaces().should.have.length_of(3)

    # Filter by NetworkInterfaceId (positional argument).
    found = vpc_conn.get_all_network_interfaces([eni_both_groups.id])
    found.should.have.length_of(1)
    set(eni.id for eni in found).should.equal(set([eni_both_groups.id]))

    # Filter by ENI ID (named filter).
    found = vpc_conn.get_all_network_interfaces(
        filters={'network-interface-id': eni_both_groups.id})
    found.should.have.length_of(1)
    set(eni.id for eni in found).should.equal(set([eni_both_groups.id]))

    # Filter by Security Group.
    found = vpc_conn.get_all_network_interfaces(
        filters={'group-id': sg_one.id})
    found.should.have.length_of(2)
    set(eni.id for eni in found).should.equal(
        set([eni_both_groups.id, eni_one_group.id]))

    # Filter by ENI ID and Security Group together.
    found = vpc_conn.get_all_network_interfaces(
        filters={'network-interface-id': eni_both_groups.id,
                 'group-id': sg_one.id})
    found.should.have.length_of(1)
    set(eni.id for eni in found).should.equal(set([eni_both_groups.id]))

    # Unsupported filter names raise NotImplementedError.
    vpc_conn.get_all_network_interfaces.when.called_with(
        filters={'not-implemented-filter': 'foobar'}).should.throw(NotImplementedError)
|
||||
|
||||
|
||||
@mock_ec2
def test_elastic_network_interfaces_get_by_tag_name():
    """Filter ENIs with the 'tag:Name' filter, including the tagging dry-run path."""
    ec2 = boto3.resource('ec2', region_name='us-west-2')
    ec2_client = boto3.client('ec2', region_name='us-west-2')

    vpc = ec2.create_vpc(CidrBlock='10.0.0.0/16')
    subnet = ec2.create_subnet(
        VpcId=vpc.id, CidrBlock='10.0.0.0/24', AvailabilityZone='us-west-2a')

    interface = ec2.create_network_interface(
        SubnetId=subnet.id, PrivateIpAddress='10.0.10.5')

    # Tagging with DryRun set must fail without applying the tag.
    with assert_raises(ClientError) as ex:
        interface.create_tags(
            Tags=[{'Key': 'Name', 'Value': 'eni1'}], DryRun=True)
    ex.exception.response['Error']['Code'].should.equal('DryRunOperation')
    ex.exception.response['ResponseMetadata'][
        'HTTPStatusCode'].should.equal(400)
    ex.exception.response['Error']['Message'].should.equal(
        'An error occurred (DryRunOperation) when calling the CreateTags operation: Request would have succeeded, but DryRun flag is set')

    interface.create_tags(Tags=[{'Key': 'Name', 'Value': 'eni1'}])

    # The status of the new interface should be 'available'
    waiter = ec2_client.get_waiter('network_interface_available')
    waiter.wait(NetworkInterfaceIds=[interface.id])

    matching = list(ec2.network_interfaces.filter(
        Filters=[{'Name': 'tag:Name', 'Values': ['eni1']}]))
    matching.should.have.length_of(1)

    missing = list(ec2.network_interfaces.filter(
        Filters=[{'Name': 'tag:Name', 'Values': ['wrong-name']}]))
    missing.should.have.length_of(0)
|
||||
|
||||
|
||||
@mock_ec2
def test_elastic_network_interfaces_get_by_availability_zone():
    """Filter ENIs by the availability zone of their subnet."""
    ec2 = boto3.resource('ec2', region_name='us-west-2')
    ec2_client = boto3.client('ec2', region_name='us-west-2')

    vpc = ec2.create_vpc(CidrBlock='10.0.0.0/16')
    subnet_a = ec2.create_subnet(
        VpcId=vpc.id, CidrBlock='10.0.0.0/24', AvailabilityZone='us-west-2a')
    subnet_b = ec2.create_subnet(
        VpcId=vpc.id, CidrBlock='10.0.1.0/24', AvailabilityZone='us-west-2b')

    iface_a = ec2.create_network_interface(
        SubnetId=subnet_a.id, PrivateIpAddress='10.0.0.15')
    iface_b = ec2.create_network_interface(
        SubnetId=subnet_b.id, PrivateIpAddress='10.0.1.15')

    # The status of the new interfaces should be 'available'
    waiter = ec2_client.get_waiter('network_interface_available')
    waiter.wait(NetworkInterfaceIds=[iface_a.id, iface_b.id])

    in_zone_a = list(ec2.network_interfaces.filter(
        Filters=[{'Name': 'availability-zone', 'Values': ['us-west-2a']}]))
    in_zone_a.should.have.length_of(1)

    # No subnet exists in us-west-2c, so nothing matches.
    in_zone_c = list(ec2.network_interfaces.filter(
        Filters=[{'Name': 'availability-zone', 'Values': ['us-west-2c']}]))
    in_zone_c.should.have.length_of(0)
|
||||
|
||||
|
||||
@mock_ec2
def test_elastic_network_interfaces_get_by_private_ip():
    """Filter ENIs by 'private-ip-address' and 'addresses.private-ip-address'."""
    ec2 = boto3.resource('ec2', region_name='us-west-2')
    ec2_client = boto3.client('ec2', region_name='us-west-2')

    vpc = ec2.create_vpc(CidrBlock='10.0.0.0/16')
    subnet = ec2.create_subnet(
        VpcId=vpc.id, CidrBlock='10.0.0.0/24', AvailabilityZone='us-west-2a')

    interface = ec2.create_network_interface(
        SubnetId=subnet.id, PrivateIpAddress='10.0.10.5')

    # The status of the new interface should be 'available'
    waiter = ec2_client.get_waiter('network_interface_available')
    waiter.wait(NetworkInterfaceIds=[interface.id])

    def match_count(filter_name, value):
        # Helper: run a single-valued filter query and count results.
        return list(ec2.network_interfaces.filter(
            Filters=[{'Name': filter_name, 'Values': [value]}]))

    match_count('private-ip-address', '10.0.10.5').should.have.length_of(1)
    match_count('private-ip-address', '10.0.10.10').should.have.length_of(0)
    match_count('addresses.private-ip-address',
                '10.0.10.5').should.have.length_of(1)
    match_count('addresses.private-ip-address',
                '10.0.10.10').should.have.length_of(0)
|
||||
|
||||
|
||||
@mock_ec2
def test_elastic_network_interfaces_get_by_vpc_id():
    """Filter ENIs by the VPC they belong to."""
    ec2 = boto3.resource('ec2', region_name='us-west-2')
    ec2_client = boto3.client('ec2', region_name='us-west-2')

    vpc = ec2.create_vpc(CidrBlock='10.0.0.0/16')
    subnet = ec2.create_subnet(
        VpcId=vpc.id, CidrBlock='10.0.0.0/24', AvailabilityZone='us-west-2a')

    interface = ec2.create_network_interface(
        SubnetId=subnet.id, PrivateIpAddress='10.0.10.5')

    # The status of the new interface should be 'available'
    waiter = ec2_client.get_waiter('network_interface_available')
    waiter.wait(NetworkInterfaceIds=[interface.id])

    in_vpc = list(ec2.network_interfaces.filter(
        Filters=[{'Name': 'vpc-id', 'Values': [subnet.vpc_id]}]))
    in_vpc.should.have.length_of(1)

    # A VPC id that was never created matches nothing.
    elsewhere = list(ec2.network_interfaces.filter(
        Filters=[{'Name': 'vpc-id', 'Values': ['vpc-aaaa1111']}]))
    elsewhere.should.have.length_of(0)
|
||||
|
||||
|
||||
@mock_ec2
def test_elastic_network_interfaces_get_by_subnet_id():
    """Filter ENIs by the subnet they were created in."""
    ec2 = boto3.resource('ec2', region_name='us-west-2')
    ec2_client = boto3.client('ec2', region_name='us-west-2')

    vpc = ec2.create_vpc(CidrBlock='10.0.0.0/16')
    subnet = ec2.create_subnet(
        VpcId=vpc.id, CidrBlock='10.0.0.0/24', AvailabilityZone='us-west-2a')

    interface = ec2.create_network_interface(
        SubnetId=subnet.id, PrivateIpAddress='10.0.10.5')

    # The status of the new interface should be 'available'
    waiter = ec2_client.get_waiter('network_interface_available')
    waiter.wait(NetworkInterfaceIds=[interface.id])

    in_subnet = list(ec2.network_interfaces.filter(
        Filters=[{'Name': 'subnet-id', 'Values': [subnet.id]}]))
    in_subnet.should.have.length_of(1)

    # A subnet id that was never created matches nothing.
    elsewhere = list(ec2.network_interfaces.filter(
        Filters=[{'Name': 'subnet-id', 'Values': ['subnet-aaaa1111']}]))
    elsewhere.should.have.length_of(0)
|
||||
|
||||
|
||||
@mock_ec2_deprecated
@mock_cloudformation_deprecated
def test_elastic_network_interfaces_cloudformation():
    """A CloudFormation stack with an ENI exposes its physical resource id."""
    template_json = json.dumps(vpc_eni.template)
    cfn_conn = boto.cloudformation.connect_to_region("us-west-1")
    cfn_conn.create_stack(
        "test_stack",
        template_body=template_json,
    )
    ec2_conn = boto.ec2.connect_to_region("us-west-1")
    created_eni = ec2_conn.get_all_network_interfaces()[0]

    stack = cfn_conn.describe_stacks()[0]
    eni_resources = [resource for resource in stack.describe_resources()
                     if resource.resource_type == 'AWS::EC2::NetworkInterface']
    eni_resources[0].physical_resource_id.should.equal(created_eni.id)
|
||||
from __future__ import unicode_literals
|
||||
# Ensure 'assert_raises' context manager support for Python 2.6
|
||||
import tests.backport_assert_raises
|
||||
from nose.tools import assert_raises
|
||||
|
||||
import boto3
|
||||
from botocore.exceptions import ClientError
|
||||
import boto
|
||||
import boto.cloudformation
|
||||
import boto.ec2
|
||||
from boto.exception import EC2ResponseError
|
||||
import sure # noqa
|
||||
|
||||
from moto import mock_ec2, mock_cloudformation_deprecated, mock_ec2_deprecated
|
||||
from tests.helpers import requires_boto_gte
|
||||
from tests.test_cloudformation.fixtures import vpc_eni
|
||||
import json
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_elastic_network_interfaces():
|
||||
conn = boto.connect_vpc('the_key', 'the_secret')
|
||||
vpc = conn.create_vpc("10.0.0.0/16")
|
||||
subnet = conn.create_subnet(vpc.id, "10.0.0.0/18")
|
||||
|
||||
with assert_raises(EC2ResponseError) as ex:
|
||||
eni = conn.create_network_interface(subnet.id, dry_run=True)
|
||||
ex.exception.error_code.should.equal('DryRunOperation')
|
||||
ex.exception.status.should.equal(400)
|
||||
ex.exception.message.should.equal(
|
||||
'An error occurred (DryRunOperation) when calling the CreateNetworkInterface operation: Request would have succeeded, but DryRun flag is set')
|
||||
|
||||
eni = conn.create_network_interface(subnet.id)
|
||||
|
||||
all_enis = conn.get_all_network_interfaces()
|
||||
all_enis.should.have.length_of(1)
|
||||
eni = all_enis[0]
|
||||
eni.groups.should.have.length_of(0)
|
||||
eni.private_ip_addresses.should.have.length_of(0)
|
||||
|
||||
with assert_raises(EC2ResponseError) as ex:
|
||||
conn.delete_network_interface(eni.id, dry_run=True)
|
||||
ex.exception.error_code.should.equal('DryRunOperation')
|
||||
ex.exception.status.should.equal(400)
|
||||
ex.exception.message.should.equal(
|
||||
'An error occurred (DryRunOperation) when calling the DeleteNetworkInterface operation: Request would have succeeded, but DryRun flag is set')
|
||||
|
||||
conn.delete_network_interface(eni.id)
|
||||
|
||||
all_enis = conn.get_all_network_interfaces()
|
||||
all_enis.should.have.length_of(0)
|
||||
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
conn.delete_network_interface(eni.id)
|
||||
cm.exception.error_code.should.equal('InvalidNetworkInterfaceID.NotFound')
|
||||
cm.exception.status.should.equal(400)
|
||||
cm.exception.request_id.should_not.be.none
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_elastic_network_interfaces_subnet_validation():
|
||||
conn = boto.connect_vpc('the_key', 'the_secret')
|
||||
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
conn.create_network_interface("subnet-abcd1234")
|
||||
cm.exception.error_code.should.equal('InvalidSubnetID.NotFound')
|
||||
cm.exception.status.should.equal(400)
|
||||
cm.exception.request_id.should_not.be.none
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_elastic_network_interfaces_with_private_ip():
|
||||
conn = boto.connect_vpc('the_key', 'the_secret')
|
||||
vpc = conn.create_vpc("10.0.0.0/16")
|
||||
subnet = conn.create_subnet(vpc.id, "10.0.0.0/18")
|
||||
private_ip = "54.0.0.1"
|
||||
eni = conn.create_network_interface(subnet.id, private_ip)
|
||||
|
||||
all_enis = conn.get_all_network_interfaces()
|
||||
all_enis.should.have.length_of(1)
|
||||
|
||||
eni = all_enis[0]
|
||||
eni.groups.should.have.length_of(0)
|
||||
|
||||
eni.private_ip_addresses.should.have.length_of(1)
|
||||
eni.private_ip_addresses[0].private_ip_address.should.equal(private_ip)
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_elastic_network_interfaces_with_groups():
|
||||
conn = boto.connect_vpc('the_key', 'the_secret')
|
||||
vpc = conn.create_vpc("10.0.0.0/16")
|
||||
subnet = conn.create_subnet(vpc.id, "10.0.0.0/18")
|
||||
security_group1 = conn.create_security_group(
|
||||
'test security group #1', 'this is a test security group')
|
||||
security_group2 = conn.create_security_group(
|
||||
'test security group #2', 'this is a test security group')
|
||||
conn.create_network_interface(
|
||||
subnet.id, groups=[security_group1.id, security_group2.id])
|
||||
|
||||
all_enis = conn.get_all_network_interfaces()
|
||||
all_enis.should.have.length_of(1)
|
||||
|
||||
eni = all_enis[0]
|
||||
eni.groups.should.have.length_of(2)
|
||||
set([group.id for group in eni.groups]).should.equal(
|
||||
set([security_group1.id, security_group2.id]))
|
||||
|
||||
|
||||
@requires_boto_gte("2.12.0")
|
||||
@mock_ec2_deprecated
|
||||
def test_elastic_network_interfaces_modify_attribute():
|
||||
conn = boto.connect_vpc('the_key', 'the_secret')
|
||||
vpc = conn.create_vpc("10.0.0.0/16")
|
||||
subnet = conn.create_subnet(vpc.id, "10.0.0.0/18")
|
||||
security_group1 = conn.create_security_group(
|
||||
'test security group #1', 'this is a test security group')
|
||||
security_group2 = conn.create_security_group(
|
||||
'test security group #2', 'this is a test security group')
|
||||
conn.create_network_interface(subnet.id, groups=[security_group1.id])
|
||||
|
||||
all_enis = conn.get_all_network_interfaces()
|
||||
all_enis.should.have.length_of(1)
|
||||
|
||||
eni = all_enis[0]
|
||||
eni.groups.should.have.length_of(1)
|
||||
eni.groups[0].id.should.equal(security_group1.id)
|
||||
|
||||
with assert_raises(EC2ResponseError) as ex:
|
||||
conn.modify_network_interface_attribute(
|
||||
eni.id, 'groupset', [security_group2.id], dry_run=True)
|
||||
ex.exception.error_code.should.equal('DryRunOperation')
|
||||
ex.exception.status.should.equal(400)
|
||||
ex.exception.message.should.equal(
|
||||
'An error occurred (DryRunOperation) when calling the ModifyNetworkInterface operation: Request would have succeeded, but DryRun flag is set')
|
||||
|
||||
conn.modify_network_interface_attribute(
|
||||
eni.id, 'groupset', [security_group2.id])
|
||||
|
||||
all_enis = conn.get_all_network_interfaces()
|
||||
all_enis.should.have.length_of(1)
|
||||
|
||||
eni = all_enis[0]
|
||||
eni.groups.should.have.length_of(1)
|
||||
eni.groups[0].id.should.equal(security_group2.id)
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_elastic_network_interfaces_filtering():
|
||||
conn = boto.connect_vpc('the_key', 'the_secret')
|
||||
vpc = conn.create_vpc("10.0.0.0/16")
|
||||
subnet = conn.create_subnet(vpc.id, "10.0.0.0/18")
|
||||
|
||||
security_group1 = conn.create_security_group(
|
||||
'test security group #1', 'this is a test security group')
|
||||
security_group2 = conn.create_security_group(
|
||||
'test security group #2', 'this is a test security group')
|
||||
|
||||
eni1 = conn.create_network_interface(
|
||||
subnet.id, groups=[security_group1.id, security_group2.id])
|
||||
eni2 = conn.create_network_interface(
|
||||
subnet.id, groups=[security_group1.id])
|
||||
eni3 = conn.create_network_interface(subnet.id)
|
||||
|
||||
all_enis = conn.get_all_network_interfaces()
|
||||
all_enis.should.have.length_of(3)
|
||||
|
||||
# Filter by NetworkInterfaceId
|
||||
enis_by_id = conn.get_all_network_interfaces([eni1.id])
|
||||
enis_by_id.should.have.length_of(1)
|
||||
set([eni.id for eni in enis_by_id]).should.equal(set([eni1.id]))
|
||||
|
||||
# Filter by ENI ID
|
||||
enis_by_id = conn.get_all_network_interfaces(
|
||||
filters={'network-interface-id': eni1.id})
|
||||
enis_by_id.should.have.length_of(1)
|
||||
set([eni.id for eni in enis_by_id]).should.equal(set([eni1.id]))
|
||||
|
||||
# Filter by Security Group
|
||||
enis_by_group = conn.get_all_network_interfaces(
|
||||
filters={'group-id': security_group1.id})
|
||||
enis_by_group.should.have.length_of(2)
|
||||
set([eni.id for eni in enis_by_group]).should.equal(set([eni1.id, eni2.id]))
|
||||
|
||||
# Filter by ENI ID and Security Group
|
||||
enis_by_group = conn.get_all_network_interfaces(
|
||||
filters={'network-interface-id': eni1.id, 'group-id': security_group1.id})
|
||||
enis_by_group.should.have.length_of(1)
|
||||
set([eni.id for eni in enis_by_group]).should.equal(set([eni1.id]))
|
||||
|
||||
# Unsupported filter
|
||||
conn.get_all_network_interfaces.when.called_with(
|
||||
filters={'not-implemented-filter': 'foobar'}).should.throw(NotImplementedError)
|
||||
|
||||
|
||||
@mock_ec2
|
||||
def test_elastic_network_interfaces_get_by_tag_name():
|
||||
ec2 = boto3.resource('ec2', region_name='us-west-2')
|
||||
ec2_client = boto3.client('ec2', region_name='us-west-2')
|
||||
|
||||
vpc = ec2.create_vpc(CidrBlock='10.0.0.0/16')
|
||||
subnet = ec2.create_subnet(
|
||||
VpcId=vpc.id, CidrBlock='10.0.0.0/24', AvailabilityZone='us-west-2a')
|
||||
|
||||
eni1 = ec2.create_network_interface(
|
||||
SubnetId=subnet.id, PrivateIpAddress='10.0.10.5')
|
||||
|
||||
with assert_raises(ClientError) as ex:
|
||||
eni1.create_tags(Tags=[{'Key': 'Name', 'Value': 'eni1'}], DryRun=True)
|
||||
ex.exception.response['Error']['Code'].should.equal('DryRunOperation')
|
||||
ex.exception.response['ResponseMetadata'][
|
||||
'HTTPStatusCode'].should.equal(400)
|
||||
ex.exception.response['Error']['Message'].should.equal(
|
||||
'An error occurred (DryRunOperation) when calling the CreateTags operation: Request would have succeeded, but DryRun flag is set')
|
||||
|
||||
eni1.create_tags(Tags=[{'Key': 'Name', 'Value': 'eni1'}])
|
||||
|
||||
# The status of the new interface should be 'available'
|
||||
waiter = ec2_client.get_waiter('network_interface_available')
|
||||
waiter.wait(NetworkInterfaceIds=[eni1.id])
|
||||
|
||||
filters = [{'Name': 'tag:Name', 'Values': ['eni1']}]
|
||||
enis = list(ec2.network_interfaces.filter(Filters=filters))
|
||||
enis.should.have.length_of(1)
|
||||
|
||||
filters = [{'Name': 'tag:Name', 'Values': ['wrong-name']}]
|
||||
enis = list(ec2.network_interfaces.filter(Filters=filters))
|
||||
enis.should.have.length_of(0)
|
||||
|
||||
|
||||
@mock_ec2
|
||||
def test_elastic_network_interfaces_get_by_availability_zone():
|
||||
ec2 = boto3.resource('ec2', region_name='us-west-2')
|
||||
ec2_client = boto3.client('ec2', region_name='us-west-2')
|
||||
|
||||
vpc = ec2.create_vpc(CidrBlock='10.0.0.0/16')
|
||||
subnet1 = ec2.create_subnet(
|
||||
VpcId=vpc.id, CidrBlock='10.0.0.0/24', AvailabilityZone='us-west-2a')
|
||||
|
||||
subnet2 = ec2.create_subnet(
|
||||
VpcId=vpc.id, CidrBlock='10.0.1.0/24', AvailabilityZone='us-west-2b')
|
||||
|
||||
eni1 = ec2.create_network_interface(
|
||||
SubnetId=subnet1.id, PrivateIpAddress='10.0.0.15')
|
||||
|
||||
eni2 = ec2.create_network_interface(
|
||||
SubnetId=subnet2.id, PrivateIpAddress='10.0.1.15')
|
||||
|
||||
# The status of the new interface should be 'available'
|
||||
waiter = ec2_client.get_waiter('network_interface_available')
|
||||
waiter.wait(NetworkInterfaceIds=[eni1.id, eni2.id])
|
||||
|
||||
filters = [{'Name': 'availability-zone', 'Values': ['us-west-2a']}]
|
||||
enis = list(ec2.network_interfaces.filter(Filters=filters))
|
||||
enis.should.have.length_of(1)
|
||||
|
||||
filters = [{'Name': 'availability-zone', 'Values': ['us-west-2c']}]
|
||||
enis = list(ec2.network_interfaces.filter(Filters=filters))
|
||||
enis.should.have.length_of(0)
|
||||
|
||||
|
||||
@mock_ec2
|
||||
def test_elastic_network_interfaces_get_by_private_ip():
|
||||
ec2 = boto3.resource('ec2', region_name='us-west-2')
|
||||
ec2_client = boto3.client('ec2', region_name='us-west-2')
|
||||
|
||||
vpc = ec2.create_vpc(CidrBlock='10.0.0.0/16')
|
||||
subnet = ec2.create_subnet(
|
||||
VpcId=vpc.id, CidrBlock='10.0.0.0/24', AvailabilityZone='us-west-2a')
|
||||
|
||||
eni1 = ec2.create_network_interface(
|
||||
SubnetId=subnet.id, PrivateIpAddress='10.0.10.5')
|
||||
|
||||
# The status of the new interface should be 'available'
|
||||
waiter = ec2_client.get_waiter('network_interface_available')
|
||||
waiter.wait(NetworkInterfaceIds=[eni1.id])
|
||||
|
||||
filters = [{'Name': 'private-ip-address', 'Values': ['10.0.10.5']}]
|
||||
enis = list(ec2.network_interfaces.filter(Filters=filters))
|
||||
enis.should.have.length_of(1)
|
||||
|
||||
filters = [{'Name': 'private-ip-address', 'Values': ['10.0.10.10']}]
|
||||
enis = list(ec2.network_interfaces.filter(Filters=filters))
|
||||
enis.should.have.length_of(0)
|
||||
|
||||
filters = [{'Name': 'addresses.private-ip-address', 'Values': ['10.0.10.5']}]
|
||||
enis = list(ec2.network_interfaces.filter(Filters=filters))
|
||||
enis.should.have.length_of(1)
|
||||
|
||||
filters = [{'Name': 'addresses.private-ip-address', 'Values': ['10.0.10.10']}]
|
||||
enis = list(ec2.network_interfaces.filter(Filters=filters))
|
||||
enis.should.have.length_of(0)
|
||||
|
||||
|
||||
@mock_ec2
|
||||
def test_elastic_network_interfaces_get_by_vpc_id():
|
||||
ec2 = boto3.resource('ec2', region_name='us-west-2')
|
||||
ec2_client = boto3.client('ec2', region_name='us-west-2')
|
||||
|
||||
vpc = ec2.create_vpc(CidrBlock='10.0.0.0/16')
|
||||
subnet = ec2.create_subnet(
|
||||
VpcId=vpc.id, CidrBlock='10.0.0.0/24', AvailabilityZone='us-west-2a')
|
||||
|
||||
eni1 = ec2.create_network_interface(
|
||||
SubnetId=subnet.id, PrivateIpAddress='10.0.10.5')
|
||||
|
||||
# The status of the new interface should be 'available'
|
||||
waiter = ec2_client.get_waiter('network_interface_available')
|
||||
waiter.wait(NetworkInterfaceIds=[eni1.id])
|
||||
|
||||
filters = [{'Name': 'vpc-id', 'Values': [subnet.vpc_id]}]
|
||||
enis = list(ec2.network_interfaces.filter(Filters=filters))
|
||||
enis.should.have.length_of(1)
|
||||
|
||||
filters = [{'Name': 'vpc-id', 'Values': ['vpc-aaaa1111']}]
|
||||
enis = list(ec2.network_interfaces.filter(Filters=filters))
|
||||
enis.should.have.length_of(0)
|
||||
|
||||
|
||||
@mock_ec2
|
||||
def test_elastic_network_interfaces_get_by_subnet_id():
|
||||
ec2 = boto3.resource('ec2', region_name='us-west-2')
|
||||
ec2_client = boto3.client('ec2', region_name='us-west-2')
|
||||
|
||||
vpc = ec2.create_vpc(CidrBlock='10.0.0.0/16')
|
||||
subnet = ec2.create_subnet(
|
||||
VpcId=vpc.id, CidrBlock='10.0.0.0/24', AvailabilityZone='us-west-2a')
|
||||
|
||||
eni1 = ec2.create_network_interface(
|
||||
SubnetId=subnet.id, PrivateIpAddress='10.0.10.5')
|
||||
|
||||
# The status of the new interface should be 'available'
|
||||
waiter = ec2_client.get_waiter('network_interface_available')
|
||||
waiter.wait(NetworkInterfaceIds=[eni1.id])
|
||||
|
||||
filters = [{'Name': 'subnet-id', 'Values': [subnet.id]}]
|
||||
enis = list(ec2.network_interfaces.filter(Filters=filters))
|
||||
enis.should.have.length_of(1)
|
||||
|
||||
filters = [{'Name': 'subnet-id', 'Values': ['subnet-aaaa1111']}]
|
||||
enis = list(ec2.network_interfaces.filter(Filters=filters))
|
||||
enis.should.have.length_of(0)
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
@mock_cloudformation_deprecated
|
||||
def test_elastic_network_interfaces_cloudformation():
|
||||
template = vpc_eni.template
|
||||
template_json = json.dumps(template)
|
||||
conn = boto.cloudformation.connect_to_region("us-west-1")
|
||||
conn.create_stack(
|
||||
"test_stack",
|
||||
template_body=template_json,
|
||||
)
|
||||
ec2_conn = boto.ec2.connect_to_region("us-west-1")
|
||||
eni = ec2_conn.get_all_network_interfaces()[0]
|
||||
|
||||
stack = conn.describe_stacks()[0]
|
||||
resources = stack.describe_resources()
|
||||
cfn_eni = [resource for resource in resources if resource.resource_type ==
|
||||
'AWS::EC2::NetworkInterface'][0]
|
||||
cfn_eni.physical_resource_id.should.equal(eni.id)
|
||||
|
|
|
|||
|
|
@ -1,42 +1,42 @@
|
|||
from __future__ import unicode_literals
|
||||
# Ensure 'assert_raises' context manager support for Python 2.6
|
||||
import tests.backport_assert_raises
|
||||
from nose.tools import assert_raises
|
||||
|
||||
import boto
|
||||
import boto3
|
||||
from boto.exception import EC2ResponseError
|
||||
import sure # noqa
|
||||
|
||||
from moto import mock_ec2_deprecated, mock_ec2
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_console_output():
|
||||
conn = boto.connect_ec2('the_key', 'the_secret')
|
||||
reservation = conn.run_instances('ami-1234abcd')
|
||||
instance_id = reservation.instances[0].id
|
||||
output = conn.get_console_output(instance_id)
|
||||
output.output.should_not.equal(None)
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_console_output_without_instance():
|
||||
conn = boto.connect_ec2('the_key', 'the_secret')
|
||||
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
conn.get_console_output('i-1234abcd')
|
||||
cm.exception.code.should.equal('InvalidInstanceID.NotFound')
|
||||
cm.exception.status.should.equal(400)
|
||||
cm.exception.request_id.should_not.be.none
|
||||
|
||||
|
||||
@mock_ec2
|
||||
def test_console_output_boto3():
|
||||
conn = boto3.resource('ec2', 'us-east-1')
|
||||
instances = conn.create_instances(ImageId='ami-1234abcd',
|
||||
MinCount=1,
|
||||
MaxCount=1)
|
||||
|
||||
output = instances[0].console_output()
|
||||
output.get('Output').should_not.equal(None)
|
||||
from __future__ import unicode_literals
|
||||
# Ensure 'assert_raises' context manager support for Python 2.6
|
||||
import tests.backport_assert_raises
|
||||
from nose.tools import assert_raises
|
||||
|
||||
import boto
|
||||
import boto3
|
||||
from boto.exception import EC2ResponseError
|
||||
import sure # noqa
|
||||
|
||||
from moto import mock_ec2_deprecated, mock_ec2
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_console_output():
|
||||
conn = boto.connect_ec2('the_key', 'the_secret')
|
||||
reservation = conn.run_instances('ami-1234abcd')
|
||||
instance_id = reservation.instances[0].id
|
||||
output = conn.get_console_output(instance_id)
|
||||
output.output.should_not.equal(None)
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_console_output_without_instance():
|
||||
conn = boto.connect_ec2('the_key', 'the_secret')
|
||||
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
conn.get_console_output('i-1234abcd')
|
||||
cm.exception.code.should.equal('InvalidInstanceID.NotFound')
|
||||
cm.exception.status.should.equal(400)
|
||||
cm.exception.request_id.should_not.be.none
|
||||
|
||||
|
||||
@mock_ec2
|
||||
def test_console_output_boto3():
|
||||
conn = boto3.resource('ec2', 'us-east-1')
|
||||
instances = conn.create_instances(ImageId='ami-1234abcd',
|
||||
MinCount=1,
|
||||
MaxCount=1)
|
||||
|
||||
output = instances[0].console_output()
|
||||
output.get('Output').should_not.equal(None)
|
||||
|
|
|
|||
File diff suppressed because it is too large
Load diff
|
|
@ -1,269 +1,269 @@
|
|||
from __future__ import unicode_literals
|
||||
# Ensure 'assert_raises' context manager support for Python 2.6
|
||||
import tests.backport_assert_raises
|
||||
from nose.tools import assert_raises
|
||||
|
||||
import re
|
||||
|
||||
import boto
|
||||
from boto.exception import EC2ResponseError
|
||||
|
||||
import sure # noqa
|
||||
|
||||
from moto import mock_ec2_deprecated
|
||||
|
||||
|
||||
VPC_CIDR = "10.0.0.0/16"
|
||||
BAD_VPC = "vpc-deadbeef"
|
||||
BAD_IGW = "igw-deadbeef"
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_igw_create():
|
||||
""" internet gateway create """
|
||||
conn = boto.connect_vpc('the_key', 'the_secret')
|
||||
|
||||
conn.get_all_internet_gateways().should.have.length_of(0)
|
||||
|
||||
with assert_raises(EC2ResponseError) as ex:
|
||||
igw = conn.create_internet_gateway(dry_run=True)
|
||||
ex.exception.error_code.should.equal('DryRunOperation')
|
||||
ex.exception.status.should.equal(400)
|
||||
ex.exception.message.should.equal(
|
||||
'An error occurred (DryRunOperation) when calling the CreateInternetGateway operation: Request would have succeeded, but DryRun flag is set')
|
||||
|
||||
igw = conn.create_internet_gateway()
|
||||
conn.get_all_internet_gateways().should.have.length_of(1)
|
||||
igw.id.should.match(r'igw-[0-9a-f]+')
|
||||
|
||||
igw = conn.get_all_internet_gateways()[0]
|
||||
igw.attachments.should.have.length_of(0)
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_igw_attach():
|
||||
""" internet gateway attach """
|
||||
conn = boto.connect_vpc('the_key', 'the_secret')
|
||||
igw = conn.create_internet_gateway()
|
||||
vpc = conn.create_vpc(VPC_CIDR)
|
||||
|
||||
with assert_raises(EC2ResponseError) as ex:
|
||||
conn.attach_internet_gateway(igw.id, vpc.id, dry_run=True)
|
||||
ex.exception.error_code.should.equal('DryRunOperation')
|
||||
ex.exception.status.should.equal(400)
|
||||
ex.exception.message.should.equal(
|
||||
'An error occurred (DryRunOperation) when calling the AttachInternetGateway operation: Request would have succeeded, but DryRun flag is set')
|
||||
|
||||
conn.attach_internet_gateway(igw.id, vpc.id)
|
||||
|
||||
igw = conn.get_all_internet_gateways()[0]
|
||||
igw.attachments[0].vpc_id.should.be.equal(vpc.id)
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_igw_attach_bad_vpc():
|
||||
""" internet gateway fail to attach w/ bad vpc """
|
||||
conn = boto.connect_vpc('the_key', 'the_secret')
|
||||
igw = conn.create_internet_gateway()
|
||||
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
conn.attach_internet_gateway(igw.id, BAD_VPC)
|
||||
cm.exception.code.should.equal('InvalidVpcID.NotFound')
|
||||
cm.exception.status.should.equal(400)
|
||||
cm.exception.request_id.should_not.be.none
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_igw_attach_twice():
|
||||
""" internet gateway fail to attach twice """
|
||||
conn = boto.connect_vpc('the_key', 'the_secret')
|
||||
igw = conn.create_internet_gateway()
|
||||
vpc1 = conn.create_vpc(VPC_CIDR)
|
||||
vpc2 = conn.create_vpc(VPC_CIDR)
|
||||
conn.attach_internet_gateway(igw.id, vpc1.id)
|
||||
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
conn.attach_internet_gateway(igw.id, vpc2.id)
|
||||
cm.exception.code.should.equal('Resource.AlreadyAssociated')
|
||||
cm.exception.status.should.equal(400)
|
||||
cm.exception.request_id.should_not.be.none
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_igw_detach():
|
||||
""" internet gateway detach"""
|
||||
conn = boto.connect_vpc('the_key', 'the_secret')
|
||||
igw = conn.create_internet_gateway()
|
||||
vpc = conn.create_vpc(VPC_CIDR)
|
||||
conn.attach_internet_gateway(igw.id, vpc.id)
|
||||
|
||||
with assert_raises(EC2ResponseError) as ex:
|
||||
conn.detach_internet_gateway(igw.id, vpc.id, dry_run=True)
|
||||
ex.exception.error_code.should.equal('DryRunOperation')
|
||||
ex.exception.status.should.equal(400)
|
||||
ex.exception.message.should.equal(
|
||||
'An error occurred (DryRunOperation) when calling the DetachInternetGateway operation: Request would have succeeded, but DryRun flag is set')
|
||||
|
||||
conn.detach_internet_gateway(igw.id, vpc.id)
|
||||
igw = conn.get_all_internet_gateways()[0]
|
||||
igw.attachments.should.have.length_of(0)
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_igw_detach_wrong_vpc():
|
||||
""" internet gateway fail to detach w/ wrong vpc """
|
||||
conn = boto.connect_vpc('the_key', 'the_secret')
|
||||
igw = conn.create_internet_gateway()
|
||||
vpc1 = conn.create_vpc(VPC_CIDR)
|
||||
vpc2 = conn.create_vpc(VPC_CIDR)
|
||||
conn.attach_internet_gateway(igw.id, vpc1.id)
|
||||
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
conn.detach_internet_gateway(igw.id, vpc2.id)
|
||||
cm.exception.code.should.equal('Gateway.NotAttached')
|
||||
cm.exception.status.should.equal(400)
|
||||
cm.exception.request_id.should_not.be.none
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_igw_detach_invalid_vpc():
|
||||
""" internet gateway fail to detach w/ invalid vpc """
|
||||
conn = boto.connect_vpc('the_key', 'the_secret')
|
||||
igw = conn.create_internet_gateway()
|
||||
vpc = conn.create_vpc(VPC_CIDR)
|
||||
conn.attach_internet_gateway(igw.id, vpc.id)
|
||||
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
conn.detach_internet_gateway(igw.id, BAD_VPC)
|
||||
cm.exception.code.should.equal('Gateway.NotAttached')
|
||||
cm.exception.status.should.equal(400)
|
||||
cm.exception.request_id.should_not.be.none
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_igw_detach_unattached():
|
||||
""" internet gateway fail to detach unattached """
|
||||
conn = boto.connect_vpc('the_key', 'the_secret')
|
||||
igw = conn.create_internet_gateway()
|
||||
vpc = conn.create_vpc(VPC_CIDR)
|
||||
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
conn.detach_internet_gateway(igw.id, vpc.id)
|
||||
cm.exception.code.should.equal('Gateway.NotAttached')
|
||||
cm.exception.status.should.equal(400)
|
||||
cm.exception.request_id.should_not.be.none
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_igw_delete():
|
||||
""" internet gateway delete"""
|
||||
conn = boto.connect_vpc('the_key', 'the_secret')
|
||||
vpc = conn.create_vpc(VPC_CIDR)
|
||||
conn.get_all_internet_gateways().should.have.length_of(0)
|
||||
igw = conn.create_internet_gateway()
|
||||
conn.get_all_internet_gateways().should.have.length_of(1)
|
||||
|
||||
with assert_raises(EC2ResponseError) as ex:
|
||||
conn.delete_internet_gateway(igw.id, dry_run=True)
|
||||
ex.exception.error_code.should.equal('DryRunOperation')
|
||||
ex.exception.status.should.equal(400)
|
||||
ex.exception.message.should.equal(
|
||||
'An error occurred (DryRunOperation) when calling the DeleteInternetGateway operation: Request would have succeeded, but DryRun flag is set')
|
||||
|
||||
conn.delete_internet_gateway(igw.id)
|
||||
conn.get_all_internet_gateways().should.have.length_of(0)
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_igw_delete_attached():
|
||||
""" internet gateway fail to delete attached """
|
||||
conn = boto.connect_vpc('the_key', 'the_secret')
|
||||
igw = conn.create_internet_gateway()
|
||||
vpc = conn.create_vpc(VPC_CIDR)
|
||||
conn.attach_internet_gateway(igw.id, vpc.id)
|
||||
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
conn.delete_internet_gateway(igw.id)
|
||||
cm.exception.code.should.equal('DependencyViolation')
|
||||
cm.exception.status.should.equal(400)
|
||||
cm.exception.request_id.should_not.be.none
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_igw_desribe():
|
||||
""" internet gateway fetch by id """
|
||||
conn = boto.connect_vpc('the_key', 'the_secret')
|
||||
igw = conn.create_internet_gateway()
|
||||
igw_by_search = conn.get_all_internet_gateways([igw.id])[0]
|
||||
igw.id.should.equal(igw_by_search.id)
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_igw_describe_bad_id():
|
||||
""" internet gateway fail to fetch by bad id """
|
||||
conn = boto.connect_vpc('the_key', 'the_secret')
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
conn.get_all_internet_gateways([BAD_IGW])
|
||||
cm.exception.code.should.equal('InvalidInternetGatewayID.NotFound')
|
||||
cm.exception.status.should.equal(400)
|
||||
cm.exception.request_id.should_not.be.none
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_igw_filter_by_vpc_id():
|
||||
""" internet gateway filter by vpc id """
|
||||
conn = boto.connect_vpc('the_key', 'the_secret')
|
||||
|
||||
igw1 = conn.create_internet_gateway()
|
||||
igw2 = conn.create_internet_gateway()
|
||||
vpc = conn.create_vpc(VPC_CIDR)
|
||||
conn.attach_internet_gateway(igw1.id, vpc.id)
|
||||
|
||||
result = conn.get_all_internet_gateways(
|
||||
filters={"attachment.vpc-id": vpc.id})
|
||||
result.should.have.length_of(1)
|
||||
result[0].id.should.equal(igw1.id)
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_igw_filter_by_tags():
|
||||
""" internet gateway filter by vpc id """
|
||||
conn = boto.connect_vpc('the_key', 'the_secret')
|
||||
|
||||
igw1 = conn.create_internet_gateway()
|
||||
igw2 = conn.create_internet_gateway()
|
||||
igw1.add_tag("tests", "yes")
|
||||
|
||||
result = conn.get_all_internet_gateways(filters={"tag:tests": "yes"})
|
||||
result.should.have.length_of(1)
|
||||
result[0].id.should.equal(igw1.id)
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_igw_filter_by_internet_gateway_id():
|
||||
""" internet gateway filter by internet gateway id """
|
||||
conn = boto.connect_vpc('the_key', 'the_secret')
|
||||
|
||||
igw1 = conn.create_internet_gateway()
|
||||
igw2 = conn.create_internet_gateway()
|
||||
|
||||
result = conn.get_all_internet_gateways(
|
||||
filters={"internet-gateway-id": igw1.id})
|
||||
result.should.have.length_of(1)
|
||||
result[0].id.should.equal(igw1.id)
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_igw_filter_by_attachment_state():
|
||||
""" internet gateway filter by attachment state """
|
||||
conn = boto.connect_vpc('the_key', 'the_secret')
|
||||
|
||||
igw1 = conn.create_internet_gateway()
|
||||
igw2 = conn.create_internet_gateway()
|
||||
vpc = conn.create_vpc(VPC_CIDR)
|
||||
conn.attach_internet_gateway(igw1.id, vpc.id)
|
||||
|
||||
result = conn.get_all_internet_gateways(
|
||||
filters={"attachment.state": "available"})
|
||||
result.should.have.length_of(1)
|
||||
result[0].id.should.equal(igw1.id)
|
||||
from __future__ import unicode_literals
|
||||
# Ensure 'assert_raises' context manager support for Python 2.6
|
||||
import tests.backport_assert_raises
|
||||
from nose.tools import assert_raises
|
||||
|
||||
import re
|
||||
|
||||
import boto
|
||||
from boto.exception import EC2ResponseError
|
||||
|
||||
import sure # noqa
|
||||
|
||||
from moto import mock_ec2_deprecated
|
||||
|
||||
|
||||
VPC_CIDR = "10.0.0.0/16"
|
||||
BAD_VPC = "vpc-deadbeef"
|
||||
BAD_IGW = "igw-deadbeef"
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_igw_create():
|
||||
""" internet gateway create """
|
||||
conn = boto.connect_vpc('the_key', 'the_secret')
|
||||
|
||||
conn.get_all_internet_gateways().should.have.length_of(0)
|
||||
|
||||
with assert_raises(EC2ResponseError) as ex:
|
||||
igw = conn.create_internet_gateway(dry_run=True)
|
||||
ex.exception.error_code.should.equal('DryRunOperation')
|
||||
ex.exception.status.should.equal(400)
|
||||
ex.exception.message.should.equal(
|
||||
'An error occurred (DryRunOperation) when calling the CreateInternetGateway operation: Request would have succeeded, but DryRun flag is set')
|
||||
|
||||
igw = conn.create_internet_gateway()
|
||||
conn.get_all_internet_gateways().should.have.length_of(1)
|
||||
igw.id.should.match(r'igw-[0-9a-f]+')
|
||||
|
||||
igw = conn.get_all_internet_gateways()[0]
|
||||
igw.attachments.should.have.length_of(0)
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_igw_attach():
|
||||
""" internet gateway attach """
|
||||
conn = boto.connect_vpc('the_key', 'the_secret')
|
||||
igw = conn.create_internet_gateway()
|
||||
vpc = conn.create_vpc(VPC_CIDR)
|
||||
|
||||
with assert_raises(EC2ResponseError) as ex:
|
||||
conn.attach_internet_gateway(igw.id, vpc.id, dry_run=True)
|
||||
ex.exception.error_code.should.equal('DryRunOperation')
|
||||
ex.exception.status.should.equal(400)
|
||||
ex.exception.message.should.equal(
|
||||
'An error occurred (DryRunOperation) when calling the AttachInternetGateway operation: Request would have succeeded, but DryRun flag is set')
|
||||
|
||||
conn.attach_internet_gateway(igw.id, vpc.id)
|
||||
|
||||
igw = conn.get_all_internet_gateways()[0]
|
||||
igw.attachments[0].vpc_id.should.be.equal(vpc.id)
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_igw_attach_bad_vpc():
|
||||
""" internet gateway fail to attach w/ bad vpc """
|
||||
conn = boto.connect_vpc('the_key', 'the_secret')
|
||||
igw = conn.create_internet_gateway()
|
||||
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
conn.attach_internet_gateway(igw.id, BAD_VPC)
|
||||
cm.exception.code.should.equal('InvalidVpcID.NotFound')
|
||||
cm.exception.status.should.equal(400)
|
||||
cm.exception.request_id.should_not.be.none
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_igw_attach_twice():
|
||||
""" internet gateway fail to attach twice """
|
||||
conn = boto.connect_vpc('the_key', 'the_secret')
|
||||
igw = conn.create_internet_gateway()
|
||||
vpc1 = conn.create_vpc(VPC_CIDR)
|
||||
vpc2 = conn.create_vpc(VPC_CIDR)
|
||||
conn.attach_internet_gateway(igw.id, vpc1.id)
|
||||
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
conn.attach_internet_gateway(igw.id, vpc2.id)
|
||||
cm.exception.code.should.equal('Resource.AlreadyAssociated')
|
||||
cm.exception.status.should.equal(400)
|
||||
cm.exception.request_id.should_not.be.none
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_igw_detach():
|
||||
""" internet gateway detach"""
|
||||
conn = boto.connect_vpc('the_key', 'the_secret')
|
||||
igw = conn.create_internet_gateway()
|
||||
vpc = conn.create_vpc(VPC_CIDR)
|
||||
conn.attach_internet_gateway(igw.id, vpc.id)
|
||||
|
||||
with assert_raises(EC2ResponseError) as ex:
|
||||
conn.detach_internet_gateway(igw.id, vpc.id, dry_run=True)
|
||||
ex.exception.error_code.should.equal('DryRunOperation')
|
||||
ex.exception.status.should.equal(400)
|
||||
ex.exception.message.should.equal(
|
||||
'An error occurred (DryRunOperation) when calling the DetachInternetGateway operation: Request would have succeeded, but DryRun flag is set')
|
||||
|
||||
conn.detach_internet_gateway(igw.id, vpc.id)
|
||||
igw = conn.get_all_internet_gateways()[0]
|
||||
igw.attachments.should.have.length_of(0)
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_igw_detach_wrong_vpc():
|
||||
""" internet gateway fail to detach w/ wrong vpc """
|
||||
conn = boto.connect_vpc('the_key', 'the_secret')
|
||||
igw = conn.create_internet_gateway()
|
||||
vpc1 = conn.create_vpc(VPC_CIDR)
|
||||
vpc2 = conn.create_vpc(VPC_CIDR)
|
||||
conn.attach_internet_gateway(igw.id, vpc1.id)
|
||||
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
conn.detach_internet_gateway(igw.id, vpc2.id)
|
||||
cm.exception.code.should.equal('Gateway.NotAttached')
|
||||
cm.exception.status.should.equal(400)
|
||||
cm.exception.request_id.should_not.be.none
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_igw_detach_invalid_vpc():
|
||||
""" internet gateway fail to detach w/ invalid vpc """
|
||||
conn = boto.connect_vpc('the_key', 'the_secret')
|
||||
igw = conn.create_internet_gateway()
|
||||
vpc = conn.create_vpc(VPC_CIDR)
|
||||
conn.attach_internet_gateway(igw.id, vpc.id)
|
||||
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
conn.detach_internet_gateway(igw.id, BAD_VPC)
|
||||
cm.exception.code.should.equal('Gateway.NotAttached')
|
||||
cm.exception.status.should.equal(400)
|
||||
cm.exception.request_id.should_not.be.none
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_igw_detach_unattached():
|
||||
""" internet gateway fail to detach unattached """
|
||||
conn = boto.connect_vpc('the_key', 'the_secret')
|
||||
igw = conn.create_internet_gateway()
|
||||
vpc = conn.create_vpc(VPC_CIDR)
|
||||
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
conn.detach_internet_gateway(igw.id, vpc.id)
|
||||
cm.exception.code.should.equal('Gateway.NotAttached')
|
||||
cm.exception.status.should.equal(400)
|
||||
cm.exception.request_id.should_not.be.none
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_igw_delete():
|
||||
""" internet gateway delete"""
|
||||
conn = boto.connect_vpc('the_key', 'the_secret')
|
||||
vpc = conn.create_vpc(VPC_CIDR)
|
||||
conn.get_all_internet_gateways().should.have.length_of(0)
|
||||
igw = conn.create_internet_gateway()
|
||||
conn.get_all_internet_gateways().should.have.length_of(1)
|
||||
|
||||
with assert_raises(EC2ResponseError) as ex:
|
||||
conn.delete_internet_gateway(igw.id, dry_run=True)
|
||||
ex.exception.error_code.should.equal('DryRunOperation')
|
||||
ex.exception.status.should.equal(400)
|
||||
ex.exception.message.should.equal(
|
||||
'An error occurred (DryRunOperation) when calling the DeleteInternetGateway operation: Request would have succeeded, but DryRun flag is set')
|
||||
|
||||
conn.delete_internet_gateway(igw.id)
|
||||
conn.get_all_internet_gateways().should.have.length_of(0)
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_igw_delete_attached():
|
||||
""" internet gateway fail to delete attached """
|
||||
conn = boto.connect_vpc('the_key', 'the_secret')
|
||||
igw = conn.create_internet_gateway()
|
||||
vpc = conn.create_vpc(VPC_CIDR)
|
||||
conn.attach_internet_gateway(igw.id, vpc.id)
|
||||
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
conn.delete_internet_gateway(igw.id)
|
||||
cm.exception.code.should.equal('DependencyViolation')
|
||||
cm.exception.status.should.equal(400)
|
||||
cm.exception.request_id.should_not.be.none
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_igw_desribe():
|
||||
""" internet gateway fetch by id """
|
||||
conn = boto.connect_vpc('the_key', 'the_secret')
|
||||
igw = conn.create_internet_gateway()
|
||||
igw_by_search = conn.get_all_internet_gateways([igw.id])[0]
|
||||
igw.id.should.equal(igw_by_search.id)
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_igw_describe_bad_id():
|
||||
""" internet gateway fail to fetch by bad id """
|
||||
conn = boto.connect_vpc('the_key', 'the_secret')
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
conn.get_all_internet_gateways([BAD_IGW])
|
||||
cm.exception.code.should.equal('InvalidInternetGatewayID.NotFound')
|
||||
cm.exception.status.should.equal(400)
|
||||
cm.exception.request_id.should_not.be.none
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_igw_filter_by_vpc_id():
|
||||
""" internet gateway filter by vpc id """
|
||||
conn = boto.connect_vpc('the_key', 'the_secret')
|
||||
|
||||
igw1 = conn.create_internet_gateway()
|
||||
igw2 = conn.create_internet_gateway()
|
||||
vpc = conn.create_vpc(VPC_CIDR)
|
||||
conn.attach_internet_gateway(igw1.id, vpc.id)
|
||||
|
||||
result = conn.get_all_internet_gateways(
|
||||
filters={"attachment.vpc-id": vpc.id})
|
||||
result.should.have.length_of(1)
|
||||
result[0].id.should.equal(igw1.id)
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_igw_filter_by_tags():
|
||||
""" internet gateway filter by vpc id """
|
||||
conn = boto.connect_vpc('the_key', 'the_secret')
|
||||
|
||||
igw1 = conn.create_internet_gateway()
|
||||
igw2 = conn.create_internet_gateway()
|
||||
igw1.add_tag("tests", "yes")
|
||||
|
||||
result = conn.get_all_internet_gateways(filters={"tag:tests": "yes"})
|
||||
result.should.have.length_of(1)
|
||||
result[0].id.should.equal(igw1.id)
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_igw_filter_by_internet_gateway_id():
|
||||
""" internet gateway filter by internet gateway id """
|
||||
conn = boto.connect_vpc('the_key', 'the_secret')
|
||||
|
||||
igw1 = conn.create_internet_gateway()
|
||||
igw2 = conn.create_internet_gateway()
|
||||
|
||||
result = conn.get_all_internet_gateways(
|
||||
filters={"internet-gateway-id": igw1.id})
|
||||
result.should.have.length_of(1)
|
||||
result[0].id.should.equal(igw1.id)
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_igw_filter_by_attachment_state():
|
||||
""" internet gateway filter by attachment state """
|
||||
conn = boto.connect_vpc('the_key', 'the_secret')
|
||||
|
||||
igw1 = conn.create_internet_gateway()
|
||||
igw2 = conn.create_internet_gateway()
|
||||
vpc = conn.create_vpc(VPC_CIDR)
|
||||
conn.attach_internet_gateway(igw1.id, vpc.id)
|
||||
|
||||
result = conn.get_all_internet_gateways(
|
||||
filters={"attachment.state": "available"})
|
||||
result.should.have.length_of(1)
|
||||
result[0].id.should.equal(igw1.id)
|
||||
|
|
|
|||
|
|
@ -1,10 +1,10 @@
|
|||
from __future__ import unicode_literals
|
||||
import boto
|
||||
import sure # noqa
|
||||
|
||||
from moto import mock_ec2
|
||||
|
||||
|
||||
@mock_ec2
|
||||
def test_ip_addresses():
|
||||
pass
|
||||
from __future__ import unicode_literals
|
||||
import boto
|
||||
import sure # noqa
|
||||
|
||||
from moto import mock_ec2
|
||||
|
||||
|
||||
@mock_ec2
|
||||
def test_ip_addresses():
|
||||
pass
|
||||
|
|
|
|||
|
|
@ -1,151 +1,151 @@
|
|||
from __future__ import unicode_literals
|
||||
# Ensure 'assert_raises' context manager support for Python 2.6
|
||||
import tests.backport_assert_raises
|
||||
from nose.tools import assert_raises
|
||||
|
||||
import boto
|
||||
import six
|
||||
import sure # noqa
|
||||
|
||||
from boto.exception import EC2ResponseError
|
||||
from moto import mock_ec2_deprecated
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_key_pairs_empty():
|
||||
conn = boto.connect_ec2('the_key', 'the_secret')
|
||||
assert len(conn.get_all_key_pairs()) == 0
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_key_pairs_invalid_id():
|
||||
conn = boto.connect_ec2('the_key', 'the_secret')
|
||||
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
conn.get_all_key_pairs('foo')
|
||||
cm.exception.code.should.equal('InvalidKeyPair.NotFound')
|
||||
cm.exception.status.should.equal(400)
|
||||
cm.exception.request_id.should_not.be.none
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_key_pairs_create():
|
||||
conn = boto.connect_ec2('the_key', 'the_secret')
|
||||
|
||||
with assert_raises(EC2ResponseError) as ex:
|
||||
kp = conn.create_key_pair('foo', dry_run=True)
|
||||
ex.exception.error_code.should.equal('DryRunOperation')
|
||||
ex.exception.status.should.equal(400)
|
||||
ex.exception.message.should.equal(
|
||||
'An error occurred (DryRunOperation) when calling the CreateKeyPair operation: Request would have succeeded, but DryRun flag is set')
|
||||
|
||||
kp = conn.create_key_pair('foo')
|
||||
assert kp.material.startswith('---- BEGIN RSA PRIVATE KEY ----')
|
||||
kps = conn.get_all_key_pairs()
|
||||
assert len(kps) == 1
|
||||
assert kps[0].name == 'foo'
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_key_pairs_create_two():
|
||||
conn = boto.connect_ec2('the_key', 'the_secret')
|
||||
kp = conn.create_key_pair('foo')
|
||||
kp = conn.create_key_pair('bar')
|
||||
assert kp.material.startswith('---- BEGIN RSA PRIVATE KEY ----')
|
||||
kps = conn.get_all_key_pairs()
|
||||
kps.should.have.length_of(2)
|
||||
[i.name for i in kps].should.contain('foo')
|
||||
[i.name for i in kps].should.contain('bar')
|
||||
kps = conn.get_all_key_pairs('foo')
|
||||
kps.should.have.length_of(1)
|
||||
kps[0].name.should.equal('foo')
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_key_pairs_create_exist():
|
||||
conn = boto.connect_ec2('the_key', 'the_secret')
|
||||
kp = conn.create_key_pair('foo')
|
||||
assert kp.material.startswith('---- BEGIN RSA PRIVATE KEY ----')
|
||||
assert len(conn.get_all_key_pairs()) == 1
|
||||
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
conn.create_key_pair('foo')
|
||||
cm.exception.code.should.equal('InvalidKeyPair.Duplicate')
|
||||
cm.exception.status.should.equal(400)
|
||||
cm.exception.request_id.should_not.be.none
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_key_pairs_delete_no_exist():
|
||||
conn = boto.connect_ec2('the_key', 'the_secret')
|
||||
assert len(conn.get_all_key_pairs()) == 0
|
||||
r = conn.delete_key_pair('foo')
|
||||
r.should.be.ok
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_key_pairs_delete_exist():
|
||||
conn = boto.connect_ec2('the_key', 'the_secret')
|
||||
conn.create_key_pair('foo')
|
||||
|
||||
with assert_raises(EC2ResponseError) as ex:
|
||||
r = conn.delete_key_pair('foo', dry_run=True)
|
||||
ex.exception.error_code.should.equal('DryRunOperation')
|
||||
ex.exception.status.should.equal(400)
|
||||
ex.exception.message.should.equal(
|
||||
'An error occurred (DryRunOperation) when calling the DeleteKeyPair operation: Request would have succeeded, but DryRun flag is set')
|
||||
|
||||
r = conn.delete_key_pair('foo')
|
||||
r.should.be.ok
|
||||
assert len(conn.get_all_key_pairs()) == 0
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_key_pairs_import():
|
||||
conn = boto.connect_ec2('the_key', 'the_secret')
|
||||
|
||||
with assert_raises(EC2ResponseError) as ex:
|
||||
kp = conn.import_key_pair('foo', b'content', dry_run=True)
|
||||
ex.exception.error_code.should.equal('DryRunOperation')
|
||||
ex.exception.status.should.equal(400)
|
||||
ex.exception.message.should.equal(
|
||||
'An error occurred (DryRunOperation) when calling the ImportKeyPair operation: Request would have succeeded, but DryRun flag is set')
|
||||
|
||||
kp = conn.import_key_pair('foo', b'content')
|
||||
assert kp.name == 'foo'
|
||||
kps = conn.get_all_key_pairs()
|
||||
assert len(kps) == 1
|
||||
assert kps[0].name == 'foo'
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_key_pairs_import_exist():
|
||||
conn = boto.connect_ec2('the_key', 'the_secret')
|
||||
kp = conn.import_key_pair('foo', b'content')
|
||||
assert kp.name == 'foo'
|
||||
assert len(conn.get_all_key_pairs()) == 1
|
||||
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
conn.create_key_pair('foo')
|
||||
cm.exception.code.should.equal('InvalidKeyPair.Duplicate')
|
||||
cm.exception.status.should.equal(400)
|
||||
cm.exception.request_id.should_not.be.none
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_key_pair_filters():
|
||||
conn = boto.connect_ec2('the_key', 'the_secret')
|
||||
|
||||
_ = conn.create_key_pair('kpfltr1')
|
||||
kp2 = conn.create_key_pair('kpfltr2')
|
||||
kp3 = conn.create_key_pair('kpfltr3')
|
||||
|
||||
kp_by_name = conn.get_all_key_pairs(
|
||||
filters={'key-name': 'kpfltr2'})
|
||||
set([kp.name for kp in kp_by_name]
|
||||
).should.equal(set([kp2.name]))
|
||||
|
||||
kp_by_name = conn.get_all_key_pairs(
|
||||
filters={'fingerprint': kp3.fingerprint})
|
||||
set([kp.name for kp in kp_by_name]
|
||||
).should.equal(set([kp3.name]))
|
||||
from __future__ import unicode_literals
|
||||
# Ensure 'assert_raises' context manager support for Python 2.6
|
||||
import tests.backport_assert_raises
|
||||
from nose.tools import assert_raises
|
||||
|
||||
import boto
|
||||
import six
|
||||
import sure # noqa
|
||||
|
||||
from boto.exception import EC2ResponseError
|
||||
from moto import mock_ec2_deprecated
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_key_pairs_empty():
|
||||
conn = boto.connect_ec2('the_key', 'the_secret')
|
||||
assert len(conn.get_all_key_pairs()) == 0
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_key_pairs_invalid_id():
|
||||
conn = boto.connect_ec2('the_key', 'the_secret')
|
||||
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
conn.get_all_key_pairs('foo')
|
||||
cm.exception.code.should.equal('InvalidKeyPair.NotFound')
|
||||
cm.exception.status.should.equal(400)
|
||||
cm.exception.request_id.should_not.be.none
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_key_pairs_create():
|
||||
conn = boto.connect_ec2('the_key', 'the_secret')
|
||||
|
||||
with assert_raises(EC2ResponseError) as ex:
|
||||
kp = conn.create_key_pair('foo', dry_run=True)
|
||||
ex.exception.error_code.should.equal('DryRunOperation')
|
||||
ex.exception.status.should.equal(400)
|
||||
ex.exception.message.should.equal(
|
||||
'An error occurred (DryRunOperation) when calling the CreateKeyPair operation: Request would have succeeded, but DryRun flag is set')
|
||||
|
||||
kp = conn.create_key_pair('foo')
|
||||
assert kp.material.startswith('---- BEGIN RSA PRIVATE KEY ----')
|
||||
kps = conn.get_all_key_pairs()
|
||||
assert len(kps) == 1
|
||||
assert kps[0].name == 'foo'
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_key_pairs_create_two():
|
||||
conn = boto.connect_ec2('the_key', 'the_secret')
|
||||
kp = conn.create_key_pair('foo')
|
||||
kp = conn.create_key_pair('bar')
|
||||
assert kp.material.startswith('---- BEGIN RSA PRIVATE KEY ----')
|
||||
kps = conn.get_all_key_pairs()
|
||||
kps.should.have.length_of(2)
|
||||
[i.name for i in kps].should.contain('foo')
|
||||
[i.name for i in kps].should.contain('bar')
|
||||
kps = conn.get_all_key_pairs('foo')
|
||||
kps.should.have.length_of(1)
|
||||
kps[0].name.should.equal('foo')
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_key_pairs_create_exist():
|
||||
conn = boto.connect_ec2('the_key', 'the_secret')
|
||||
kp = conn.create_key_pair('foo')
|
||||
assert kp.material.startswith('---- BEGIN RSA PRIVATE KEY ----')
|
||||
assert len(conn.get_all_key_pairs()) == 1
|
||||
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
conn.create_key_pair('foo')
|
||||
cm.exception.code.should.equal('InvalidKeyPair.Duplicate')
|
||||
cm.exception.status.should.equal(400)
|
||||
cm.exception.request_id.should_not.be.none
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_key_pairs_delete_no_exist():
|
||||
conn = boto.connect_ec2('the_key', 'the_secret')
|
||||
assert len(conn.get_all_key_pairs()) == 0
|
||||
r = conn.delete_key_pair('foo')
|
||||
r.should.be.ok
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_key_pairs_delete_exist():
|
||||
conn = boto.connect_ec2('the_key', 'the_secret')
|
||||
conn.create_key_pair('foo')
|
||||
|
||||
with assert_raises(EC2ResponseError) as ex:
|
||||
r = conn.delete_key_pair('foo', dry_run=True)
|
||||
ex.exception.error_code.should.equal('DryRunOperation')
|
||||
ex.exception.status.should.equal(400)
|
||||
ex.exception.message.should.equal(
|
||||
'An error occurred (DryRunOperation) when calling the DeleteKeyPair operation: Request would have succeeded, but DryRun flag is set')
|
||||
|
||||
r = conn.delete_key_pair('foo')
|
||||
r.should.be.ok
|
||||
assert len(conn.get_all_key_pairs()) == 0
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_key_pairs_import():
|
||||
conn = boto.connect_ec2('the_key', 'the_secret')
|
||||
|
||||
with assert_raises(EC2ResponseError) as ex:
|
||||
kp = conn.import_key_pair('foo', b'content', dry_run=True)
|
||||
ex.exception.error_code.should.equal('DryRunOperation')
|
||||
ex.exception.status.should.equal(400)
|
||||
ex.exception.message.should.equal(
|
||||
'An error occurred (DryRunOperation) when calling the ImportKeyPair operation: Request would have succeeded, but DryRun flag is set')
|
||||
|
||||
kp = conn.import_key_pair('foo', b'content')
|
||||
assert kp.name == 'foo'
|
||||
kps = conn.get_all_key_pairs()
|
||||
assert len(kps) == 1
|
||||
assert kps[0].name == 'foo'
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_key_pairs_import_exist():
|
||||
conn = boto.connect_ec2('the_key', 'the_secret')
|
||||
kp = conn.import_key_pair('foo', b'content')
|
||||
assert kp.name == 'foo'
|
||||
assert len(conn.get_all_key_pairs()) == 1
|
||||
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
conn.create_key_pair('foo')
|
||||
cm.exception.code.should.equal('InvalidKeyPair.Duplicate')
|
||||
cm.exception.status.should.equal(400)
|
||||
cm.exception.request_id.should_not.be.none
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_key_pair_filters():
|
||||
conn = boto.connect_ec2('the_key', 'the_secret')
|
||||
|
||||
_ = conn.create_key_pair('kpfltr1')
|
||||
kp2 = conn.create_key_pair('kpfltr2')
|
||||
kp3 = conn.create_key_pair('kpfltr3')
|
||||
|
||||
kp_by_name = conn.get_all_key_pairs(
|
||||
filters={'key-name': 'kpfltr2'})
|
||||
set([kp.name for kp in kp_by_name]
|
||||
).should.equal(set([kp2.name]))
|
||||
|
||||
kp_by_name = conn.get_all_key_pairs(
|
||||
filters={'fingerprint': kp3.fingerprint})
|
||||
set([kp.name for kp in kp_by_name]
|
||||
).should.equal(set([kp3.name]))
|
||||
|
|
|
|||
|
|
@ -1,10 +1,10 @@
|
|||
from __future__ import unicode_literals
|
||||
import boto
|
||||
import sure # noqa
|
||||
|
||||
from moto import mock_ec2
|
||||
|
||||
|
||||
@mock_ec2
|
||||
def test_monitoring():
|
||||
pass
|
||||
from __future__ import unicode_literals
|
||||
import boto
|
||||
import sure # noqa
|
||||
|
||||
from moto import mock_ec2
|
||||
|
||||
|
||||
@mock_ec2
|
||||
def test_monitoring():
|
||||
pass
|
||||
|
|
|
|||
|
|
@ -1,109 +1,109 @@
|
|||
from __future__ import unicode_literals
|
||||
import boto3
|
||||
import sure # noqa
|
||||
from moto import mock_ec2
|
||||
|
||||
|
||||
@mock_ec2
|
||||
def test_describe_nat_gateways():
|
||||
conn = boto3.client('ec2', 'us-east-1')
|
||||
|
||||
response = conn.describe_nat_gateways()
|
||||
|
||||
response['NatGateways'].should.have.length_of(0)
|
||||
|
||||
|
||||
@mock_ec2
|
||||
def test_create_nat_gateway():
|
||||
conn = boto3.client('ec2', 'us-east-1')
|
||||
vpc = conn.create_vpc(CidrBlock='10.0.0.0/16')
|
||||
vpc_id = vpc['Vpc']['VpcId']
|
||||
subnet = conn.create_subnet(
|
||||
VpcId=vpc_id,
|
||||
CidrBlock='10.0.1.0/27',
|
||||
AvailabilityZone='us-east-1a',
|
||||
)
|
||||
allocation_id = conn.allocate_address(Domain='vpc')['AllocationId']
|
||||
subnet_id = subnet['Subnet']['SubnetId']
|
||||
|
||||
response = conn.create_nat_gateway(
|
||||
SubnetId=subnet_id,
|
||||
AllocationId=allocation_id,
|
||||
)
|
||||
|
||||
response['NatGateway']['VpcId'].should.equal(vpc_id)
|
||||
response['NatGateway']['SubnetId'].should.equal(subnet_id)
|
||||
response['NatGateway']['State'].should.equal('available')
|
||||
|
||||
|
||||
@mock_ec2
|
||||
def test_delete_nat_gateway():
|
||||
conn = boto3.client('ec2', 'us-east-1')
|
||||
vpc = conn.create_vpc(CidrBlock='10.0.0.0/16')
|
||||
vpc_id = vpc['Vpc']['VpcId']
|
||||
subnet = conn.create_subnet(
|
||||
VpcId=vpc_id,
|
||||
CidrBlock='10.0.1.0/27',
|
||||
AvailabilityZone='us-east-1a',
|
||||
)
|
||||
allocation_id = conn.allocate_address(Domain='vpc')['AllocationId']
|
||||
subnet_id = subnet['Subnet']['SubnetId']
|
||||
|
||||
nat_gateway = conn.create_nat_gateway(
|
||||
SubnetId=subnet_id,
|
||||
AllocationId=allocation_id,
|
||||
)
|
||||
nat_gateway_id = nat_gateway['NatGateway']['NatGatewayId']
|
||||
response = conn.delete_nat_gateway(NatGatewayId=nat_gateway_id)
|
||||
|
||||
# this is hard to match against, so remove it
|
||||
response['ResponseMetadata'].pop('HTTPHeaders', None)
|
||||
response['ResponseMetadata'].pop('RetryAttempts', None)
|
||||
response.should.equal({
|
||||
'NatGatewayId': nat_gateway_id,
|
||||
'ResponseMetadata': {
|
||||
'HTTPStatusCode': 200,
|
||||
'RequestId': '741fc8ab-6ebe-452b-b92b-example'
|
||||
}
|
||||
})
|
||||
|
||||
|
||||
@mock_ec2
|
||||
def test_create_and_describe_nat_gateway():
|
||||
conn = boto3.client('ec2', 'us-east-1')
|
||||
vpc = conn.create_vpc(CidrBlock='10.0.0.0/16')
|
||||
vpc_id = vpc['Vpc']['VpcId']
|
||||
subnet = conn.create_subnet(
|
||||
VpcId=vpc_id,
|
||||
CidrBlock='10.0.1.0/27',
|
||||
AvailabilityZone='us-east-1a',
|
||||
)
|
||||
allocation_id = conn.allocate_address(Domain='vpc')['AllocationId']
|
||||
subnet_id = subnet['Subnet']['SubnetId']
|
||||
|
||||
create_response = conn.create_nat_gateway(
|
||||
SubnetId=subnet_id,
|
||||
AllocationId=allocation_id,
|
||||
)
|
||||
nat_gateway_id = create_response['NatGateway']['NatGatewayId']
|
||||
describe_response = conn.describe_nat_gateways()
|
||||
|
||||
enis = conn.describe_network_interfaces()['NetworkInterfaces']
|
||||
eni_id = enis[0]['NetworkInterfaceId']
|
||||
public_ip = conn.describe_addresses(AllocationIds=[allocation_id])[
|
||||
'Addresses'][0]['PublicIp']
|
||||
|
||||
describe_response['NatGateways'].should.have.length_of(1)
|
||||
describe_response['NatGateways'][0][
|
||||
'NatGatewayId'].should.equal(nat_gateway_id)
|
||||
describe_response['NatGateways'][0]['State'].should.equal('available')
|
||||
describe_response['NatGateways'][0]['SubnetId'].should.equal(subnet_id)
|
||||
describe_response['NatGateways'][0]['VpcId'].should.equal(vpc_id)
|
||||
describe_response['NatGateways'][0]['NatGatewayAddresses'][
|
||||
0]['AllocationId'].should.equal(allocation_id)
|
||||
describe_response['NatGateways'][0]['NatGatewayAddresses'][
|
||||
0]['NetworkInterfaceId'].should.equal(eni_id)
|
||||
assert describe_response['NatGateways'][0][
|
||||
'NatGatewayAddresses'][0]['PrivateIp'].startswith('10.')
|
||||
describe_response['NatGateways'][0]['NatGatewayAddresses'][
|
||||
0]['PublicIp'].should.equal(public_ip)
|
||||
from __future__ import unicode_literals
|
||||
import boto3
|
||||
import sure # noqa
|
||||
from moto import mock_ec2
|
||||
|
||||
|
||||
@mock_ec2
|
||||
def test_describe_nat_gateways():
|
||||
conn = boto3.client('ec2', 'us-east-1')
|
||||
|
||||
response = conn.describe_nat_gateways()
|
||||
|
||||
response['NatGateways'].should.have.length_of(0)
|
||||
|
||||
|
||||
@mock_ec2
|
||||
def test_create_nat_gateway():
|
||||
conn = boto3.client('ec2', 'us-east-1')
|
||||
vpc = conn.create_vpc(CidrBlock='10.0.0.0/16')
|
||||
vpc_id = vpc['Vpc']['VpcId']
|
||||
subnet = conn.create_subnet(
|
||||
VpcId=vpc_id,
|
||||
CidrBlock='10.0.1.0/27',
|
||||
AvailabilityZone='us-east-1a',
|
||||
)
|
||||
allocation_id = conn.allocate_address(Domain='vpc')['AllocationId']
|
||||
subnet_id = subnet['Subnet']['SubnetId']
|
||||
|
||||
response = conn.create_nat_gateway(
|
||||
SubnetId=subnet_id,
|
||||
AllocationId=allocation_id,
|
||||
)
|
||||
|
||||
response['NatGateway']['VpcId'].should.equal(vpc_id)
|
||||
response['NatGateway']['SubnetId'].should.equal(subnet_id)
|
||||
response['NatGateway']['State'].should.equal('available')
|
||||
|
||||
|
||||
@mock_ec2
|
||||
def test_delete_nat_gateway():
|
||||
conn = boto3.client('ec2', 'us-east-1')
|
||||
vpc = conn.create_vpc(CidrBlock='10.0.0.0/16')
|
||||
vpc_id = vpc['Vpc']['VpcId']
|
||||
subnet = conn.create_subnet(
|
||||
VpcId=vpc_id,
|
||||
CidrBlock='10.0.1.0/27',
|
||||
AvailabilityZone='us-east-1a',
|
||||
)
|
||||
allocation_id = conn.allocate_address(Domain='vpc')['AllocationId']
|
||||
subnet_id = subnet['Subnet']['SubnetId']
|
||||
|
||||
nat_gateway = conn.create_nat_gateway(
|
||||
SubnetId=subnet_id,
|
||||
AllocationId=allocation_id,
|
||||
)
|
||||
nat_gateway_id = nat_gateway['NatGateway']['NatGatewayId']
|
||||
response = conn.delete_nat_gateway(NatGatewayId=nat_gateway_id)
|
||||
|
||||
# this is hard to match against, so remove it
|
||||
response['ResponseMetadata'].pop('HTTPHeaders', None)
|
||||
response['ResponseMetadata'].pop('RetryAttempts', None)
|
||||
response.should.equal({
|
||||
'NatGatewayId': nat_gateway_id,
|
||||
'ResponseMetadata': {
|
||||
'HTTPStatusCode': 200,
|
||||
'RequestId': '741fc8ab-6ebe-452b-b92b-example'
|
||||
}
|
||||
})
|
||||
|
||||
|
||||
@mock_ec2
|
||||
def test_create_and_describe_nat_gateway():
|
||||
conn = boto3.client('ec2', 'us-east-1')
|
||||
vpc = conn.create_vpc(CidrBlock='10.0.0.0/16')
|
||||
vpc_id = vpc['Vpc']['VpcId']
|
||||
subnet = conn.create_subnet(
|
||||
VpcId=vpc_id,
|
||||
CidrBlock='10.0.1.0/27',
|
||||
AvailabilityZone='us-east-1a',
|
||||
)
|
||||
allocation_id = conn.allocate_address(Domain='vpc')['AllocationId']
|
||||
subnet_id = subnet['Subnet']['SubnetId']
|
||||
|
||||
create_response = conn.create_nat_gateway(
|
||||
SubnetId=subnet_id,
|
||||
AllocationId=allocation_id,
|
||||
)
|
||||
nat_gateway_id = create_response['NatGateway']['NatGatewayId']
|
||||
describe_response = conn.describe_nat_gateways()
|
||||
|
||||
enis = conn.describe_network_interfaces()['NetworkInterfaces']
|
||||
eni_id = enis[0]['NetworkInterfaceId']
|
||||
public_ip = conn.describe_addresses(AllocationIds=[allocation_id])[
|
||||
'Addresses'][0]['PublicIp']
|
||||
|
||||
describe_response['NatGateways'].should.have.length_of(1)
|
||||
describe_response['NatGateways'][0][
|
||||
'NatGatewayId'].should.equal(nat_gateway_id)
|
||||
describe_response['NatGateways'][0]['State'].should.equal('available')
|
||||
describe_response['NatGateways'][0]['SubnetId'].should.equal(subnet_id)
|
||||
describe_response['NatGateways'][0]['VpcId'].should.equal(vpc_id)
|
||||
describe_response['NatGateways'][0]['NatGatewayAddresses'][
|
||||
0]['AllocationId'].should.equal(allocation_id)
|
||||
describe_response['NatGateways'][0]['NatGatewayAddresses'][
|
||||
0]['NetworkInterfaceId'].should.equal(eni_id)
|
||||
assert describe_response['NatGateways'][0][
|
||||
'NatGatewayAddresses'][0]['PrivateIp'].startswith('10.')
|
||||
describe_response['NatGateways'][0]['NatGatewayAddresses'][
|
||||
0]['PublicIp'].should.equal(public_ip)
|
||||
|
|
|
|||
|
|
@ -1,175 +1,175 @@
|
|||
from __future__ import unicode_literals
|
||||
import boto
|
||||
import sure # noqa
|
||||
|
||||
from moto import mock_ec2_deprecated
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_default_network_acl_created_with_vpc():
|
||||
conn = boto.connect_vpc('the_key', 'the secret')
|
||||
vpc = conn.create_vpc("10.0.0.0/16")
|
||||
all_network_acls = conn.get_all_network_acls()
|
||||
all_network_acls.should.have.length_of(2)
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_network_acls():
|
||||
conn = boto.connect_vpc('the_key', 'the secret')
|
||||
vpc = conn.create_vpc("10.0.0.0/16")
|
||||
network_acl = conn.create_network_acl(vpc.id)
|
||||
all_network_acls = conn.get_all_network_acls()
|
||||
all_network_acls.should.have.length_of(3)
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_new_subnet_associates_with_default_network_acl():
|
||||
conn = boto.connect_vpc('the_key', 'the secret')
|
||||
vpc = conn.get_all_vpcs()[0]
|
||||
|
||||
subnet = conn.create_subnet(vpc.id, "10.0.0.0/18")
|
||||
all_network_acls = conn.get_all_network_acls()
|
||||
all_network_acls.should.have.length_of(1)
|
||||
|
||||
acl = all_network_acls[0]
|
||||
acl.associations.should.have.length_of(4)
|
||||
[a.subnet_id for a in acl.associations].should.contain(subnet.id)
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_network_acl_entries():
|
||||
conn = boto.connect_vpc('the_key', 'the secret')
|
||||
vpc = conn.create_vpc("10.0.0.0/16")
|
||||
|
||||
network_acl = conn.create_network_acl(vpc.id)
|
||||
|
||||
network_acl_entry = conn.create_network_acl_entry(
|
||||
network_acl.id, 110, 6,
|
||||
'ALLOW', '0.0.0.0/0', False,
|
||||
port_range_from='443',
|
||||
port_range_to='443'
|
||||
)
|
||||
|
||||
all_network_acls = conn.get_all_network_acls()
|
||||
all_network_acls.should.have.length_of(3)
|
||||
|
||||
test_network_acl = next(na for na in all_network_acls
|
||||
if na.id == network_acl.id)
|
||||
entries = test_network_acl.network_acl_entries
|
||||
entries.should.have.length_of(1)
|
||||
entries[0].rule_number.should.equal('110')
|
||||
entries[0].protocol.should.equal('6')
|
||||
entries[0].rule_action.should.equal('ALLOW')
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_delete_network_acl_entry():
|
||||
conn = boto.connect_vpc('the_key', 'the secret')
|
||||
vpc = conn.create_vpc("10.0.0.0/16")
|
||||
|
||||
network_acl = conn.create_network_acl(vpc.id)
|
||||
|
||||
conn.create_network_acl_entry(
|
||||
network_acl.id, 110, 6,
|
||||
'ALLOW', '0.0.0.0/0', False,
|
||||
port_range_from='443',
|
||||
port_range_to='443'
|
||||
)
|
||||
conn.delete_network_acl_entry(
|
||||
network_acl.id, 110, False
|
||||
)
|
||||
|
||||
all_network_acls = conn.get_all_network_acls()
|
||||
|
||||
test_network_acl = next(na for na in all_network_acls
|
||||
if na.id == network_acl.id)
|
||||
entries = test_network_acl.network_acl_entries
|
||||
entries.should.have.length_of(0)
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_replace_network_acl_entry():
|
||||
conn = boto.connect_vpc('the_key', 'the secret')
|
||||
vpc = conn.create_vpc("10.0.0.0/16")
|
||||
|
||||
network_acl = conn.create_network_acl(vpc.id)
|
||||
|
||||
conn.create_network_acl_entry(
|
||||
network_acl.id, 110, 6,
|
||||
'ALLOW', '0.0.0.0/0', False,
|
||||
port_range_from='443',
|
||||
port_range_to='443'
|
||||
)
|
||||
conn.replace_network_acl_entry(
|
||||
network_acl.id, 110, -1,
|
||||
'DENY', '0.0.0.0/0', False,
|
||||
port_range_from='22',
|
||||
port_range_to='22'
|
||||
)
|
||||
|
||||
all_network_acls = conn.get_all_network_acls()
|
||||
|
||||
test_network_acl = next(na for na in all_network_acls
|
||||
if na.id == network_acl.id)
|
||||
entries = test_network_acl.network_acl_entries
|
||||
entries.should.have.length_of(1)
|
||||
entries[0].rule_number.should.equal('110')
|
||||
entries[0].protocol.should.equal('-1')
|
||||
entries[0].rule_action.should.equal('DENY')
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_associate_new_network_acl_with_subnet():
|
||||
conn = boto.connect_vpc('the_key', 'the secret')
|
||||
vpc = conn.create_vpc("10.0.0.0/16")
|
||||
subnet = conn.create_subnet(vpc.id, "10.0.0.0/18")
|
||||
network_acl = conn.create_network_acl(vpc.id)
|
||||
|
||||
conn.associate_network_acl(network_acl.id, subnet.id)
|
||||
|
||||
all_network_acls = conn.get_all_network_acls()
|
||||
all_network_acls.should.have.length_of(3)
|
||||
|
||||
test_network_acl = next(na for na in all_network_acls
|
||||
if na.id == network_acl.id)
|
||||
|
||||
test_network_acl.associations.should.have.length_of(1)
|
||||
test_network_acl.associations[0].subnet_id.should.equal(subnet.id)
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_delete_network_acl():
|
||||
conn = boto.connect_vpc('the_key', 'the secret')
|
||||
vpc = conn.create_vpc("10.0.0.0/16")
|
||||
subnet = conn.create_subnet(vpc.id, "10.0.0.0/18")
|
||||
network_acl = conn.create_network_acl(vpc.id)
|
||||
|
||||
all_network_acls = conn.get_all_network_acls()
|
||||
all_network_acls.should.have.length_of(3)
|
||||
|
||||
any(acl.id == network_acl.id for acl in all_network_acls).should.be.ok
|
||||
|
||||
conn.delete_network_acl(network_acl.id)
|
||||
|
||||
updated_network_acls = conn.get_all_network_acls()
|
||||
updated_network_acls.should.have.length_of(2)
|
||||
|
||||
any(acl.id == network_acl.id for acl in updated_network_acls).shouldnt.be.ok
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_network_acl_tagging():
|
||||
conn = boto.connect_vpc('the_key', 'the secret')
|
||||
vpc = conn.create_vpc("10.0.0.0/16")
|
||||
network_acl = conn.create_network_acl(vpc.id)
|
||||
|
||||
network_acl.add_tag("a key", "some value")
|
||||
|
||||
tag = conn.get_all_tags()[0]
|
||||
tag.name.should.equal("a key")
|
||||
tag.value.should.equal("some value")
|
||||
|
||||
all_network_acls = conn.get_all_network_acls()
|
||||
test_network_acl = next(na for na in all_network_acls
|
||||
if na.id == network_acl.id)
|
||||
test_network_acl.tags.should.have.length_of(1)
|
||||
test_network_acl.tags["a key"].should.equal("some value")
|
||||
from __future__ import unicode_literals
|
||||
import boto
|
||||
import sure # noqa
|
||||
|
||||
from moto import mock_ec2_deprecated
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_default_network_acl_created_with_vpc():
|
||||
conn = boto.connect_vpc('the_key', 'the secret')
|
||||
vpc = conn.create_vpc("10.0.0.0/16")
|
||||
all_network_acls = conn.get_all_network_acls()
|
||||
all_network_acls.should.have.length_of(2)
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_network_acls():
|
||||
conn = boto.connect_vpc('the_key', 'the secret')
|
||||
vpc = conn.create_vpc("10.0.0.0/16")
|
||||
network_acl = conn.create_network_acl(vpc.id)
|
||||
all_network_acls = conn.get_all_network_acls()
|
||||
all_network_acls.should.have.length_of(3)
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_new_subnet_associates_with_default_network_acl():
|
||||
conn = boto.connect_vpc('the_key', 'the secret')
|
||||
vpc = conn.get_all_vpcs()[0]
|
||||
|
||||
subnet = conn.create_subnet(vpc.id, "10.0.0.0/18")
|
||||
all_network_acls = conn.get_all_network_acls()
|
||||
all_network_acls.should.have.length_of(1)
|
||||
|
||||
acl = all_network_acls[0]
|
||||
acl.associations.should.have.length_of(4)
|
||||
[a.subnet_id for a in acl.associations].should.contain(subnet.id)
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_network_acl_entries():
|
||||
conn = boto.connect_vpc('the_key', 'the secret')
|
||||
vpc = conn.create_vpc("10.0.0.0/16")
|
||||
|
||||
network_acl = conn.create_network_acl(vpc.id)
|
||||
|
||||
network_acl_entry = conn.create_network_acl_entry(
|
||||
network_acl.id, 110, 6,
|
||||
'ALLOW', '0.0.0.0/0', False,
|
||||
port_range_from='443',
|
||||
port_range_to='443'
|
||||
)
|
||||
|
||||
all_network_acls = conn.get_all_network_acls()
|
||||
all_network_acls.should.have.length_of(3)
|
||||
|
||||
test_network_acl = next(na for na in all_network_acls
|
||||
if na.id == network_acl.id)
|
||||
entries = test_network_acl.network_acl_entries
|
||||
entries.should.have.length_of(1)
|
||||
entries[0].rule_number.should.equal('110')
|
||||
entries[0].protocol.should.equal('6')
|
||||
entries[0].rule_action.should.equal('ALLOW')
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_delete_network_acl_entry():
|
||||
conn = boto.connect_vpc('the_key', 'the secret')
|
||||
vpc = conn.create_vpc("10.0.0.0/16")
|
||||
|
||||
network_acl = conn.create_network_acl(vpc.id)
|
||||
|
||||
conn.create_network_acl_entry(
|
||||
network_acl.id, 110, 6,
|
||||
'ALLOW', '0.0.0.0/0', False,
|
||||
port_range_from='443',
|
||||
port_range_to='443'
|
||||
)
|
||||
conn.delete_network_acl_entry(
|
||||
network_acl.id, 110, False
|
||||
)
|
||||
|
||||
all_network_acls = conn.get_all_network_acls()
|
||||
|
||||
test_network_acl = next(na for na in all_network_acls
|
||||
if na.id == network_acl.id)
|
||||
entries = test_network_acl.network_acl_entries
|
||||
entries.should.have.length_of(0)
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_replace_network_acl_entry():
|
||||
conn = boto.connect_vpc('the_key', 'the secret')
|
||||
vpc = conn.create_vpc("10.0.0.0/16")
|
||||
|
||||
network_acl = conn.create_network_acl(vpc.id)
|
||||
|
||||
conn.create_network_acl_entry(
|
||||
network_acl.id, 110, 6,
|
||||
'ALLOW', '0.0.0.0/0', False,
|
||||
port_range_from='443',
|
||||
port_range_to='443'
|
||||
)
|
||||
conn.replace_network_acl_entry(
|
||||
network_acl.id, 110, -1,
|
||||
'DENY', '0.0.0.0/0', False,
|
||||
port_range_from='22',
|
||||
port_range_to='22'
|
||||
)
|
||||
|
||||
all_network_acls = conn.get_all_network_acls()
|
||||
|
||||
test_network_acl = next(na for na in all_network_acls
|
||||
if na.id == network_acl.id)
|
||||
entries = test_network_acl.network_acl_entries
|
||||
entries.should.have.length_of(1)
|
||||
entries[0].rule_number.should.equal('110')
|
||||
entries[0].protocol.should.equal('-1')
|
||||
entries[0].rule_action.should.equal('DENY')
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_associate_new_network_acl_with_subnet():
|
||||
conn = boto.connect_vpc('the_key', 'the secret')
|
||||
vpc = conn.create_vpc("10.0.0.0/16")
|
||||
subnet = conn.create_subnet(vpc.id, "10.0.0.0/18")
|
||||
network_acl = conn.create_network_acl(vpc.id)
|
||||
|
||||
conn.associate_network_acl(network_acl.id, subnet.id)
|
||||
|
||||
all_network_acls = conn.get_all_network_acls()
|
||||
all_network_acls.should.have.length_of(3)
|
||||
|
||||
test_network_acl = next(na for na in all_network_acls
|
||||
if na.id == network_acl.id)
|
||||
|
||||
test_network_acl.associations.should.have.length_of(1)
|
||||
test_network_acl.associations[0].subnet_id.should.equal(subnet.id)
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_delete_network_acl():
|
||||
conn = boto.connect_vpc('the_key', 'the secret')
|
||||
vpc = conn.create_vpc("10.0.0.0/16")
|
||||
subnet = conn.create_subnet(vpc.id, "10.0.0.0/18")
|
||||
network_acl = conn.create_network_acl(vpc.id)
|
||||
|
||||
all_network_acls = conn.get_all_network_acls()
|
||||
all_network_acls.should.have.length_of(3)
|
||||
|
||||
any(acl.id == network_acl.id for acl in all_network_acls).should.be.ok
|
||||
|
||||
conn.delete_network_acl(network_acl.id)
|
||||
|
||||
updated_network_acls = conn.get_all_network_acls()
|
||||
updated_network_acls.should.have.length_of(2)
|
||||
|
||||
any(acl.id == network_acl.id for acl in updated_network_acls).shouldnt.be.ok
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_network_acl_tagging():
|
||||
conn = boto.connect_vpc('the_key', 'the secret')
|
||||
vpc = conn.create_vpc("10.0.0.0/16")
|
||||
network_acl = conn.create_network_acl(vpc.id)
|
||||
|
||||
network_acl.add_tag("a key", "some value")
|
||||
|
||||
tag = conn.get_all_tags()[0]
|
||||
tag.name.should.equal("a key")
|
||||
tag.value.should.equal("some value")
|
||||
|
||||
all_network_acls = conn.get_all_network_acls()
|
||||
test_network_acl = next(na for na in all_network_acls
|
||||
if na.id == network_acl.id)
|
||||
test_network_acl.tags.should.have.length_of(1)
|
||||
test_network_acl.tags["a key"].should.equal("some value")
|
||||
|
|
|
|||
|
|
@ -1,10 +1,10 @@
|
|||
from __future__ import unicode_literals
|
||||
import boto
|
||||
import sure # noqa
|
||||
|
||||
from moto import mock_ec2
|
||||
|
||||
|
||||
@mock_ec2
|
||||
def test_placement_groups():
|
||||
pass
|
||||
from __future__ import unicode_literals
|
||||
import boto
|
||||
import sure # noqa
|
||||
|
||||
from moto import mock_ec2
|
||||
|
||||
|
||||
@mock_ec2
|
||||
def test_placement_groups():
|
||||
pass
|
||||
|
|
|
|||
|
|
@ -1,148 +1,148 @@
|
|||
from __future__ import unicode_literals
|
||||
import boto.ec2
|
||||
import boto.ec2.autoscale
|
||||
import boto.ec2.elb
|
||||
import sure
|
||||
from moto import mock_ec2_deprecated, mock_autoscaling_deprecated, mock_elb_deprecated
|
||||
|
||||
from moto.ec2 import ec2_backends
|
||||
|
||||
def test_use_boto_regions():
|
||||
boto_regions = {r.name for r in boto.ec2.regions()}
|
||||
moto_regions = set(ec2_backends)
|
||||
|
||||
moto_regions.should.equal(boto_regions)
|
||||
|
||||
def add_servers_to_region(ami_id, count, region):
|
||||
conn = boto.ec2.connect_to_region(region)
|
||||
for index in range(count):
|
||||
conn.run_instances(ami_id)
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_add_servers_to_a_single_region():
|
||||
region = 'ap-northeast-1'
|
||||
add_servers_to_region('ami-1234abcd', 1, region)
|
||||
add_servers_to_region('ami-5678efgh', 1, region)
|
||||
|
||||
conn = boto.ec2.connect_to_region(region)
|
||||
reservations = conn.get_all_instances()
|
||||
len(reservations).should.equal(2)
|
||||
reservations.sort(key=lambda x: x.instances[0].image_id)
|
||||
|
||||
reservations[0].instances[0].image_id.should.equal('ami-1234abcd')
|
||||
reservations[1].instances[0].image_id.should.equal('ami-5678efgh')
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_add_servers_to_multiple_regions():
|
||||
region1 = 'us-east-1'
|
||||
region2 = 'ap-northeast-1'
|
||||
add_servers_to_region('ami-1234abcd', 1, region1)
|
||||
add_servers_to_region('ami-5678efgh', 1, region2)
|
||||
|
||||
us_conn = boto.ec2.connect_to_region(region1)
|
||||
ap_conn = boto.ec2.connect_to_region(region2)
|
||||
us_reservations = us_conn.get_all_instances()
|
||||
ap_reservations = ap_conn.get_all_instances()
|
||||
|
||||
len(us_reservations).should.equal(1)
|
||||
len(ap_reservations).should.equal(1)
|
||||
|
||||
us_reservations[0].instances[0].image_id.should.equal('ami-1234abcd')
|
||||
ap_reservations[0].instances[0].image_id.should.equal('ami-5678efgh')
|
||||
|
||||
|
||||
@mock_autoscaling_deprecated
|
||||
@mock_elb_deprecated
|
||||
def test_create_autoscaling_group():
|
||||
elb_conn = boto.ec2.elb.connect_to_region('us-east-1')
|
||||
elb_conn.create_load_balancer(
|
||||
'us_test_lb', zones=[], listeners=[(80, 8080, 'http')])
|
||||
elb_conn = boto.ec2.elb.connect_to_region('ap-northeast-1')
|
||||
elb_conn.create_load_balancer(
|
||||
'ap_test_lb', zones=[], listeners=[(80, 8080, 'http')])
|
||||
|
||||
us_conn = boto.ec2.autoscale.connect_to_region('us-east-1')
|
||||
config = boto.ec2.autoscale.LaunchConfiguration(
|
||||
name='us_tester',
|
||||
image_id='ami-abcd1234',
|
||||
instance_type='m1.small',
|
||||
)
|
||||
us_conn.create_launch_configuration(config)
|
||||
|
||||
group = boto.ec2.autoscale.AutoScalingGroup(
|
||||
name='us_tester_group',
|
||||
availability_zones=['us-east-1c'],
|
||||
default_cooldown=60,
|
||||
desired_capacity=2,
|
||||
health_check_period=100,
|
||||
health_check_type="EC2",
|
||||
max_size=2,
|
||||
min_size=2,
|
||||
launch_config=config,
|
||||
load_balancers=["us_test_lb"],
|
||||
placement_group="us_test_placement",
|
||||
vpc_zone_identifier='subnet-1234abcd',
|
||||
termination_policies=["OldestInstance", "NewestInstance"],
|
||||
)
|
||||
us_conn.create_auto_scaling_group(group)
|
||||
|
||||
ap_conn = boto.ec2.autoscale.connect_to_region('ap-northeast-1')
|
||||
config = boto.ec2.autoscale.LaunchConfiguration(
|
||||
name='ap_tester',
|
||||
image_id='ami-efgh5678',
|
||||
instance_type='m1.small',
|
||||
)
|
||||
ap_conn.create_launch_configuration(config)
|
||||
|
||||
group = boto.ec2.autoscale.AutoScalingGroup(
|
||||
name='ap_tester_group',
|
||||
availability_zones=['ap-northeast-1a'],
|
||||
default_cooldown=60,
|
||||
desired_capacity=2,
|
||||
health_check_period=100,
|
||||
health_check_type="EC2",
|
||||
max_size=2,
|
||||
min_size=2,
|
||||
launch_config=config,
|
||||
load_balancers=["ap_test_lb"],
|
||||
placement_group="ap_test_placement",
|
||||
vpc_zone_identifier='subnet-5678efgh',
|
||||
termination_policies=["OldestInstance", "NewestInstance"],
|
||||
)
|
||||
ap_conn.create_auto_scaling_group(group)
|
||||
|
||||
len(us_conn.get_all_groups()).should.equal(1)
|
||||
len(ap_conn.get_all_groups()).should.equal(1)
|
||||
|
||||
us_group = us_conn.get_all_groups()[0]
|
||||
us_group.name.should.equal('us_tester_group')
|
||||
list(us_group.availability_zones).should.equal(['us-east-1c'])
|
||||
us_group.desired_capacity.should.equal(2)
|
||||
us_group.max_size.should.equal(2)
|
||||
us_group.min_size.should.equal(2)
|
||||
us_group.vpc_zone_identifier.should.equal('subnet-1234abcd')
|
||||
us_group.launch_config_name.should.equal('us_tester')
|
||||
us_group.default_cooldown.should.equal(60)
|
||||
us_group.health_check_period.should.equal(100)
|
||||
us_group.health_check_type.should.equal("EC2")
|
||||
list(us_group.load_balancers).should.equal(["us_test_lb"])
|
||||
us_group.placement_group.should.equal("us_test_placement")
|
||||
list(us_group.termination_policies).should.equal(
|
||||
["OldestInstance", "NewestInstance"])
|
||||
|
||||
ap_group = ap_conn.get_all_groups()[0]
|
||||
ap_group.name.should.equal('ap_tester_group')
|
||||
list(ap_group.availability_zones).should.equal(['ap-northeast-1a'])
|
||||
ap_group.desired_capacity.should.equal(2)
|
||||
ap_group.max_size.should.equal(2)
|
||||
ap_group.min_size.should.equal(2)
|
||||
ap_group.vpc_zone_identifier.should.equal('subnet-5678efgh')
|
||||
ap_group.launch_config_name.should.equal('ap_tester')
|
||||
ap_group.default_cooldown.should.equal(60)
|
||||
ap_group.health_check_period.should.equal(100)
|
||||
ap_group.health_check_type.should.equal("EC2")
|
||||
list(ap_group.load_balancers).should.equal(["ap_test_lb"])
|
||||
ap_group.placement_group.should.equal("ap_test_placement")
|
||||
list(ap_group.termination_policies).should.equal(
|
||||
["OldestInstance", "NewestInstance"])
|
||||
from __future__ import unicode_literals
|
||||
import boto.ec2
|
||||
import boto.ec2.autoscale
|
||||
import boto.ec2.elb
|
||||
import sure
|
||||
from moto import mock_ec2_deprecated, mock_autoscaling_deprecated, mock_elb_deprecated
|
||||
|
||||
from moto.ec2 import ec2_backends
|
||||
|
||||
def test_use_boto_regions():
|
||||
boto_regions = {r.name for r in boto.ec2.regions()}
|
||||
moto_regions = set(ec2_backends)
|
||||
|
||||
moto_regions.should.equal(boto_regions)
|
||||
|
||||
def add_servers_to_region(ami_id, count, region):
|
||||
conn = boto.ec2.connect_to_region(region)
|
||||
for index in range(count):
|
||||
conn.run_instances(ami_id)
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_add_servers_to_a_single_region():
|
||||
region = 'ap-northeast-1'
|
||||
add_servers_to_region('ami-1234abcd', 1, region)
|
||||
add_servers_to_region('ami-5678efgh', 1, region)
|
||||
|
||||
conn = boto.ec2.connect_to_region(region)
|
||||
reservations = conn.get_all_instances()
|
||||
len(reservations).should.equal(2)
|
||||
reservations.sort(key=lambda x: x.instances[0].image_id)
|
||||
|
||||
reservations[0].instances[0].image_id.should.equal('ami-1234abcd')
|
||||
reservations[1].instances[0].image_id.should.equal('ami-5678efgh')
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_add_servers_to_multiple_regions():
|
||||
region1 = 'us-east-1'
|
||||
region2 = 'ap-northeast-1'
|
||||
add_servers_to_region('ami-1234abcd', 1, region1)
|
||||
add_servers_to_region('ami-5678efgh', 1, region2)
|
||||
|
||||
us_conn = boto.ec2.connect_to_region(region1)
|
||||
ap_conn = boto.ec2.connect_to_region(region2)
|
||||
us_reservations = us_conn.get_all_instances()
|
||||
ap_reservations = ap_conn.get_all_instances()
|
||||
|
||||
len(us_reservations).should.equal(1)
|
||||
len(ap_reservations).should.equal(1)
|
||||
|
||||
us_reservations[0].instances[0].image_id.should.equal('ami-1234abcd')
|
||||
ap_reservations[0].instances[0].image_id.should.equal('ami-5678efgh')
|
||||
|
||||
|
||||
@mock_autoscaling_deprecated
|
||||
@mock_elb_deprecated
|
||||
def test_create_autoscaling_group():
|
||||
elb_conn = boto.ec2.elb.connect_to_region('us-east-1')
|
||||
elb_conn.create_load_balancer(
|
||||
'us_test_lb', zones=[], listeners=[(80, 8080, 'http')])
|
||||
elb_conn = boto.ec2.elb.connect_to_region('ap-northeast-1')
|
||||
elb_conn.create_load_balancer(
|
||||
'ap_test_lb', zones=[], listeners=[(80, 8080, 'http')])
|
||||
|
||||
us_conn = boto.ec2.autoscale.connect_to_region('us-east-1')
|
||||
config = boto.ec2.autoscale.LaunchConfiguration(
|
||||
name='us_tester',
|
||||
image_id='ami-abcd1234',
|
||||
instance_type='m1.small',
|
||||
)
|
||||
us_conn.create_launch_configuration(config)
|
||||
|
||||
group = boto.ec2.autoscale.AutoScalingGroup(
|
||||
name='us_tester_group',
|
||||
availability_zones=['us-east-1c'],
|
||||
default_cooldown=60,
|
||||
desired_capacity=2,
|
||||
health_check_period=100,
|
||||
health_check_type="EC2",
|
||||
max_size=2,
|
||||
min_size=2,
|
||||
launch_config=config,
|
||||
load_balancers=["us_test_lb"],
|
||||
placement_group="us_test_placement",
|
||||
vpc_zone_identifier='subnet-1234abcd',
|
||||
termination_policies=["OldestInstance", "NewestInstance"],
|
||||
)
|
||||
us_conn.create_auto_scaling_group(group)
|
||||
|
||||
ap_conn = boto.ec2.autoscale.connect_to_region('ap-northeast-1')
|
||||
config = boto.ec2.autoscale.LaunchConfiguration(
|
||||
name='ap_tester',
|
||||
image_id='ami-efgh5678',
|
||||
instance_type='m1.small',
|
||||
)
|
||||
ap_conn.create_launch_configuration(config)
|
||||
|
||||
group = boto.ec2.autoscale.AutoScalingGroup(
|
||||
name='ap_tester_group',
|
||||
availability_zones=['ap-northeast-1a'],
|
||||
default_cooldown=60,
|
||||
desired_capacity=2,
|
||||
health_check_period=100,
|
||||
health_check_type="EC2",
|
||||
max_size=2,
|
||||
min_size=2,
|
||||
launch_config=config,
|
||||
load_balancers=["ap_test_lb"],
|
||||
placement_group="ap_test_placement",
|
||||
vpc_zone_identifier='subnet-5678efgh',
|
||||
termination_policies=["OldestInstance", "NewestInstance"],
|
||||
)
|
||||
ap_conn.create_auto_scaling_group(group)
|
||||
|
||||
len(us_conn.get_all_groups()).should.equal(1)
|
||||
len(ap_conn.get_all_groups()).should.equal(1)
|
||||
|
||||
us_group = us_conn.get_all_groups()[0]
|
||||
us_group.name.should.equal('us_tester_group')
|
||||
list(us_group.availability_zones).should.equal(['us-east-1c'])
|
||||
us_group.desired_capacity.should.equal(2)
|
||||
us_group.max_size.should.equal(2)
|
||||
us_group.min_size.should.equal(2)
|
||||
us_group.vpc_zone_identifier.should.equal('subnet-1234abcd')
|
||||
us_group.launch_config_name.should.equal('us_tester')
|
||||
us_group.default_cooldown.should.equal(60)
|
||||
us_group.health_check_period.should.equal(100)
|
||||
us_group.health_check_type.should.equal("EC2")
|
||||
list(us_group.load_balancers).should.equal(["us_test_lb"])
|
||||
us_group.placement_group.should.equal("us_test_placement")
|
||||
list(us_group.termination_policies).should.equal(
|
||||
["OldestInstance", "NewestInstance"])
|
||||
|
||||
ap_group = ap_conn.get_all_groups()[0]
|
||||
ap_group.name.should.equal('ap_tester_group')
|
||||
list(ap_group.availability_zones).should.equal(['ap-northeast-1a'])
|
||||
ap_group.desired_capacity.should.equal(2)
|
||||
ap_group.max_size.should.equal(2)
|
||||
ap_group.min_size.should.equal(2)
|
||||
ap_group.vpc_zone_identifier.should.equal('subnet-5678efgh')
|
||||
ap_group.launch_config_name.should.equal('ap_tester')
|
||||
ap_group.default_cooldown.should.equal(60)
|
||||
ap_group.health_check_period.should.equal(100)
|
||||
ap_group.health_check_type.should.equal("EC2")
|
||||
list(ap_group.load_balancers).should.equal(["ap_test_lb"])
|
||||
ap_group.placement_group.should.equal("ap_test_placement")
|
||||
list(ap_group.termination_policies).should.equal(
|
||||
["OldestInstance", "NewestInstance"])
|
||||
|
|
|
|||
|
|
@ -1,10 +1,10 @@
|
|||
from __future__ import unicode_literals
|
||||
import boto
|
||||
import sure # noqa
|
||||
|
||||
from moto import mock_ec2
|
||||
|
||||
|
||||
@mock_ec2
|
||||
def test_reserved_instances():
|
||||
pass
|
||||
from __future__ import unicode_literals
|
||||
import boto
|
||||
import sure # noqa
|
||||
|
||||
from moto import mock_ec2
|
||||
|
||||
|
||||
@mock_ec2
|
||||
def test_reserved_instances():
|
||||
pass
|
||||
|
|
|
|||
File diff suppressed because it is too large
Load diff
File diff suppressed because it is too large
Load diff
|
|
@ -1,26 +1,26 @@
|
|||
from __future__ import unicode_literals
|
||||
import re
|
||||
import sure # noqa
|
||||
|
||||
import moto.server as server
|
||||
|
||||
'''
|
||||
Test the different server responses
|
||||
'''
|
||||
|
||||
|
||||
def test_ec2_server_get():
|
||||
backend = server.create_backend_app("ec2")
|
||||
test_client = backend.test_client()
|
||||
|
||||
res = test_client.get(
|
||||
'/?Action=RunInstances&ImageId=ami-60a54009',
|
||||
headers={"Host": "ec2.us-east-1.amazonaws.com"}
|
||||
)
|
||||
|
||||
groups = re.search("<instanceId>(.*)</instanceId>",
|
||||
res.data.decode('utf-8'))
|
||||
instance_id = groups.groups()[0]
|
||||
|
||||
res = test_client.get('/?Action=DescribeInstances')
|
||||
res.data.decode('utf-8').should.contain(instance_id)
|
||||
from __future__ import unicode_literals
|
||||
import re
|
||||
import sure # noqa
|
||||
|
||||
import moto.server as server
|
||||
|
||||
'''
|
||||
Test the different server responses
|
||||
'''
|
||||
|
||||
|
||||
def test_ec2_server_get():
|
||||
backend = server.create_backend_app("ec2")
|
||||
test_client = backend.test_client()
|
||||
|
||||
res = test_client.get(
|
||||
'/?Action=RunInstances&ImageId=ami-60a54009',
|
||||
headers={"Host": "ec2.us-east-1.amazonaws.com"}
|
||||
)
|
||||
|
||||
groups = re.search("<instanceId>(.*)</instanceId>",
|
||||
res.data.decode('utf-8'))
|
||||
instance_id = groups.groups()[0]
|
||||
|
||||
res = test_client.get('/?Action=DescribeInstances')
|
||||
res.data.decode('utf-8').should.contain(instance_id)
|
||||
|
|
|
|||
|
|
@ -1,345 +1,345 @@
|
|||
from __future__ import unicode_literals
|
||||
|
||||
import boto3
|
||||
import sure # noqa
|
||||
|
||||
from moto import mock_ec2
|
||||
|
||||
|
||||
def get_subnet_id(conn):
|
||||
vpc = conn.create_vpc(CidrBlock="10.0.0.0/8")['Vpc']
|
||||
subnet = conn.create_subnet(
|
||||
VpcId=vpc['VpcId'], CidrBlock='10.0.0.0/16', AvailabilityZone='us-east-1a')['Subnet']
|
||||
subnet_id = subnet['SubnetId']
|
||||
return subnet_id
|
||||
|
||||
|
||||
def spot_config(subnet_id, allocation_strategy="lowestPrice"):
|
||||
return {
|
||||
'ClientToken': 'string',
|
||||
'SpotPrice': '0.12',
|
||||
'TargetCapacity': 6,
|
||||
'IamFleetRole': 'arn:aws:iam::123456789012:role/fleet',
|
||||
'LaunchSpecifications': [{
|
||||
'ImageId': 'ami-123',
|
||||
'KeyName': 'my-key',
|
||||
'SecurityGroups': [
|
||||
{
|
||||
'GroupId': 'sg-123'
|
||||
},
|
||||
],
|
||||
'UserData': 'some user data',
|
||||
'InstanceType': 't2.small',
|
||||
'BlockDeviceMappings': [
|
||||
{
|
||||
'VirtualName': 'string',
|
||||
'DeviceName': 'string',
|
||||
'Ebs': {
|
||||
'SnapshotId': 'string',
|
||||
'VolumeSize': 123,
|
||||
'DeleteOnTermination': True | False,
|
||||
'VolumeType': 'standard',
|
||||
'Iops': 123,
|
||||
'Encrypted': True | False
|
||||
},
|
||||
'NoDevice': 'string'
|
||||
},
|
||||
],
|
||||
'Monitoring': {
|
||||
'Enabled': True
|
||||
},
|
||||
'SubnetId': subnet_id,
|
||||
'IamInstanceProfile': {
|
||||
'Arn': 'arn:aws:iam::123456789012:role/fleet'
|
||||
},
|
||||
'EbsOptimized': False,
|
||||
'WeightedCapacity': 2.0,
|
||||
'SpotPrice': '0.13'
|
||||
}, {
|
||||
'ImageId': 'ami-123',
|
||||
'KeyName': 'my-key',
|
||||
'SecurityGroups': [
|
||||
{
|
||||
'GroupId': 'sg-123'
|
||||
},
|
||||
],
|
||||
'UserData': 'some user data',
|
||||
'InstanceType': 't2.large',
|
||||
'Monitoring': {
|
||||
'Enabled': True
|
||||
},
|
||||
'SubnetId': subnet_id,
|
||||
'IamInstanceProfile': {
|
||||
'Arn': 'arn:aws:iam::123456789012:role/fleet'
|
||||
},
|
||||
'EbsOptimized': False,
|
||||
'WeightedCapacity': 4.0,
|
||||
'SpotPrice': '10.00',
|
||||
}],
|
||||
'AllocationStrategy': allocation_strategy,
|
||||
'FulfilledCapacity': 6,
|
||||
}
|
||||
|
||||
|
||||
@mock_ec2
|
||||
def test_create_spot_fleet_with_lowest_price():
|
||||
conn = boto3.client("ec2", region_name='us-west-2')
|
||||
subnet_id = get_subnet_id(conn)
|
||||
|
||||
spot_fleet_res = conn.request_spot_fleet(
|
||||
SpotFleetRequestConfig=spot_config(subnet_id)
|
||||
)
|
||||
spot_fleet_id = spot_fleet_res['SpotFleetRequestId']
|
||||
|
||||
spot_fleet_requests = conn.describe_spot_fleet_requests(
|
||||
SpotFleetRequestIds=[spot_fleet_id])['SpotFleetRequestConfigs']
|
||||
len(spot_fleet_requests).should.equal(1)
|
||||
spot_fleet_request = spot_fleet_requests[0]
|
||||
spot_fleet_request['SpotFleetRequestState'].should.equal("active")
|
||||
spot_fleet_config = spot_fleet_request['SpotFleetRequestConfig']
|
||||
|
||||
spot_fleet_config['SpotPrice'].should.equal('0.12')
|
||||
spot_fleet_config['TargetCapacity'].should.equal(6)
|
||||
spot_fleet_config['IamFleetRole'].should.equal(
|
||||
'arn:aws:iam::123456789012:role/fleet')
|
||||
spot_fleet_config['AllocationStrategy'].should.equal('lowestPrice')
|
||||
spot_fleet_config['FulfilledCapacity'].should.equal(6.0)
|
||||
|
||||
len(spot_fleet_config['LaunchSpecifications']).should.equal(2)
|
||||
launch_spec = spot_fleet_config['LaunchSpecifications'][0]
|
||||
|
||||
launch_spec['EbsOptimized'].should.equal(False)
|
||||
launch_spec['SecurityGroups'].should.equal([{"GroupId": "sg-123"}])
|
||||
launch_spec['IamInstanceProfile'].should.equal(
|
||||
{"Arn": "arn:aws:iam::123456789012:role/fleet"})
|
||||
launch_spec['ImageId'].should.equal("ami-123")
|
||||
launch_spec['InstanceType'].should.equal("t2.small")
|
||||
launch_spec['KeyName'].should.equal("my-key")
|
||||
launch_spec['Monitoring'].should.equal({"Enabled": True})
|
||||
launch_spec['SpotPrice'].should.equal("0.13")
|
||||
launch_spec['SubnetId'].should.equal(subnet_id)
|
||||
launch_spec['UserData'].should.equal("some user data")
|
||||
launch_spec['WeightedCapacity'].should.equal(2.0)
|
||||
|
||||
instance_res = conn.describe_spot_fleet_instances(
|
||||
SpotFleetRequestId=spot_fleet_id)
|
||||
instances = instance_res['ActiveInstances']
|
||||
len(instances).should.equal(3)
|
||||
|
||||
|
||||
@mock_ec2
|
||||
def test_create_diversified_spot_fleet():
|
||||
conn = boto3.client("ec2", region_name='us-west-2')
|
||||
subnet_id = get_subnet_id(conn)
|
||||
diversified_config = spot_config(
|
||||
subnet_id, allocation_strategy='diversified')
|
||||
|
||||
spot_fleet_res = conn.request_spot_fleet(
|
||||
SpotFleetRequestConfig=diversified_config
|
||||
)
|
||||
spot_fleet_id = spot_fleet_res['SpotFleetRequestId']
|
||||
|
||||
instance_res = conn.describe_spot_fleet_instances(
|
||||
SpotFleetRequestId=spot_fleet_id)
|
||||
instances = instance_res['ActiveInstances']
|
||||
len(instances).should.equal(2)
|
||||
instance_types = set([instance['InstanceType'] for instance in instances])
|
||||
instance_types.should.equal(set(["t2.small", "t2.large"]))
|
||||
instances[0]['InstanceId'].should.contain("i-")
|
||||
|
||||
|
||||
@mock_ec2
|
||||
def test_cancel_spot_fleet_request():
|
||||
conn = boto3.client("ec2", region_name='us-west-2')
|
||||
subnet_id = get_subnet_id(conn)
|
||||
|
||||
spot_fleet_res = conn.request_spot_fleet(
|
||||
SpotFleetRequestConfig=spot_config(subnet_id),
|
||||
)
|
||||
spot_fleet_id = spot_fleet_res['SpotFleetRequestId']
|
||||
|
||||
conn.cancel_spot_fleet_requests(
|
||||
SpotFleetRequestIds=[spot_fleet_id], TerminateInstances=True)
|
||||
|
||||
spot_fleet_requests = conn.describe_spot_fleet_requests(
|
||||
SpotFleetRequestIds=[spot_fleet_id])['SpotFleetRequestConfigs']
|
||||
len(spot_fleet_requests).should.equal(0)
|
||||
|
||||
|
||||
@mock_ec2
|
||||
def test_modify_spot_fleet_request_up():
|
||||
conn = boto3.client("ec2", region_name='us-west-2')
|
||||
subnet_id = get_subnet_id(conn)
|
||||
|
||||
spot_fleet_res = conn.request_spot_fleet(
|
||||
SpotFleetRequestConfig=spot_config(subnet_id),
|
||||
)
|
||||
spot_fleet_id = spot_fleet_res['SpotFleetRequestId']
|
||||
|
||||
conn.modify_spot_fleet_request(
|
||||
SpotFleetRequestId=spot_fleet_id, TargetCapacity=20)
|
||||
|
||||
instance_res = conn.describe_spot_fleet_instances(
|
||||
SpotFleetRequestId=spot_fleet_id)
|
||||
instances = instance_res['ActiveInstances']
|
||||
len(instances).should.equal(10)
|
||||
|
||||
spot_fleet_config = conn.describe_spot_fleet_requests(
|
||||
SpotFleetRequestIds=[spot_fleet_id])['SpotFleetRequestConfigs'][0]['SpotFleetRequestConfig']
|
||||
spot_fleet_config['TargetCapacity'].should.equal(20)
|
||||
spot_fleet_config['FulfilledCapacity'].should.equal(20.0)
|
||||
|
||||
|
||||
@mock_ec2
|
||||
def test_modify_spot_fleet_request_up_diversified():
|
||||
conn = boto3.client("ec2", region_name='us-west-2')
|
||||
subnet_id = get_subnet_id(conn)
|
||||
|
||||
spot_fleet_res = conn.request_spot_fleet(
|
||||
SpotFleetRequestConfig=spot_config(
|
||||
subnet_id, allocation_strategy='diversified'),
|
||||
)
|
||||
spot_fleet_id = spot_fleet_res['SpotFleetRequestId']
|
||||
|
||||
conn.modify_spot_fleet_request(
|
||||
SpotFleetRequestId=spot_fleet_id, TargetCapacity=19)
|
||||
|
||||
instance_res = conn.describe_spot_fleet_instances(
|
||||
SpotFleetRequestId=spot_fleet_id)
|
||||
instances = instance_res['ActiveInstances']
|
||||
len(instances).should.equal(7)
|
||||
|
||||
spot_fleet_config = conn.describe_spot_fleet_requests(
|
||||
SpotFleetRequestIds=[spot_fleet_id])['SpotFleetRequestConfigs'][0]['SpotFleetRequestConfig']
|
||||
spot_fleet_config['TargetCapacity'].should.equal(19)
|
||||
spot_fleet_config['FulfilledCapacity'].should.equal(20.0)
|
||||
|
||||
|
||||
@mock_ec2
|
||||
def test_modify_spot_fleet_request_down_no_terminate():
|
||||
conn = boto3.client("ec2", region_name='us-west-2')
|
||||
subnet_id = get_subnet_id(conn)
|
||||
|
||||
spot_fleet_res = conn.request_spot_fleet(
|
||||
SpotFleetRequestConfig=spot_config(subnet_id),
|
||||
)
|
||||
spot_fleet_id = spot_fleet_res['SpotFleetRequestId']
|
||||
|
||||
conn.modify_spot_fleet_request(
|
||||
SpotFleetRequestId=spot_fleet_id, TargetCapacity=1, ExcessCapacityTerminationPolicy="noTermination")
|
||||
|
||||
instance_res = conn.describe_spot_fleet_instances(
|
||||
SpotFleetRequestId=spot_fleet_id)
|
||||
instances = instance_res['ActiveInstances']
|
||||
len(instances).should.equal(3)
|
||||
|
||||
spot_fleet_config = conn.describe_spot_fleet_requests(
|
||||
SpotFleetRequestIds=[spot_fleet_id])['SpotFleetRequestConfigs'][0]['SpotFleetRequestConfig']
|
||||
spot_fleet_config['TargetCapacity'].should.equal(1)
|
||||
spot_fleet_config['FulfilledCapacity'].should.equal(6.0)
|
||||
|
||||
|
||||
@mock_ec2
|
||||
def test_modify_spot_fleet_request_down_odd():
|
||||
conn = boto3.client("ec2", region_name='us-west-2')
|
||||
subnet_id = get_subnet_id(conn)
|
||||
|
||||
spot_fleet_res = conn.request_spot_fleet(
|
||||
SpotFleetRequestConfig=spot_config(subnet_id),
|
||||
)
|
||||
spot_fleet_id = spot_fleet_res['SpotFleetRequestId']
|
||||
|
||||
conn.modify_spot_fleet_request(
|
||||
SpotFleetRequestId=spot_fleet_id, TargetCapacity=7)
|
||||
conn.modify_spot_fleet_request(
|
||||
SpotFleetRequestId=spot_fleet_id, TargetCapacity=5)
|
||||
|
||||
instance_res = conn.describe_spot_fleet_instances(
|
||||
SpotFleetRequestId=spot_fleet_id)
|
||||
instances = instance_res['ActiveInstances']
|
||||
len(instances).should.equal(3)
|
||||
|
||||
spot_fleet_config = conn.describe_spot_fleet_requests(
|
||||
SpotFleetRequestIds=[spot_fleet_id])['SpotFleetRequestConfigs'][0]['SpotFleetRequestConfig']
|
||||
spot_fleet_config['TargetCapacity'].should.equal(5)
|
||||
spot_fleet_config['FulfilledCapacity'].should.equal(6.0)
|
||||
|
||||
|
||||
@mock_ec2
|
||||
def test_modify_spot_fleet_request_down():
|
||||
conn = boto3.client("ec2", region_name='us-west-2')
|
||||
subnet_id = get_subnet_id(conn)
|
||||
|
||||
spot_fleet_res = conn.request_spot_fleet(
|
||||
SpotFleetRequestConfig=spot_config(subnet_id),
|
||||
)
|
||||
spot_fleet_id = spot_fleet_res['SpotFleetRequestId']
|
||||
|
||||
conn.modify_spot_fleet_request(
|
||||
SpotFleetRequestId=spot_fleet_id, TargetCapacity=1)
|
||||
|
||||
instance_res = conn.describe_spot_fleet_instances(
|
||||
SpotFleetRequestId=spot_fleet_id)
|
||||
instances = instance_res['ActiveInstances']
|
||||
len(instances).should.equal(1)
|
||||
|
||||
spot_fleet_config = conn.describe_spot_fleet_requests(
|
||||
SpotFleetRequestIds=[spot_fleet_id])['SpotFleetRequestConfigs'][0]['SpotFleetRequestConfig']
|
||||
spot_fleet_config['TargetCapacity'].should.equal(1)
|
||||
spot_fleet_config['FulfilledCapacity'].should.equal(2.0)
|
||||
|
||||
|
||||
@mock_ec2
|
||||
def test_modify_spot_fleet_request_down_no_terminate_after_custom_terminate():
|
||||
conn = boto3.client("ec2", region_name='us-west-2')
|
||||
subnet_id = get_subnet_id(conn)
|
||||
|
||||
spot_fleet_res = conn.request_spot_fleet(
|
||||
SpotFleetRequestConfig=spot_config(subnet_id),
|
||||
)
|
||||
spot_fleet_id = spot_fleet_res['SpotFleetRequestId']
|
||||
|
||||
instance_res = conn.describe_spot_fleet_instances(
|
||||
SpotFleetRequestId=spot_fleet_id)
|
||||
instances = instance_res['ActiveInstances']
|
||||
conn.terminate_instances(InstanceIds=[i['InstanceId'] for i in instances[1:]])
|
||||
|
||||
conn.modify_spot_fleet_request(
|
||||
SpotFleetRequestId=spot_fleet_id, TargetCapacity=1, ExcessCapacityTerminationPolicy="noTermination")
|
||||
|
||||
instance_res = conn.describe_spot_fleet_instances(
|
||||
SpotFleetRequestId=spot_fleet_id)
|
||||
instances = instance_res['ActiveInstances']
|
||||
len(instances).should.equal(1)
|
||||
|
||||
spot_fleet_config = conn.describe_spot_fleet_requests(
|
||||
SpotFleetRequestIds=[spot_fleet_id])['SpotFleetRequestConfigs'][0]['SpotFleetRequestConfig']
|
||||
spot_fleet_config['TargetCapacity'].should.equal(1)
|
||||
spot_fleet_config['FulfilledCapacity'].should.equal(2.0)
|
||||
|
||||
|
||||
@mock_ec2
|
||||
def test_create_spot_fleet_without_spot_price():
|
||||
conn = boto3.client("ec2", region_name='us-west-2')
|
||||
subnet_id = get_subnet_id(conn)
|
||||
|
||||
# remove prices to force a fallback to ondemand price
|
||||
spot_config_without_price = spot_config(subnet_id)
|
||||
del spot_config_without_price['SpotPrice']
|
||||
for spec in spot_config_without_price['LaunchSpecifications']:
|
||||
del spec['SpotPrice']
|
||||
|
||||
spot_fleet_id = conn.request_spot_fleet(SpotFleetRequestConfig=spot_config_without_price)['SpotFleetRequestId']
|
||||
spot_fleet_requests = conn.describe_spot_fleet_requests(
|
||||
SpotFleetRequestIds=[spot_fleet_id])['SpotFleetRequestConfigs']
|
||||
len(spot_fleet_requests).should.equal(1)
|
||||
spot_fleet_request = spot_fleet_requests[0]
|
||||
spot_fleet_config = spot_fleet_request['SpotFleetRequestConfig']
|
||||
|
||||
len(spot_fleet_config['LaunchSpecifications']).should.equal(2)
|
||||
launch_spec1 = spot_fleet_config['LaunchSpecifications'][0]
|
||||
launch_spec2 = spot_fleet_config['LaunchSpecifications'][1]
|
||||
|
||||
# AWS will figure out the price
|
||||
assert 'SpotPrice' not in launch_spec1
|
||||
assert 'SpotPrice' not in launch_spec2
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import boto3
|
||||
import sure # noqa
|
||||
|
||||
from moto import mock_ec2
|
||||
|
||||
|
||||
def get_subnet_id(conn):
|
||||
vpc = conn.create_vpc(CidrBlock="10.0.0.0/8")['Vpc']
|
||||
subnet = conn.create_subnet(
|
||||
VpcId=vpc['VpcId'], CidrBlock='10.0.0.0/16', AvailabilityZone='us-east-1a')['Subnet']
|
||||
subnet_id = subnet['SubnetId']
|
||||
return subnet_id
|
||||
|
||||
|
||||
def spot_config(subnet_id, allocation_strategy="lowestPrice"):
|
||||
return {
|
||||
'ClientToken': 'string',
|
||||
'SpotPrice': '0.12',
|
||||
'TargetCapacity': 6,
|
||||
'IamFleetRole': 'arn:aws:iam::123456789012:role/fleet',
|
||||
'LaunchSpecifications': [{
|
||||
'ImageId': 'ami-123',
|
||||
'KeyName': 'my-key',
|
||||
'SecurityGroups': [
|
||||
{
|
||||
'GroupId': 'sg-123'
|
||||
},
|
||||
],
|
||||
'UserData': 'some user data',
|
||||
'InstanceType': 't2.small',
|
||||
'BlockDeviceMappings': [
|
||||
{
|
||||
'VirtualName': 'string',
|
||||
'DeviceName': 'string',
|
||||
'Ebs': {
|
||||
'SnapshotId': 'string',
|
||||
'VolumeSize': 123,
|
||||
'DeleteOnTermination': True | False,
|
||||
'VolumeType': 'standard',
|
||||
'Iops': 123,
|
||||
'Encrypted': True | False
|
||||
},
|
||||
'NoDevice': 'string'
|
||||
},
|
||||
],
|
||||
'Monitoring': {
|
||||
'Enabled': True
|
||||
},
|
||||
'SubnetId': subnet_id,
|
||||
'IamInstanceProfile': {
|
||||
'Arn': 'arn:aws:iam::123456789012:role/fleet'
|
||||
},
|
||||
'EbsOptimized': False,
|
||||
'WeightedCapacity': 2.0,
|
||||
'SpotPrice': '0.13'
|
||||
}, {
|
||||
'ImageId': 'ami-123',
|
||||
'KeyName': 'my-key',
|
||||
'SecurityGroups': [
|
||||
{
|
||||
'GroupId': 'sg-123'
|
||||
},
|
||||
],
|
||||
'UserData': 'some user data',
|
||||
'InstanceType': 't2.large',
|
||||
'Monitoring': {
|
||||
'Enabled': True
|
||||
},
|
||||
'SubnetId': subnet_id,
|
||||
'IamInstanceProfile': {
|
||||
'Arn': 'arn:aws:iam::123456789012:role/fleet'
|
||||
},
|
||||
'EbsOptimized': False,
|
||||
'WeightedCapacity': 4.0,
|
||||
'SpotPrice': '10.00',
|
||||
}],
|
||||
'AllocationStrategy': allocation_strategy,
|
||||
'FulfilledCapacity': 6,
|
||||
}
|
||||
|
||||
|
||||
@mock_ec2
|
||||
def test_create_spot_fleet_with_lowest_price():
|
||||
conn = boto3.client("ec2", region_name='us-west-2')
|
||||
subnet_id = get_subnet_id(conn)
|
||||
|
||||
spot_fleet_res = conn.request_spot_fleet(
|
||||
SpotFleetRequestConfig=spot_config(subnet_id)
|
||||
)
|
||||
spot_fleet_id = spot_fleet_res['SpotFleetRequestId']
|
||||
|
||||
spot_fleet_requests = conn.describe_spot_fleet_requests(
|
||||
SpotFleetRequestIds=[spot_fleet_id])['SpotFleetRequestConfigs']
|
||||
len(spot_fleet_requests).should.equal(1)
|
||||
spot_fleet_request = spot_fleet_requests[0]
|
||||
spot_fleet_request['SpotFleetRequestState'].should.equal("active")
|
||||
spot_fleet_config = spot_fleet_request['SpotFleetRequestConfig']
|
||||
|
||||
spot_fleet_config['SpotPrice'].should.equal('0.12')
|
||||
spot_fleet_config['TargetCapacity'].should.equal(6)
|
||||
spot_fleet_config['IamFleetRole'].should.equal(
|
||||
'arn:aws:iam::123456789012:role/fleet')
|
||||
spot_fleet_config['AllocationStrategy'].should.equal('lowestPrice')
|
||||
spot_fleet_config['FulfilledCapacity'].should.equal(6.0)
|
||||
|
||||
len(spot_fleet_config['LaunchSpecifications']).should.equal(2)
|
||||
launch_spec = spot_fleet_config['LaunchSpecifications'][0]
|
||||
|
||||
launch_spec['EbsOptimized'].should.equal(False)
|
||||
launch_spec['SecurityGroups'].should.equal([{"GroupId": "sg-123"}])
|
||||
launch_spec['IamInstanceProfile'].should.equal(
|
||||
{"Arn": "arn:aws:iam::123456789012:role/fleet"})
|
||||
launch_spec['ImageId'].should.equal("ami-123")
|
||||
launch_spec['InstanceType'].should.equal("t2.small")
|
||||
launch_spec['KeyName'].should.equal("my-key")
|
||||
launch_spec['Monitoring'].should.equal({"Enabled": True})
|
||||
launch_spec['SpotPrice'].should.equal("0.13")
|
||||
launch_spec['SubnetId'].should.equal(subnet_id)
|
||||
launch_spec['UserData'].should.equal("some user data")
|
||||
launch_spec['WeightedCapacity'].should.equal(2.0)
|
||||
|
||||
instance_res = conn.describe_spot_fleet_instances(
|
||||
SpotFleetRequestId=spot_fleet_id)
|
||||
instances = instance_res['ActiveInstances']
|
||||
len(instances).should.equal(3)
|
||||
|
||||
|
||||
@mock_ec2
|
||||
def test_create_diversified_spot_fleet():
|
||||
conn = boto3.client("ec2", region_name='us-west-2')
|
||||
subnet_id = get_subnet_id(conn)
|
||||
diversified_config = spot_config(
|
||||
subnet_id, allocation_strategy='diversified')
|
||||
|
||||
spot_fleet_res = conn.request_spot_fleet(
|
||||
SpotFleetRequestConfig=diversified_config
|
||||
)
|
||||
spot_fleet_id = spot_fleet_res['SpotFleetRequestId']
|
||||
|
||||
instance_res = conn.describe_spot_fleet_instances(
|
||||
SpotFleetRequestId=spot_fleet_id)
|
||||
instances = instance_res['ActiveInstances']
|
||||
len(instances).should.equal(2)
|
||||
instance_types = set([instance['InstanceType'] for instance in instances])
|
||||
instance_types.should.equal(set(["t2.small", "t2.large"]))
|
||||
instances[0]['InstanceId'].should.contain("i-")
|
||||
|
||||
|
||||
@mock_ec2
|
||||
def test_cancel_spot_fleet_request():
|
||||
conn = boto3.client("ec2", region_name='us-west-2')
|
||||
subnet_id = get_subnet_id(conn)
|
||||
|
||||
spot_fleet_res = conn.request_spot_fleet(
|
||||
SpotFleetRequestConfig=spot_config(subnet_id),
|
||||
)
|
||||
spot_fleet_id = spot_fleet_res['SpotFleetRequestId']
|
||||
|
||||
conn.cancel_spot_fleet_requests(
|
||||
SpotFleetRequestIds=[spot_fleet_id], TerminateInstances=True)
|
||||
|
||||
spot_fleet_requests = conn.describe_spot_fleet_requests(
|
||||
SpotFleetRequestIds=[spot_fleet_id])['SpotFleetRequestConfigs']
|
||||
len(spot_fleet_requests).should.equal(0)
|
||||
|
||||
|
||||
@mock_ec2
|
||||
def test_modify_spot_fleet_request_up():
|
||||
conn = boto3.client("ec2", region_name='us-west-2')
|
||||
subnet_id = get_subnet_id(conn)
|
||||
|
||||
spot_fleet_res = conn.request_spot_fleet(
|
||||
SpotFleetRequestConfig=spot_config(subnet_id),
|
||||
)
|
||||
spot_fleet_id = spot_fleet_res['SpotFleetRequestId']
|
||||
|
||||
conn.modify_spot_fleet_request(
|
||||
SpotFleetRequestId=spot_fleet_id, TargetCapacity=20)
|
||||
|
||||
instance_res = conn.describe_spot_fleet_instances(
|
||||
SpotFleetRequestId=spot_fleet_id)
|
||||
instances = instance_res['ActiveInstances']
|
||||
len(instances).should.equal(10)
|
||||
|
||||
spot_fleet_config = conn.describe_spot_fleet_requests(
|
||||
SpotFleetRequestIds=[spot_fleet_id])['SpotFleetRequestConfigs'][0]['SpotFleetRequestConfig']
|
||||
spot_fleet_config['TargetCapacity'].should.equal(20)
|
||||
spot_fleet_config['FulfilledCapacity'].should.equal(20.0)
|
||||
|
||||
|
||||
@mock_ec2
|
||||
def test_modify_spot_fleet_request_up_diversified():
|
||||
conn = boto3.client("ec2", region_name='us-west-2')
|
||||
subnet_id = get_subnet_id(conn)
|
||||
|
||||
spot_fleet_res = conn.request_spot_fleet(
|
||||
SpotFleetRequestConfig=spot_config(
|
||||
subnet_id, allocation_strategy='diversified'),
|
||||
)
|
||||
spot_fleet_id = spot_fleet_res['SpotFleetRequestId']
|
||||
|
||||
conn.modify_spot_fleet_request(
|
||||
SpotFleetRequestId=spot_fleet_id, TargetCapacity=19)
|
||||
|
||||
instance_res = conn.describe_spot_fleet_instances(
|
||||
SpotFleetRequestId=spot_fleet_id)
|
||||
instances = instance_res['ActiveInstances']
|
||||
len(instances).should.equal(7)
|
||||
|
||||
spot_fleet_config = conn.describe_spot_fleet_requests(
|
||||
SpotFleetRequestIds=[spot_fleet_id])['SpotFleetRequestConfigs'][0]['SpotFleetRequestConfig']
|
||||
spot_fleet_config['TargetCapacity'].should.equal(19)
|
||||
spot_fleet_config['FulfilledCapacity'].should.equal(20.0)
|
||||
|
||||
|
||||
@mock_ec2
|
||||
def test_modify_spot_fleet_request_down_no_terminate():
|
||||
conn = boto3.client("ec2", region_name='us-west-2')
|
||||
subnet_id = get_subnet_id(conn)
|
||||
|
||||
spot_fleet_res = conn.request_spot_fleet(
|
||||
SpotFleetRequestConfig=spot_config(subnet_id),
|
||||
)
|
||||
spot_fleet_id = spot_fleet_res['SpotFleetRequestId']
|
||||
|
||||
conn.modify_spot_fleet_request(
|
||||
SpotFleetRequestId=spot_fleet_id, TargetCapacity=1, ExcessCapacityTerminationPolicy="noTermination")
|
||||
|
||||
instance_res = conn.describe_spot_fleet_instances(
|
||||
SpotFleetRequestId=spot_fleet_id)
|
||||
instances = instance_res['ActiveInstances']
|
||||
len(instances).should.equal(3)
|
||||
|
||||
spot_fleet_config = conn.describe_spot_fleet_requests(
|
||||
SpotFleetRequestIds=[spot_fleet_id])['SpotFleetRequestConfigs'][0]['SpotFleetRequestConfig']
|
||||
spot_fleet_config['TargetCapacity'].should.equal(1)
|
||||
spot_fleet_config['FulfilledCapacity'].should.equal(6.0)
|
||||
|
||||
|
||||
@mock_ec2
|
||||
def test_modify_spot_fleet_request_down_odd():
|
||||
conn = boto3.client("ec2", region_name='us-west-2')
|
||||
subnet_id = get_subnet_id(conn)
|
||||
|
||||
spot_fleet_res = conn.request_spot_fleet(
|
||||
SpotFleetRequestConfig=spot_config(subnet_id),
|
||||
)
|
||||
spot_fleet_id = spot_fleet_res['SpotFleetRequestId']
|
||||
|
||||
conn.modify_spot_fleet_request(
|
||||
SpotFleetRequestId=spot_fleet_id, TargetCapacity=7)
|
||||
conn.modify_spot_fleet_request(
|
||||
SpotFleetRequestId=spot_fleet_id, TargetCapacity=5)
|
||||
|
||||
instance_res = conn.describe_spot_fleet_instances(
|
||||
SpotFleetRequestId=spot_fleet_id)
|
||||
instances = instance_res['ActiveInstances']
|
||||
len(instances).should.equal(3)
|
||||
|
||||
spot_fleet_config = conn.describe_spot_fleet_requests(
|
||||
SpotFleetRequestIds=[spot_fleet_id])['SpotFleetRequestConfigs'][0]['SpotFleetRequestConfig']
|
||||
spot_fleet_config['TargetCapacity'].should.equal(5)
|
||||
spot_fleet_config['FulfilledCapacity'].should.equal(6.0)
|
||||
|
||||
|
||||
@mock_ec2
|
||||
def test_modify_spot_fleet_request_down():
|
||||
conn = boto3.client("ec2", region_name='us-west-2')
|
||||
subnet_id = get_subnet_id(conn)
|
||||
|
||||
spot_fleet_res = conn.request_spot_fleet(
|
||||
SpotFleetRequestConfig=spot_config(subnet_id),
|
||||
)
|
||||
spot_fleet_id = spot_fleet_res['SpotFleetRequestId']
|
||||
|
||||
conn.modify_spot_fleet_request(
|
||||
SpotFleetRequestId=spot_fleet_id, TargetCapacity=1)
|
||||
|
||||
instance_res = conn.describe_spot_fleet_instances(
|
||||
SpotFleetRequestId=spot_fleet_id)
|
||||
instances = instance_res['ActiveInstances']
|
||||
len(instances).should.equal(1)
|
||||
|
||||
spot_fleet_config = conn.describe_spot_fleet_requests(
|
||||
SpotFleetRequestIds=[spot_fleet_id])['SpotFleetRequestConfigs'][0]['SpotFleetRequestConfig']
|
||||
spot_fleet_config['TargetCapacity'].should.equal(1)
|
||||
spot_fleet_config['FulfilledCapacity'].should.equal(2.0)
|
||||
|
||||
|
||||
@mock_ec2
|
||||
def test_modify_spot_fleet_request_down_no_terminate_after_custom_terminate():
|
||||
conn = boto3.client("ec2", region_name='us-west-2')
|
||||
subnet_id = get_subnet_id(conn)
|
||||
|
||||
spot_fleet_res = conn.request_spot_fleet(
|
||||
SpotFleetRequestConfig=spot_config(subnet_id),
|
||||
)
|
||||
spot_fleet_id = spot_fleet_res['SpotFleetRequestId']
|
||||
|
||||
instance_res = conn.describe_spot_fleet_instances(
|
||||
SpotFleetRequestId=spot_fleet_id)
|
||||
instances = instance_res['ActiveInstances']
|
||||
conn.terminate_instances(InstanceIds=[i['InstanceId'] for i in instances[1:]])
|
||||
|
||||
conn.modify_spot_fleet_request(
|
||||
SpotFleetRequestId=spot_fleet_id, TargetCapacity=1, ExcessCapacityTerminationPolicy="noTermination")
|
||||
|
||||
instance_res = conn.describe_spot_fleet_instances(
|
||||
SpotFleetRequestId=spot_fleet_id)
|
||||
instances = instance_res['ActiveInstances']
|
||||
len(instances).should.equal(1)
|
||||
|
||||
spot_fleet_config = conn.describe_spot_fleet_requests(
|
||||
SpotFleetRequestIds=[spot_fleet_id])['SpotFleetRequestConfigs'][0]['SpotFleetRequestConfig']
|
||||
spot_fleet_config['TargetCapacity'].should.equal(1)
|
||||
spot_fleet_config['FulfilledCapacity'].should.equal(2.0)
|
||||
|
||||
|
||||
@mock_ec2
|
||||
def test_create_spot_fleet_without_spot_price():
|
||||
conn = boto3.client("ec2", region_name='us-west-2')
|
||||
subnet_id = get_subnet_id(conn)
|
||||
|
||||
# remove prices to force a fallback to ondemand price
|
||||
spot_config_without_price = spot_config(subnet_id)
|
||||
del spot_config_without_price['SpotPrice']
|
||||
for spec in spot_config_without_price['LaunchSpecifications']:
|
||||
del spec['SpotPrice']
|
||||
|
||||
spot_fleet_id = conn.request_spot_fleet(SpotFleetRequestConfig=spot_config_without_price)['SpotFleetRequestId']
|
||||
spot_fleet_requests = conn.describe_spot_fleet_requests(
|
||||
SpotFleetRequestIds=[spot_fleet_id])['SpotFleetRequestConfigs']
|
||||
len(spot_fleet_requests).should.equal(1)
|
||||
spot_fleet_request = spot_fleet_requests[0]
|
||||
spot_fleet_config = spot_fleet_request['SpotFleetRequestConfig']
|
||||
|
||||
len(spot_fleet_config['LaunchSpecifications']).should.equal(2)
|
||||
launch_spec1 = spot_fleet_config['LaunchSpecifications'][0]
|
||||
launch_spec2 = spot_fleet_config['LaunchSpecifications'][1]
|
||||
|
||||
# AWS will figure out the price
|
||||
assert 'SpotPrice' not in launch_spec1
|
||||
assert 'SpotPrice' not in launch_spec2
|
||||
|
|
|
|||
|
|
@ -1,268 +1,268 @@
|
|||
from __future__ import unicode_literals
|
||||
from nose.tools import assert_raises
|
||||
import datetime
|
||||
|
||||
import boto
|
||||
import boto3
|
||||
from boto.exception import EC2ResponseError
|
||||
from botocore.exceptions import ClientError
|
||||
import pytz
|
||||
import sure # noqa
|
||||
|
||||
from moto import mock_ec2, mock_ec2_deprecated
|
||||
from moto.backends import get_model
|
||||
from moto.core.utils import iso_8601_datetime_with_milliseconds
|
||||
|
||||
|
||||
@mock_ec2
|
||||
def test_request_spot_instances():
|
||||
conn = boto3.client('ec2', 'us-east-1')
|
||||
vpc = conn.create_vpc(CidrBlock="10.0.0.0/8")['Vpc']
|
||||
subnet = conn.create_subnet(
|
||||
VpcId=vpc['VpcId'], CidrBlock='10.0.0.0/16', AvailabilityZone='us-east-1a')['Subnet']
|
||||
subnet_id = subnet['SubnetId']
|
||||
|
||||
conn.create_security_group(GroupName='group1', Description='description')
|
||||
conn.create_security_group(GroupName='group2', Description='description')
|
||||
|
||||
start_dt = datetime.datetime(2013, 1, 1).replace(tzinfo=pytz.utc)
|
||||
end_dt = datetime.datetime(2013, 1, 2).replace(tzinfo=pytz.utc)
|
||||
start = iso_8601_datetime_with_milliseconds(start_dt)
|
||||
end = iso_8601_datetime_with_milliseconds(end_dt)
|
||||
|
||||
with assert_raises(ClientError) as ex:
|
||||
request = conn.request_spot_instances(
|
||||
SpotPrice="0.5", InstanceCount=1, Type='one-time',
|
||||
ValidFrom=start, ValidUntil=end, LaunchGroup="the-group",
|
||||
AvailabilityZoneGroup='my-group',
|
||||
LaunchSpecification={
|
||||
"ImageId": 'ami-abcd1234',
|
||||
"KeyName": "test",
|
||||
"SecurityGroups": ['group1', 'group2'],
|
||||
"UserData": "some test data",
|
||||
"InstanceType": 'm1.small',
|
||||
"Placement": {
|
||||
"AvailabilityZone": 'us-east-1c',
|
||||
},
|
||||
"KernelId": "test-kernel",
|
||||
"RamdiskId": "test-ramdisk",
|
||||
"Monitoring": {
|
||||
"Enabled": True,
|
||||
},
|
||||
"SubnetId": subnet_id,
|
||||
},
|
||||
DryRun=True,
|
||||
)
|
||||
ex.exception.response['Error']['Code'].should.equal('DryRunOperation')
|
||||
ex.exception.response['ResponseMetadata'][
|
||||
'HTTPStatusCode'].should.equal(400)
|
||||
ex.exception.response['Error']['Message'].should.equal(
|
||||
'An error occurred (DryRunOperation) when calling the RequestSpotInstance operation: Request would have succeeded, but DryRun flag is set')
|
||||
|
||||
request = conn.request_spot_instances(
|
||||
SpotPrice="0.5", InstanceCount=1, Type='one-time',
|
||||
ValidFrom=start, ValidUntil=end, LaunchGroup="the-group",
|
||||
AvailabilityZoneGroup='my-group',
|
||||
LaunchSpecification={
|
||||
"ImageId": 'ami-abcd1234',
|
||||
"KeyName": "test",
|
||||
"SecurityGroups": ['group1', 'group2'],
|
||||
"UserData": "some test data",
|
||||
"InstanceType": 'm1.small',
|
||||
"Placement": {
|
||||
"AvailabilityZone": 'us-east-1c',
|
||||
},
|
||||
"KernelId": "test-kernel",
|
||||
"RamdiskId": "test-ramdisk",
|
||||
"Monitoring": {
|
||||
"Enabled": True,
|
||||
},
|
||||
"SubnetId": subnet_id,
|
||||
},
|
||||
)
|
||||
|
||||
requests = conn.describe_spot_instance_requests()['SpotInstanceRequests']
|
||||
requests.should.have.length_of(1)
|
||||
request = requests[0]
|
||||
|
||||
request['State'].should.equal("open")
|
||||
request['SpotPrice'].should.equal("0.5")
|
||||
request['Type'].should.equal('one-time')
|
||||
request['ValidFrom'].should.equal(start_dt)
|
||||
request['ValidUntil'].should.equal(end_dt)
|
||||
request['LaunchGroup'].should.equal("the-group")
|
||||
request['AvailabilityZoneGroup'].should.equal('my-group')
|
||||
|
||||
launch_spec = request['LaunchSpecification']
|
||||
security_group_names = [group['GroupName']
|
||||
for group in launch_spec['SecurityGroups']]
|
||||
set(security_group_names).should.equal(set(['group1', 'group2']))
|
||||
|
||||
launch_spec['ImageId'].should.equal('ami-abcd1234')
|
||||
launch_spec['KeyName'].should.equal("test")
|
||||
launch_spec['InstanceType'].should.equal('m1.small')
|
||||
launch_spec['KernelId'].should.equal("test-kernel")
|
||||
launch_spec['RamdiskId'].should.equal("test-ramdisk")
|
||||
launch_spec['SubnetId'].should.equal(subnet_id)
|
||||
|
||||
|
||||
@mock_ec2
|
||||
def test_request_spot_instances_default_arguments():
|
||||
"""
|
||||
Test that moto set the correct default arguments
|
||||
"""
|
||||
conn = boto3.client('ec2', 'us-east-1')
|
||||
|
||||
request = conn.request_spot_instances(
|
||||
SpotPrice="0.5",
|
||||
LaunchSpecification={
|
||||
"ImageId": 'ami-abcd1234',
|
||||
}
|
||||
)
|
||||
|
||||
requests = conn.describe_spot_instance_requests()['SpotInstanceRequests']
|
||||
requests.should.have.length_of(1)
|
||||
request = requests[0]
|
||||
|
||||
request['State'].should.equal("open")
|
||||
request['SpotPrice'].should.equal("0.5")
|
||||
request['Type'].should.equal('one-time')
|
||||
request.shouldnt.contain('ValidFrom')
|
||||
request.shouldnt.contain('ValidUntil')
|
||||
request.shouldnt.contain('LaunchGroup')
|
||||
request.shouldnt.contain('AvailabilityZoneGroup')
|
||||
|
||||
launch_spec = request['LaunchSpecification']
|
||||
|
||||
security_group_names = [group['GroupName']
|
||||
for group in launch_spec['SecurityGroups']]
|
||||
security_group_names.should.equal(["default"])
|
||||
|
||||
launch_spec['ImageId'].should.equal('ami-abcd1234')
|
||||
request.shouldnt.contain('KeyName')
|
||||
launch_spec['InstanceType'].should.equal('m1.small')
|
||||
request.shouldnt.contain('KernelId')
|
||||
request.shouldnt.contain('RamdiskId')
|
||||
request.shouldnt.contain('SubnetId')
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_cancel_spot_instance_request():
|
||||
conn = boto.connect_ec2()
|
||||
|
||||
conn.request_spot_instances(
|
||||
price=0.5, image_id='ami-abcd1234',
|
||||
)
|
||||
|
||||
requests = conn.get_all_spot_instance_requests()
|
||||
requests.should.have.length_of(1)
|
||||
|
||||
with assert_raises(EC2ResponseError) as ex:
|
||||
conn.cancel_spot_instance_requests([requests[0].id], dry_run=True)
|
||||
ex.exception.error_code.should.equal('DryRunOperation')
|
||||
ex.exception.status.should.equal(400)
|
||||
ex.exception.message.should.equal(
|
||||
'An error occurred (DryRunOperation) when calling the CancelSpotInstance operation: Request would have succeeded, but DryRun flag is set')
|
||||
|
||||
conn.cancel_spot_instance_requests([requests[0].id])
|
||||
|
||||
requests = conn.get_all_spot_instance_requests()
|
||||
requests.should.have.length_of(0)
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_request_spot_instances_fulfilled():
|
||||
"""
|
||||
Test that moto correctly fullfills a spot instance request
|
||||
"""
|
||||
conn = boto.ec2.connect_to_region("us-east-1")
|
||||
|
||||
request = conn.request_spot_instances(
|
||||
price=0.5, image_id='ami-abcd1234',
|
||||
)
|
||||
|
||||
requests = conn.get_all_spot_instance_requests()
|
||||
requests.should.have.length_of(1)
|
||||
request = requests[0]
|
||||
|
||||
request.state.should.equal("open")
|
||||
|
||||
get_model('SpotInstanceRequest', 'us-east-1')[0].state = 'active'
|
||||
|
||||
requests = conn.get_all_spot_instance_requests()
|
||||
requests.should.have.length_of(1)
|
||||
request = requests[0]
|
||||
|
||||
request.state.should.equal("active")
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_tag_spot_instance_request():
|
||||
"""
|
||||
Test that moto correctly tags a spot instance request
|
||||
"""
|
||||
conn = boto.connect_ec2()
|
||||
|
||||
request = conn.request_spot_instances(
|
||||
price=0.5, image_id='ami-abcd1234',
|
||||
)
|
||||
request[0].add_tag('tag1', 'value1')
|
||||
request[0].add_tag('tag2', 'value2')
|
||||
|
||||
requests = conn.get_all_spot_instance_requests()
|
||||
requests.should.have.length_of(1)
|
||||
request = requests[0]
|
||||
|
||||
tag_dict = dict(request.tags)
|
||||
tag_dict.should.equal({'tag1': 'value1', 'tag2': 'value2'})
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_get_all_spot_instance_requests_filtering():
|
||||
"""
|
||||
Test that moto correctly filters spot instance requests
|
||||
"""
|
||||
conn = boto.connect_ec2()
|
||||
|
||||
request1 = conn.request_spot_instances(
|
||||
price=0.5, image_id='ami-abcd1234',
|
||||
)
|
||||
request2 = conn.request_spot_instances(
|
||||
price=0.5, image_id='ami-abcd1234',
|
||||
)
|
||||
conn.request_spot_instances(
|
||||
price=0.5, image_id='ami-abcd1234',
|
||||
)
|
||||
request1[0].add_tag('tag1', 'value1')
|
||||
request1[0].add_tag('tag2', 'value2')
|
||||
request2[0].add_tag('tag1', 'value1')
|
||||
request2[0].add_tag('tag2', 'wrong')
|
||||
|
||||
requests = conn.get_all_spot_instance_requests(filters={'state': 'active'})
|
||||
requests.should.have.length_of(0)
|
||||
|
||||
requests = conn.get_all_spot_instance_requests(filters={'state': 'open'})
|
||||
requests.should.have.length_of(3)
|
||||
|
||||
requests = conn.get_all_spot_instance_requests(
|
||||
filters={'tag:tag1': 'value1'})
|
||||
requests.should.have.length_of(2)
|
||||
|
||||
requests = conn.get_all_spot_instance_requests(
|
||||
filters={'tag:tag1': 'value1', 'tag:tag2': 'value2'})
|
||||
requests.should.have.length_of(1)
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_request_spot_instances_setting_instance_id():
|
||||
conn = boto.ec2.connect_to_region("us-east-1")
|
||||
request = conn.request_spot_instances(
|
||||
price=0.5, image_id='ami-abcd1234')
|
||||
|
||||
req = get_model('SpotInstanceRequest', 'us-east-1')[0]
|
||||
req.state = 'active'
|
||||
req.instance_id = 'i-12345678'
|
||||
|
||||
request = conn.get_all_spot_instance_requests()[0]
|
||||
assert request.state == 'active'
|
||||
assert request.instance_id == 'i-12345678'
|
||||
from __future__ import unicode_literals
|
||||
from nose.tools import assert_raises
|
||||
import datetime
|
||||
|
||||
import boto
|
||||
import boto3
|
||||
from boto.exception import EC2ResponseError
|
||||
from botocore.exceptions import ClientError
|
||||
import pytz
|
||||
import sure # noqa
|
||||
|
||||
from moto import mock_ec2, mock_ec2_deprecated
|
||||
from moto.backends import get_model
|
||||
from moto.core.utils import iso_8601_datetime_with_milliseconds
|
||||
|
||||
|
||||
@mock_ec2
|
||||
def test_request_spot_instances():
|
||||
conn = boto3.client('ec2', 'us-east-1')
|
||||
vpc = conn.create_vpc(CidrBlock="10.0.0.0/8")['Vpc']
|
||||
subnet = conn.create_subnet(
|
||||
VpcId=vpc['VpcId'], CidrBlock='10.0.0.0/16', AvailabilityZone='us-east-1a')['Subnet']
|
||||
subnet_id = subnet['SubnetId']
|
||||
|
||||
conn.create_security_group(GroupName='group1', Description='description')
|
||||
conn.create_security_group(GroupName='group2', Description='description')
|
||||
|
||||
start_dt = datetime.datetime(2013, 1, 1).replace(tzinfo=pytz.utc)
|
||||
end_dt = datetime.datetime(2013, 1, 2).replace(tzinfo=pytz.utc)
|
||||
start = iso_8601_datetime_with_milliseconds(start_dt)
|
||||
end = iso_8601_datetime_with_milliseconds(end_dt)
|
||||
|
||||
with assert_raises(ClientError) as ex:
|
||||
request = conn.request_spot_instances(
|
||||
SpotPrice="0.5", InstanceCount=1, Type='one-time',
|
||||
ValidFrom=start, ValidUntil=end, LaunchGroup="the-group",
|
||||
AvailabilityZoneGroup='my-group',
|
||||
LaunchSpecification={
|
||||
"ImageId": 'ami-abcd1234',
|
||||
"KeyName": "test",
|
||||
"SecurityGroups": ['group1', 'group2'],
|
||||
"UserData": "some test data",
|
||||
"InstanceType": 'm1.small',
|
||||
"Placement": {
|
||||
"AvailabilityZone": 'us-east-1c',
|
||||
},
|
||||
"KernelId": "test-kernel",
|
||||
"RamdiskId": "test-ramdisk",
|
||||
"Monitoring": {
|
||||
"Enabled": True,
|
||||
},
|
||||
"SubnetId": subnet_id,
|
||||
},
|
||||
DryRun=True,
|
||||
)
|
||||
ex.exception.response['Error']['Code'].should.equal('DryRunOperation')
|
||||
ex.exception.response['ResponseMetadata'][
|
||||
'HTTPStatusCode'].should.equal(400)
|
||||
ex.exception.response['Error']['Message'].should.equal(
|
||||
'An error occurred (DryRunOperation) when calling the RequestSpotInstance operation: Request would have succeeded, but DryRun flag is set')
|
||||
|
||||
request = conn.request_spot_instances(
|
||||
SpotPrice="0.5", InstanceCount=1, Type='one-time',
|
||||
ValidFrom=start, ValidUntil=end, LaunchGroup="the-group",
|
||||
AvailabilityZoneGroup='my-group',
|
||||
LaunchSpecification={
|
||||
"ImageId": 'ami-abcd1234',
|
||||
"KeyName": "test",
|
||||
"SecurityGroups": ['group1', 'group2'],
|
||||
"UserData": "some test data",
|
||||
"InstanceType": 'm1.small',
|
||||
"Placement": {
|
||||
"AvailabilityZone": 'us-east-1c',
|
||||
},
|
||||
"KernelId": "test-kernel",
|
||||
"RamdiskId": "test-ramdisk",
|
||||
"Monitoring": {
|
||||
"Enabled": True,
|
||||
},
|
||||
"SubnetId": subnet_id,
|
||||
},
|
||||
)
|
||||
|
||||
requests = conn.describe_spot_instance_requests()['SpotInstanceRequests']
|
||||
requests.should.have.length_of(1)
|
||||
request = requests[0]
|
||||
|
||||
request['State'].should.equal("open")
|
||||
request['SpotPrice'].should.equal("0.5")
|
||||
request['Type'].should.equal('one-time')
|
||||
request['ValidFrom'].should.equal(start_dt)
|
||||
request['ValidUntil'].should.equal(end_dt)
|
||||
request['LaunchGroup'].should.equal("the-group")
|
||||
request['AvailabilityZoneGroup'].should.equal('my-group')
|
||||
|
||||
launch_spec = request['LaunchSpecification']
|
||||
security_group_names = [group['GroupName']
|
||||
for group in launch_spec['SecurityGroups']]
|
||||
set(security_group_names).should.equal(set(['group1', 'group2']))
|
||||
|
||||
launch_spec['ImageId'].should.equal('ami-abcd1234')
|
||||
launch_spec['KeyName'].should.equal("test")
|
||||
launch_spec['InstanceType'].should.equal('m1.small')
|
||||
launch_spec['KernelId'].should.equal("test-kernel")
|
||||
launch_spec['RamdiskId'].should.equal("test-ramdisk")
|
||||
launch_spec['SubnetId'].should.equal(subnet_id)
|
||||
|
||||
|
||||
@mock_ec2
|
||||
def test_request_spot_instances_default_arguments():
|
||||
"""
|
||||
Test that moto set the correct default arguments
|
||||
"""
|
||||
conn = boto3.client('ec2', 'us-east-1')
|
||||
|
||||
request = conn.request_spot_instances(
|
||||
SpotPrice="0.5",
|
||||
LaunchSpecification={
|
||||
"ImageId": 'ami-abcd1234',
|
||||
}
|
||||
)
|
||||
|
||||
requests = conn.describe_spot_instance_requests()['SpotInstanceRequests']
|
||||
requests.should.have.length_of(1)
|
||||
request = requests[0]
|
||||
|
||||
request['State'].should.equal("open")
|
||||
request['SpotPrice'].should.equal("0.5")
|
||||
request['Type'].should.equal('one-time')
|
||||
request.shouldnt.contain('ValidFrom')
|
||||
request.shouldnt.contain('ValidUntil')
|
||||
request.shouldnt.contain('LaunchGroup')
|
||||
request.shouldnt.contain('AvailabilityZoneGroup')
|
||||
|
||||
launch_spec = request['LaunchSpecification']
|
||||
|
||||
security_group_names = [group['GroupName']
|
||||
for group in launch_spec['SecurityGroups']]
|
||||
security_group_names.should.equal(["default"])
|
||||
|
||||
launch_spec['ImageId'].should.equal('ami-abcd1234')
|
||||
request.shouldnt.contain('KeyName')
|
||||
launch_spec['InstanceType'].should.equal('m1.small')
|
||||
request.shouldnt.contain('KernelId')
|
||||
request.shouldnt.contain('RamdiskId')
|
||||
request.shouldnt.contain('SubnetId')
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_cancel_spot_instance_request():
|
||||
conn = boto.connect_ec2()
|
||||
|
||||
conn.request_spot_instances(
|
||||
price=0.5, image_id='ami-abcd1234',
|
||||
)
|
||||
|
||||
requests = conn.get_all_spot_instance_requests()
|
||||
requests.should.have.length_of(1)
|
||||
|
||||
with assert_raises(EC2ResponseError) as ex:
|
||||
conn.cancel_spot_instance_requests([requests[0].id], dry_run=True)
|
||||
ex.exception.error_code.should.equal('DryRunOperation')
|
||||
ex.exception.status.should.equal(400)
|
||||
ex.exception.message.should.equal(
|
||||
'An error occurred (DryRunOperation) when calling the CancelSpotInstance operation: Request would have succeeded, but DryRun flag is set')
|
||||
|
||||
conn.cancel_spot_instance_requests([requests[0].id])
|
||||
|
||||
requests = conn.get_all_spot_instance_requests()
|
||||
requests.should.have.length_of(0)
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_request_spot_instances_fulfilled():
|
||||
"""
|
||||
Test that moto correctly fullfills a spot instance request
|
||||
"""
|
||||
conn = boto.ec2.connect_to_region("us-east-1")
|
||||
|
||||
request = conn.request_spot_instances(
|
||||
price=0.5, image_id='ami-abcd1234',
|
||||
)
|
||||
|
||||
requests = conn.get_all_spot_instance_requests()
|
||||
requests.should.have.length_of(1)
|
||||
request = requests[0]
|
||||
|
||||
request.state.should.equal("open")
|
||||
|
||||
get_model('SpotInstanceRequest', 'us-east-1')[0].state = 'active'
|
||||
|
||||
requests = conn.get_all_spot_instance_requests()
|
||||
requests.should.have.length_of(1)
|
||||
request = requests[0]
|
||||
|
||||
request.state.should.equal("active")
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_tag_spot_instance_request():
|
||||
"""
|
||||
Test that moto correctly tags a spot instance request
|
||||
"""
|
||||
conn = boto.connect_ec2()
|
||||
|
||||
request = conn.request_spot_instances(
|
||||
price=0.5, image_id='ami-abcd1234',
|
||||
)
|
||||
request[0].add_tag('tag1', 'value1')
|
||||
request[0].add_tag('tag2', 'value2')
|
||||
|
||||
requests = conn.get_all_spot_instance_requests()
|
||||
requests.should.have.length_of(1)
|
||||
request = requests[0]
|
||||
|
||||
tag_dict = dict(request.tags)
|
||||
tag_dict.should.equal({'tag1': 'value1', 'tag2': 'value2'})
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_get_all_spot_instance_requests_filtering():
|
||||
"""
|
||||
Test that moto correctly filters spot instance requests
|
||||
"""
|
||||
conn = boto.connect_ec2()
|
||||
|
||||
request1 = conn.request_spot_instances(
|
||||
price=0.5, image_id='ami-abcd1234',
|
||||
)
|
||||
request2 = conn.request_spot_instances(
|
||||
price=0.5, image_id='ami-abcd1234',
|
||||
)
|
||||
conn.request_spot_instances(
|
||||
price=0.5, image_id='ami-abcd1234',
|
||||
)
|
||||
request1[0].add_tag('tag1', 'value1')
|
||||
request1[0].add_tag('tag2', 'value2')
|
||||
request2[0].add_tag('tag1', 'value1')
|
||||
request2[0].add_tag('tag2', 'wrong')
|
||||
|
||||
requests = conn.get_all_spot_instance_requests(filters={'state': 'active'})
|
||||
requests.should.have.length_of(0)
|
||||
|
||||
requests = conn.get_all_spot_instance_requests(filters={'state': 'open'})
|
||||
requests.should.have.length_of(3)
|
||||
|
||||
requests = conn.get_all_spot_instance_requests(
|
||||
filters={'tag:tag1': 'value1'})
|
||||
requests.should.have.length_of(2)
|
||||
|
||||
requests = conn.get_all_spot_instance_requests(
|
||||
filters={'tag:tag1': 'value1', 'tag:tag2': 'value2'})
|
||||
requests.should.have.length_of(1)
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_request_spot_instances_setting_instance_id():
|
||||
conn = boto.ec2.connect_to_region("us-east-1")
|
||||
request = conn.request_spot_instances(
|
||||
price=0.5, image_id='ami-abcd1234')
|
||||
|
||||
req = get_model('SpotInstanceRequest', 'us-east-1')[0]
|
||||
req.state = 'active'
|
||||
req.instance_id = 'i-12345678'
|
||||
|
||||
request = conn.get_all_spot_instance_requests()[0]
|
||||
assert request.state == 'active'
|
||||
assert request.instance_id == 'i-12345678'
|
||||
|
|
|
|||
|
|
@ -1,291 +1,291 @@
|
|||
from __future__ import unicode_literals
|
||||
# Ensure 'assert_raises' context manager support for Python 2.6
|
||||
import tests.backport_assert_raises # noqa
|
||||
from nose.tools import assert_raises
|
||||
|
||||
import boto3
|
||||
import boto
|
||||
import boto.vpc
|
||||
from boto.exception import EC2ResponseError
|
||||
from botocore.exceptions import ParamValidationError
|
||||
import json
|
||||
import sure # noqa
|
||||
|
||||
from moto import mock_cloudformation_deprecated, mock_ec2, mock_ec2_deprecated
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_subnets():
|
||||
ec2 = boto.connect_ec2('the_key', 'the_secret')
|
||||
conn = boto.connect_vpc('the_key', 'the_secret')
|
||||
vpc = conn.create_vpc("10.0.0.0/16")
|
||||
subnet = conn.create_subnet(vpc.id, "10.0.0.0/18")
|
||||
|
||||
all_subnets = conn.get_all_subnets()
|
||||
all_subnets.should.have.length_of(1 + len(ec2.get_all_zones()))
|
||||
|
||||
conn.delete_subnet(subnet.id)
|
||||
|
||||
all_subnets = conn.get_all_subnets()
|
||||
all_subnets.should.have.length_of(0 + len(ec2.get_all_zones()))
|
||||
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
conn.delete_subnet(subnet.id)
|
||||
cm.exception.code.should.equal('InvalidSubnetID.NotFound')
|
||||
cm.exception.status.should.equal(400)
|
||||
cm.exception.request_id.should_not.be.none
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_subnet_create_vpc_validation():
|
||||
conn = boto.connect_vpc('the_key', 'the_secret')
|
||||
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
conn.create_subnet("vpc-abcd1234", "10.0.0.0/18")
|
||||
cm.exception.code.should.equal('InvalidVpcID.NotFound')
|
||||
cm.exception.status.should.equal(400)
|
||||
cm.exception.request_id.should_not.be.none
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_subnet_tagging():
|
||||
conn = boto.connect_vpc('the_key', 'the_secret')
|
||||
vpc = conn.create_vpc("10.0.0.0/16")
|
||||
subnet = conn.create_subnet(vpc.id, "10.0.0.0/18")
|
||||
|
||||
subnet.add_tag("a key", "some value")
|
||||
|
||||
tag = conn.get_all_tags()[0]
|
||||
tag.name.should.equal("a key")
|
||||
tag.value.should.equal("some value")
|
||||
|
||||
# Refresh the subnet
|
||||
subnet = conn.get_all_subnets(subnet_ids=[subnet.id])[0]
|
||||
subnet.tags.should.have.length_of(1)
|
||||
subnet.tags["a key"].should.equal("some value")
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_subnet_should_have_proper_availability_zone_set():
|
||||
conn = boto.vpc.connect_to_region('us-west-1')
|
||||
vpcA = conn.create_vpc("10.0.0.0/16")
|
||||
subnetA = conn.create_subnet(
|
||||
vpcA.id, "10.0.0.0/24", availability_zone='us-west-1b')
|
||||
subnetA.availability_zone.should.equal('us-west-1b')
|
||||
|
||||
|
||||
@mock_ec2
|
||||
def test_default_subnet():
|
||||
ec2 = boto3.resource('ec2', region_name='us-west-1')
|
||||
|
||||
default_vpc = list(ec2.vpcs.all())[0]
|
||||
default_vpc.cidr_block.should.equal('172.31.0.0/16')
|
||||
default_vpc.reload()
|
||||
default_vpc.is_default.should.be.ok
|
||||
|
||||
subnet = ec2.create_subnet(
|
||||
VpcId=default_vpc.id, CidrBlock='172.31.0.0/20', AvailabilityZone='us-west-1a')
|
||||
subnet.reload()
|
||||
subnet.map_public_ip_on_launch.shouldnt.be.ok
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_non_default_subnet():
|
||||
vpc_cli = boto.vpc.connect_to_region('us-west-1')
|
||||
|
||||
# Create the non default VPC
|
||||
vpc = vpc_cli.create_vpc("10.0.0.0/16")
|
||||
vpc.is_default.shouldnt.be.ok
|
||||
|
||||
subnet = vpc_cli.create_subnet(vpc.id, "10.0.0.0/24")
|
||||
subnet = vpc_cli.get_all_subnets(subnet_ids=[subnet.id])[0]
|
||||
subnet.mapPublicIpOnLaunch.should.equal('false')
|
||||
|
||||
|
||||
@mock_ec2
|
||||
def test_boto3_non_default_subnet():
|
||||
ec2 = boto3.resource('ec2', region_name='us-west-1')
|
||||
|
||||
# Create the non default VPC
|
||||
vpc = ec2.create_vpc(CidrBlock='10.0.0.0/16')
|
||||
vpc.reload()
|
||||
vpc.is_default.shouldnt.be.ok
|
||||
|
||||
subnet = ec2.create_subnet(
|
||||
VpcId=vpc.id, CidrBlock='10.0.0.0/24', AvailabilityZone='us-west-1a')
|
||||
subnet.reload()
|
||||
subnet.map_public_ip_on_launch.shouldnt.be.ok
|
||||
|
||||
|
||||
@mock_ec2
|
||||
def test_modify_subnet_attribute():
|
||||
ec2 = boto3.resource('ec2', region_name='us-west-1')
|
||||
client = boto3.client('ec2', region_name='us-west-1')
|
||||
|
||||
# Get the default VPC
|
||||
vpc = list(ec2.vpcs.all())[0]
|
||||
|
||||
subnet = ec2.create_subnet(
|
||||
VpcId=vpc.id, CidrBlock='10.0.0.0/24', AvailabilityZone='us-west-1a')
|
||||
|
||||
# 'map_public_ip_on_launch' is set when calling 'DescribeSubnets' action
|
||||
subnet.reload()
|
||||
|
||||
# For non default subnet, attribute value should be 'False'
|
||||
subnet.map_public_ip_on_launch.shouldnt.be.ok
|
||||
|
||||
client.modify_subnet_attribute(
|
||||
SubnetId=subnet.id, MapPublicIpOnLaunch={'Value': False})
|
||||
subnet.reload()
|
||||
subnet.map_public_ip_on_launch.shouldnt.be.ok
|
||||
|
||||
client.modify_subnet_attribute(
|
||||
SubnetId=subnet.id, MapPublicIpOnLaunch={'Value': True})
|
||||
subnet.reload()
|
||||
subnet.map_public_ip_on_launch.should.be.ok
|
||||
|
||||
|
||||
@mock_ec2
|
||||
def test_modify_subnet_attribute_validation():
|
||||
ec2 = boto3.resource('ec2', region_name='us-west-1')
|
||||
client = boto3.client('ec2', region_name='us-west-1')
|
||||
vpc = ec2.create_vpc(CidrBlock='10.0.0.0/16')
|
||||
subnet = ec2.create_subnet(
|
||||
VpcId=vpc.id, CidrBlock='10.0.0.0/24', AvailabilityZone='us-west-1a')
|
||||
|
||||
with assert_raises(ParamValidationError):
|
||||
client.modify_subnet_attribute(
|
||||
SubnetId=subnet.id, MapPublicIpOnLaunch={'Value': 'invalid'})
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_subnet_get_by_id():
|
||||
ec2 = boto.ec2.connect_to_region('us-west-1')
|
||||
conn = boto.vpc.connect_to_region('us-west-1')
|
||||
vpcA = conn.create_vpc("10.0.0.0/16")
|
||||
subnetA = conn.create_subnet(
|
||||
vpcA.id, "10.0.0.0/24", availability_zone='us-west-1a')
|
||||
vpcB = conn.create_vpc("10.0.0.0/16")
|
||||
subnetB1 = conn.create_subnet(
|
||||
vpcB.id, "10.0.0.0/24", availability_zone='us-west-1a')
|
||||
subnetB2 = conn.create_subnet(
|
||||
vpcB.id, "10.0.1.0/24", availability_zone='us-west-1b')
|
||||
|
||||
subnets_by_id = conn.get_all_subnets(subnet_ids=[subnetA.id, subnetB1.id])
|
||||
subnets_by_id.should.have.length_of(2)
|
||||
subnets_by_id = tuple(map(lambda s: s.id, subnets_by_id))
|
||||
subnetA.id.should.be.within(subnets_by_id)
|
||||
subnetB1.id.should.be.within(subnets_by_id)
|
||||
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
conn.get_all_subnets(subnet_ids=['subnet-does_not_exist'])
|
||||
cm.exception.code.should.equal('InvalidSubnetID.NotFound')
|
||||
cm.exception.status.should.equal(400)
|
||||
cm.exception.request_id.should_not.be.none
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_get_subnets_filtering():
|
||||
ec2 = boto.ec2.connect_to_region('us-west-1')
|
||||
conn = boto.vpc.connect_to_region('us-west-1')
|
||||
vpcA = conn.create_vpc("10.0.0.0/16")
|
||||
subnetA = conn.create_subnet(
|
||||
vpcA.id, "10.0.0.0/24", availability_zone='us-west-1a')
|
||||
vpcB = conn.create_vpc("10.0.0.0/16")
|
||||
subnetB1 = conn.create_subnet(
|
||||
vpcB.id, "10.0.0.0/24", availability_zone='us-west-1a')
|
||||
subnetB2 = conn.create_subnet(
|
||||
vpcB.id, "10.0.1.0/24", availability_zone='us-west-1b')
|
||||
|
||||
all_subnets = conn.get_all_subnets()
|
||||
all_subnets.should.have.length_of(3 + len(ec2.get_all_zones()))
|
||||
|
||||
# Filter by VPC ID
|
||||
subnets_by_vpc = conn.get_all_subnets(filters={'vpc-id': vpcB.id})
|
||||
subnets_by_vpc.should.have.length_of(2)
|
||||
set([subnet.id for subnet in subnets_by_vpc]).should.equal(
|
||||
set([subnetB1.id, subnetB2.id]))
|
||||
|
||||
# Filter by CIDR variations
|
||||
subnets_by_cidr1 = conn.get_all_subnets(filters={'cidr': "10.0.0.0/24"})
|
||||
subnets_by_cidr1.should.have.length_of(2)
|
||||
set([subnet.id for subnet in subnets_by_cidr1]
|
||||
).should.equal(set([subnetA.id, subnetB1.id]))
|
||||
|
||||
subnets_by_cidr2 = conn.get_all_subnets(
|
||||
filters={'cidr-block': "10.0.0.0/24"})
|
||||
subnets_by_cidr2.should.have.length_of(2)
|
||||
set([subnet.id for subnet in subnets_by_cidr2]
|
||||
).should.equal(set([subnetA.id, subnetB1.id]))
|
||||
|
||||
subnets_by_cidr3 = conn.get_all_subnets(
|
||||
filters={'cidrBlock': "10.0.0.0/24"})
|
||||
subnets_by_cidr3.should.have.length_of(2)
|
||||
set([subnet.id for subnet in subnets_by_cidr3]
|
||||
).should.equal(set([subnetA.id, subnetB1.id]))
|
||||
|
||||
# Filter by VPC ID and CIDR
|
||||
subnets_by_vpc_and_cidr = conn.get_all_subnets(
|
||||
filters={'vpc-id': vpcB.id, 'cidr': "10.0.0.0/24"})
|
||||
subnets_by_vpc_and_cidr.should.have.length_of(1)
|
||||
set([subnet.id for subnet in subnets_by_vpc_and_cidr]
|
||||
).should.equal(set([subnetB1.id]))
|
||||
|
||||
# Filter by subnet ID
|
||||
subnets_by_id = conn.get_all_subnets(filters={'subnet-id': subnetA.id})
|
||||
subnets_by_id.should.have.length_of(1)
|
||||
set([subnet.id for subnet in subnets_by_id]).should.equal(set([subnetA.id]))
|
||||
|
||||
# Filter by availabilityZone
|
||||
subnets_by_az = conn.get_all_subnets(
|
||||
filters={'availabilityZone': 'us-west-1a', 'vpc-id': vpcB.id})
|
||||
subnets_by_az.should.have.length_of(1)
|
||||
set([subnet.id for subnet in subnets_by_az]
|
||||
).should.equal(set([subnetB1.id]))
|
||||
|
||||
# Filter by defaultForAz
|
||||
|
||||
subnets_by_az = conn.get_all_subnets(filters={'defaultForAz': "true"})
|
||||
subnets_by_az.should.have.length_of(len(conn.get_all_zones()))
|
||||
|
||||
# Unsupported filter
|
||||
conn.get_all_subnets.when.called_with(
|
||||
filters={'not-implemented-filter': 'foobar'}).should.throw(NotImplementedError)
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
@mock_cloudformation_deprecated
|
||||
def test_subnet_tags_through_cloudformation():
|
||||
vpc_conn = boto.vpc.connect_to_region('us-west-1')
|
||||
vpc = vpc_conn.create_vpc("10.0.0.0/16")
|
||||
|
||||
subnet_template = {
|
||||
"AWSTemplateFormatVersion": "2010-09-09",
|
||||
"Resources": {
|
||||
"testSubnet": {
|
||||
"Type": "AWS::EC2::Subnet",
|
||||
"Properties": {
|
||||
"VpcId": vpc.id,
|
||||
"CidrBlock": "10.0.0.0/24",
|
||||
"AvailabilityZone": "us-west-1b",
|
||||
"Tags": [{
|
||||
"Key": "foo",
|
||||
"Value": "bar",
|
||||
}, {
|
||||
"Key": "blah",
|
||||
"Value": "baz",
|
||||
}]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
cf_conn = boto.cloudformation.connect_to_region("us-west-1")
|
||||
template_json = json.dumps(subnet_template)
|
||||
cf_conn.create_stack(
|
||||
"test_stack",
|
||||
template_body=template_json,
|
||||
)
|
||||
|
||||
subnet = vpc_conn.get_all_subnets(filters={'cidrBlock': '10.0.0.0/24'})[0]
|
||||
subnet.tags["foo"].should.equal("bar")
|
||||
subnet.tags["blah"].should.equal("baz")
|
||||
from __future__ import unicode_literals
|
||||
# Ensure 'assert_raises' context manager support for Python 2.6
|
||||
import tests.backport_assert_raises # noqa
|
||||
from nose.tools import assert_raises
|
||||
|
||||
import boto3
|
||||
import boto
|
||||
import boto.vpc
|
||||
from boto.exception import EC2ResponseError
|
||||
from botocore.exceptions import ParamValidationError
|
||||
import json
|
||||
import sure # noqa
|
||||
|
||||
from moto import mock_cloudformation_deprecated, mock_ec2, mock_ec2_deprecated
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_subnets():
|
||||
ec2 = boto.connect_ec2('the_key', 'the_secret')
|
||||
conn = boto.connect_vpc('the_key', 'the_secret')
|
||||
vpc = conn.create_vpc("10.0.0.0/16")
|
||||
subnet = conn.create_subnet(vpc.id, "10.0.0.0/18")
|
||||
|
||||
all_subnets = conn.get_all_subnets()
|
||||
all_subnets.should.have.length_of(1 + len(ec2.get_all_zones()))
|
||||
|
||||
conn.delete_subnet(subnet.id)
|
||||
|
||||
all_subnets = conn.get_all_subnets()
|
||||
all_subnets.should.have.length_of(0 + len(ec2.get_all_zones()))
|
||||
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
conn.delete_subnet(subnet.id)
|
||||
cm.exception.code.should.equal('InvalidSubnetID.NotFound')
|
||||
cm.exception.status.should.equal(400)
|
||||
cm.exception.request_id.should_not.be.none
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_subnet_create_vpc_validation():
|
||||
conn = boto.connect_vpc('the_key', 'the_secret')
|
||||
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
conn.create_subnet("vpc-abcd1234", "10.0.0.0/18")
|
||||
cm.exception.code.should.equal('InvalidVpcID.NotFound')
|
||||
cm.exception.status.should.equal(400)
|
||||
cm.exception.request_id.should_not.be.none
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_subnet_tagging():
|
||||
conn = boto.connect_vpc('the_key', 'the_secret')
|
||||
vpc = conn.create_vpc("10.0.0.0/16")
|
||||
subnet = conn.create_subnet(vpc.id, "10.0.0.0/18")
|
||||
|
||||
subnet.add_tag("a key", "some value")
|
||||
|
||||
tag = conn.get_all_tags()[0]
|
||||
tag.name.should.equal("a key")
|
||||
tag.value.should.equal("some value")
|
||||
|
||||
# Refresh the subnet
|
||||
subnet = conn.get_all_subnets(subnet_ids=[subnet.id])[0]
|
||||
subnet.tags.should.have.length_of(1)
|
||||
subnet.tags["a key"].should.equal("some value")
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_subnet_should_have_proper_availability_zone_set():
|
||||
conn = boto.vpc.connect_to_region('us-west-1')
|
||||
vpcA = conn.create_vpc("10.0.0.0/16")
|
||||
subnetA = conn.create_subnet(
|
||||
vpcA.id, "10.0.0.0/24", availability_zone='us-west-1b')
|
||||
subnetA.availability_zone.should.equal('us-west-1b')
|
||||
|
||||
|
||||
@mock_ec2
|
||||
def test_default_subnet():
|
||||
ec2 = boto3.resource('ec2', region_name='us-west-1')
|
||||
|
||||
default_vpc = list(ec2.vpcs.all())[0]
|
||||
default_vpc.cidr_block.should.equal('172.31.0.0/16')
|
||||
default_vpc.reload()
|
||||
default_vpc.is_default.should.be.ok
|
||||
|
||||
subnet = ec2.create_subnet(
|
||||
VpcId=default_vpc.id, CidrBlock='172.31.0.0/20', AvailabilityZone='us-west-1a')
|
||||
subnet.reload()
|
||||
subnet.map_public_ip_on_launch.shouldnt.be.ok
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_non_default_subnet():
|
||||
vpc_cli = boto.vpc.connect_to_region('us-west-1')
|
||||
|
||||
# Create the non default VPC
|
||||
vpc = vpc_cli.create_vpc("10.0.0.0/16")
|
||||
vpc.is_default.shouldnt.be.ok
|
||||
|
||||
subnet = vpc_cli.create_subnet(vpc.id, "10.0.0.0/24")
|
||||
subnet = vpc_cli.get_all_subnets(subnet_ids=[subnet.id])[0]
|
||||
subnet.mapPublicIpOnLaunch.should.equal('false')
|
||||
|
||||
|
||||
@mock_ec2
|
||||
def test_boto3_non_default_subnet():
|
||||
ec2 = boto3.resource('ec2', region_name='us-west-1')
|
||||
|
||||
# Create the non default VPC
|
||||
vpc = ec2.create_vpc(CidrBlock='10.0.0.0/16')
|
||||
vpc.reload()
|
||||
vpc.is_default.shouldnt.be.ok
|
||||
|
||||
subnet = ec2.create_subnet(
|
||||
VpcId=vpc.id, CidrBlock='10.0.0.0/24', AvailabilityZone='us-west-1a')
|
||||
subnet.reload()
|
||||
subnet.map_public_ip_on_launch.shouldnt.be.ok
|
||||
|
||||
|
||||
@mock_ec2
|
||||
def test_modify_subnet_attribute():
|
||||
ec2 = boto3.resource('ec2', region_name='us-west-1')
|
||||
client = boto3.client('ec2', region_name='us-west-1')
|
||||
|
||||
# Get the default VPC
|
||||
vpc = list(ec2.vpcs.all())[0]
|
||||
|
||||
subnet = ec2.create_subnet(
|
||||
VpcId=vpc.id, CidrBlock='10.0.0.0/24', AvailabilityZone='us-west-1a')
|
||||
|
||||
# 'map_public_ip_on_launch' is set when calling 'DescribeSubnets' action
|
||||
subnet.reload()
|
||||
|
||||
# For non default subnet, attribute value should be 'False'
|
||||
subnet.map_public_ip_on_launch.shouldnt.be.ok
|
||||
|
||||
client.modify_subnet_attribute(
|
||||
SubnetId=subnet.id, MapPublicIpOnLaunch={'Value': False})
|
||||
subnet.reload()
|
||||
subnet.map_public_ip_on_launch.shouldnt.be.ok
|
||||
|
||||
client.modify_subnet_attribute(
|
||||
SubnetId=subnet.id, MapPublicIpOnLaunch={'Value': True})
|
||||
subnet.reload()
|
||||
subnet.map_public_ip_on_launch.should.be.ok
|
||||
|
||||
|
||||
@mock_ec2
|
||||
def test_modify_subnet_attribute_validation():
|
||||
ec2 = boto3.resource('ec2', region_name='us-west-1')
|
||||
client = boto3.client('ec2', region_name='us-west-1')
|
||||
vpc = ec2.create_vpc(CidrBlock='10.0.0.0/16')
|
||||
subnet = ec2.create_subnet(
|
||||
VpcId=vpc.id, CidrBlock='10.0.0.0/24', AvailabilityZone='us-west-1a')
|
||||
|
||||
with assert_raises(ParamValidationError):
|
||||
client.modify_subnet_attribute(
|
||||
SubnetId=subnet.id, MapPublicIpOnLaunch={'Value': 'invalid'})
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_subnet_get_by_id():
|
||||
ec2 = boto.ec2.connect_to_region('us-west-1')
|
||||
conn = boto.vpc.connect_to_region('us-west-1')
|
||||
vpcA = conn.create_vpc("10.0.0.0/16")
|
||||
subnetA = conn.create_subnet(
|
||||
vpcA.id, "10.0.0.0/24", availability_zone='us-west-1a')
|
||||
vpcB = conn.create_vpc("10.0.0.0/16")
|
||||
subnetB1 = conn.create_subnet(
|
||||
vpcB.id, "10.0.0.0/24", availability_zone='us-west-1a')
|
||||
subnetB2 = conn.create_subnet(
|
||||
vpcB.id, "10.0.1.0/24", availability_zone='us-west-1b')
|
||||
|
||||
subnets_by_id = conn.get_all_subnets(subnet_ids=[subnetA.id, subnetB1.id])
|
||||
subnets_by_id.should.have.length_of(2)
|
||||
subnets_by_id = tuple(map(lambda s: s.id, subnets_by_id))
|
||||
subnetA.id.should.be.within(subnets_by_id)
|
||||
subnetB1.id.should.be.within(subnets_by_id)
|
||||
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
conn.get_all_subnets(subnet_ids=['subnet-does_not_exist'])
|
||||
cm.exception.code.should.equal('InvalidSubnetID.NotFound')
|
||||
cm.exception.status.should.equal(400)
|
||||
cm.exception.request_id.should_not.be.none
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_get_subnets_filtering():
|
||||
ec2 = boto.ec2.connect_to_region('us-west-1')
|
||||
conn = boto.vpc.connect_to_region('us-west-1')
|
||||
vpcA = conn.create_vpc("10.0.0.0/16")
|
||||
subnetA = conn.create_subnet(
|
||||
vpcA.id, "10.0.0.0/24", availability_zone='us-west-1a')
|
||||
vpcB = conn.create_vpc("10.0.0.0/16")
|
||||
subnetB1 = conn.create_subnet(
|
||||
vpcB.id, "10.0.0.0/24", availability_zone='us-west-1a')
|
||||
subnetB2 = conn.create_subnet(
|
||||
vpcB.id, "10.0.1.0/24", availability_zone='us-west-1b')
|
||||
|
||||
all_subnets = conn.get_all_subnets()
|
||||
all_subnets.should.have.length_of(3 + len(ec2.get_all_zones()))
|
||||
|
||||
# Filter by VPC ID
|
||||
subnets_by_vpc = conn.get_all_subnets(filters={'vpc-id': vpcB.id})
|
||||
subnets_by_vpc.should.have.length_of(2)
|
||||
set([subnet.id for subnet in subnets_by_vpc]).should.equal(
|
||||
set([subnetB1.id, subnetB2.id]))
|
||||
|
||||
# Filter by CIDR variations
|
||||
subnets_by_cidr1 = conn.get_all_subnets(filters={'cidr': "10.0.0.0/24"})
|
||||
subnets_by_cidr1.should.have.length_of(2)
|
||||
set([subnet.id for subnet in subnets_by_cidr1]
|
||||
).should.equal(set([subnetA.id, subnetB1.id]))
|
||||
|
||||
subnets_by_cidr2 = conn.get_all_subnets(
|
||||
filters={'cidr-block': "10.0.0.0/24"})
|
||||
subnets_by_cidr2.should.have.length_of(2)
|
||||
set([subnet.id for subnet in subnets_by_cidr2]
|
||||
).should.equal(set([subnetA.id, subnetB1.id]))
|
||||
|
||||
subnets_by_cidr3 = conn.get_all_subnets(
|
||||
filters={'cidrBlock': "10.0.0.0/24"})
|
||||
subnets_by_cidr3.should.have.length_of(2)
|
||||
set([subnet.id for subnet in subnets_by_cidr3]
|
||||
).should.equal(set([subnetA.id, subnetB1.id]))
|
||||
|
||||
# Filter by VPC ID and CIDR
|
||||
subnets_by_vpc_and_cidr = conn.get_all_subnets(
|
||||
filters={'vpc-id': vpcB.id, 'cidr': "10.0.0.0/24"})
|
||||
subnets_by_vpc_and_cidr.should.have.length_of(1)
|
||||
set([subnet.id for subnet in subnets_by_vpc_and_cidr]
|
||||
).should.equal(set([subnetB1.id]))
|
||||
|
||||
# Filter by subnet ID
|
||||
subnets_by_id = conn.get_all_subnets(filters={'subnet-id': subnetA.id})
|
||||
subnets_by_id.should.have.length_of(1)
|
||||
set([subnet.id for subnet in subnets_by_id]).should.equal(set([subnetA.id]))
|
||||
|
||||
# Filter by availabilityZone
|
||||
subnets_by_az = conn.get_all_subnets(
|
||||
filters={'availabilityZone': 'us-west-1a', 'vpc-id': vpcB.id})
|
||||
subnets_by_az.should.have.length_of(1)
|
||||
set([subnet.id for subnet in subnets_by_az]
|
||||
).should.equal(set([subnetB1.id]))
|
||||
|
||||
# Filter by defaultForAz
|
||||
|
||||
subnets_by_az = conn.get_all_subnets(filters={'defaultForAz': "true"})
|
||||
subnets_by_az.should.have.length_of(len(conn.get_all_zones()))
|
||||
|
||||
# Unsupported filter
|
||||
conn.get_all_subnets.when.called_with(
|
||||
filters={'not-implemented-filter': 'foobar'}).should.throw(NotImplementedError)
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
@mock_cloudformation_deprecated
|
||||
def test_subnet_tags_through_cloudformation():
|
||||
vpc_conn = boto.vpc.connect_to_region('us-west-1')
|
||||
vpc = vpc_conn.create_vpc("10.0.0.0/16")
|
||||
|
||||
subnet_template = {
|
||||
"AWSTemplateFormatVersion": "2010-09-09",
|
||||
"Resources": {
|
||||
"testSubnet": {
|
||||
"Type": "AWS::EC2::Subnet",
|
||||
"Properties": {
|
||||
"VpcId": vpc.id,
|
||||
"CidrBlock": "10.0.0.0/24",
|
||||
"AvailabilityZone": "us-west-1b",
|
||||
"Tags": [{
|
||||
"Key": "foo",
|
||||
"Value": "bar",
|
||||
}, {
|
||||
"Key": "blah",
|
||||
"Value": "baz",
|
||||
}]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
cf_conn = boto.cloudformation.connect_to_region("us-west-1")
|
||||
template_json = json.dumps(subnet_template)
|
||||
cf_conn.create_stack(
|
||||
"test_stack",
|
||||
template_body=template_json,
|
||||
)
|
||||
|
||||
subnet = vpc_conn.get_all_subnets(filters={'cidrBlock': '10.0.0.0/24'})[0]
|
||||
subnet.tags["foo"].should.equal("bar")
|
||||
subnet.tags["blah"].should.equal("baz")
|
||||
|
|
|
|||
|
|
@ -1,453 +1,453 @@
|
|||
from __future__ import unicode_literals
|
||||
from nose.tools import assert_raises
|
||||
|
||||
import itertools
|
||||
|
||||
import boto
|
||||
import boto3
|
||||
from boto.exception import EC2ResponseError
|
||||
from boto.ec2.instance import Reservation
|
||||
import sure # noqa
|
||||
|
||||
from moto import mock_ec2_deprecated, mock_ec2
|
||||
from nose.tools import assert_raises
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_add_tag():
|
||||
conn = boto.connect_ec2('the_key', 'the_secret')
|
||||
reservation = conn.run_instances('ami-1234abcd')
|
||||
instance = reservation.instances[0]
|
||||
|
||||
with assert_raises(EC2ResponseError) as ex:
|
||||
instance.add_tag("a key", "some value", dry_run=True)
|
||||
ex.exception.error_code.should.equal('DryRunOperation')
|
||||
ex.exception.status.should.equal(400)
|
||||
ex.exception.message.should.equal(
|
||||
'An error occurred (DryRunOperation) when calling the CreateTags operation: Request would have succeeded, but DryRun flag is set')
|
||||
|
||||
instance.add_tag("a key", "some value")
|
||||
chain = itertools.chain.from_iterable
|
||||
existing_instances = list(
|
||||
chain([res.instances for res in conn.get_all_instances()]))
|
||||
existing_instances.should.have.length_of(1)
|
||||
existing_instance = existing_instances[0]
|
||||
existing_instance.tags["a key"].should.equal("some value")
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_remove_tag():
|
||||
conn = boto.connect_ec2('the_key', 'the_secret')
|
||||
reservation = conn.run_instances('ami-1234abcd')
|
||||
instance = reservation.instances[0]
|
||||
|
||||
instance.add_tag("a key", "some value")
|
||||
|
||||
tags = conn.get_all_tags()
|
||||
tag = tags[0]
|
||||
tag.name.should.equal("a key")
|
||||
tag.value.should.equal("some value")
|
||||
|
||||
with assert_raises(EC2ResponseError) as ex:
|
||||
instance.remove_tag("a key", dry_run=True)
|
||||
ex.exception.error_code.should.equal('DryRunOperation')
|
||||
ex.exception.status.should.equal(400)
|
||||
ex.exception.message.should.equal(
|
||||
'An error occurred (DryRunOperation) when calling the DeleteTags operation: Request would have succeeded, but DryRun flag is set')
|
||||
|
||||
instance.remove_tag("a key")
|
||||
conn.get_all_tags().should.have.length_of(0)
|
||||
|
||||
instance.add_tag("a key", "some value")
|
||||
conn.get_all_tags().should.have.length_of(1)
|
||||
instance.remove_tag("a key", "some value")
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_get_all_tags():
|
||||
conn = boto.connect_ec2('the_key', 'the_secret')
|
||||
reservation = conn.run_instances('ami-1234abcd')
|
||||
instance = reservation.instances[0]
|
||||
|
||||
instance.add_tag("a key", "some value")
|
||||
|
||||
tags = conn.get_all_tags()
|
||||
tag = tags[0]
|
||||
tag.name.should.equal("a key")
|
||||
tag.value.should.equal("some value")
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_get_all_tags_with_special_characters():
|
||||
conn = boto.connect_ec2('the_key', 'the_secret')
|
||||
reservation = conn.run_instances('ami-1234abcd')
|
||||
instance = reservation.instances[0]
|
||||
|
||||
instance.add_tag("a key", "some<> value")
|
||||
|
||||
tags = conn.get_all_tags()
|
||||
tag = tags[0]
|
||||
tag.name.should.equal("a key")
|
||||
tag.value.should.equal("some<> value")
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_create_tags():
|
||||
conn = boto.connect_ec2('the_key', 'the_secret')
|
||||
reservation = conn.run_instances('ami-1234abcd')
|
||||
instance = reservation.instances[0]
|
||||
tag_dict = {'a key': 'some value',
|
||||
'another key': 'some other value',
|
||||
'blank key': ''}
|
||||
|
||||
with assert_raises(EC2ResponseError) as ex:
|
||||
conn.create_tags(instance.id, tag_dict, dry_run=True)
|
||||
ex.exception.error_code.should.equal('DryRunOperation')
|
||||
ex.exception.status.should.equal(400)
|
||||
ex.exception.message.should.equal(
|
||||
'An error occurred (DryRunOperation) when calling the CreateTags operation: Request would have succeeded, but DryRun flag is set')
|
||||
|
||||
conn.create_tags(instance.id, tag_dict)
|
||||
tags = conn.get_all_tags()
|
||||
set([key for key in tag_dict]).should.equal(
|
||||
set([tag.name for tag in tags]))
|
||||
set([tag_dict[key] for key in tag_dict]).should.equal(
|
||||
set([tag.value for tag in tags]))
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_tag_limit_exceeded():
|
||||
conn = boto.connect_ec2('the_key', 'the_secret')
|
||||
reservation = conn.run_instances('ami-1234abcd')
|
||||
instance = reservation.instances[0]
|
||||
tag_dict = {}
|
||||
for i in range(51):
|
||||
tag_dict['{0:02d}'.format(i + 1)] = ''
|
||||
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
conn.create_tags(instance.id, tag_dict)
|
||||
cm.exception.code.should.equal('TagLimitExceeded')
|
||||
cm.exception.status.should.equal(400)
|
||||
cm.exception.request_id.should_not.be.none
|
||||
|
||||
instance.add_tag("a key", "a value")
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
conn.create_tags(instance.id, tag_dict)
|
||||
cm.exception.code.should.equal('TagLimitExceeded')
|
||||
cm.exception.status.should.equal(400)
|
||||
cm.exception.request_id.should_not.be.none
|
||||
|
||||
tags = conn.get_all_tags()
|
||||
tag = tags[0]
|
||||
tags.should.have.length_of(1)
|
||||
tag.name.should.equal("a key")
|
||||
tag.value.should.equal("a value")
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_invalid_parameter_tag_null():
|
||||
conn = boto.connect_ec2('the_key', 'the_secret')
|
||||
reservation = conn.run_instances('ami-1234abcd')
|
||||
instance = reservation.instances[0]
|
||||
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
instance.add_tag("a key", None)
|
||||
cm.exception.code.should.equal('InvalidParameterValue')
|
||||
cm.exception.status.should.equal(400)
|
||||
cm.exception.request_id.should_not.be.none
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_invalid_id():
|
||||
conn = boto.connect_ec2('the_key', 'the_secret')
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
conn.create_tags('ami-blah', {'key': 'tag'})
|
||||
cm.exception.code.should.equal('InvalidID')
|
||||
cm.exception.status.should.equal(400)
|
||||
cm.exception.request_id.should_not.be.none
|
||||
|
||||
with assert_raises(EC2ResponseError) as cm:
|
||||
conn.create_tags('blah-blah', {'key': 'tag'})
|
||||
cm.exception.code.should.equal('InvalidID')
|
||||
cm.exception.status.should.equal(400)
|
||||
cm.exception.request_id.should_not.be.none
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_get_all_tags_resource_id_filter():
|
||||
conn = boto.connect_ec2('the_key', 'the_secret')
|
||||
reservation = conn.run_instances('ami-1234abcd')
|
||||
instance = reservation.instances[0]
|
||||
instance.add_tag("an instance key", "some value")
|
||||
image_id = conn.create_image(instance.id, "test-ami", "this is a test ami")
|
||||
image = conn.get_image(image_id)
|
||||
image.add_tag("an image key", "some value")
|
||||
|
||||
tags = conn.get_all_tags(filters={'resource-id': instance.id})
|
||||
tag = tags[0]
|
||||
tags.should.have.length_of(1)
|
||||
tag.res_id.should.equal(instance.id)
|
||||
tag.res_type.should.equal('instance')
|
||||
tag.name.should.equal("an instance key")
|
||||
tag.value.should.equal("some value")
|
||||
|
||||
tags = conn.get_all_tags(filters={'resource-id': image_id})
|
||||
tag = tags[0]
|
||||
tags.should.have.length_of(1)
|
||||
tag.res_id.should.equal(image_id)
|
||||
tag.res_type.should.equal('image')
|
||||
tag.name.should.equal("an image key")
|
||||
tag.value.should.equal("some value")
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_get_all_tags_resource_type_filter():
|
||||
conn = boto.connect_ec2('the_key', 'the_secret')
|
||||
reservation = conn.run_instances('ami-1234abcd')
|
||||
instance = reservation.instances[0]
|
||||
instance.add_tag("an instance key", "some value")
|
||||
image_id = conn.create_image(instance.id, "test-ami", "this is a test ami")
|
||||
image = conn.get_image(image_id)
|
||||
image.add_tag("an image key", "some value")
|
||||
|
||||
tags = conn.get_all_tags(filters={'resource-type': 'instance'})
|
||||
tag = tags[0]
|
||||
tags.should.have.length_of(1)
|
||||
tag.res_id.should.equal(instance.id)
|
||||
tag.res_type.should.equal('instance')
|
||||
tag.name.should.equal("an instance key")
|
||||
tag.value.should.equal("some value")
|
||||
|
||||
tags = conn.get_all_tags(filters={'resource-type': 'image'})
|
||||
tag = tags[0]
|
||||
tags.should.have.length_of(1)
|
||||
tag.res_id.should.equal(image_id)
|
||||
tag.res_type.should.equal('image')
|
||||
tag.name.should.equal("an image key")
|
||||
tag.value.should.equal("some value")
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_get_all_tags_key_filter():
|
||||
conn = boto.connect_ec2('the_key', 'the_secret')
|
||||
reservation = conn.run_instances('ami-1234abcd')
|
||||
instance = reservation.instances[0]
|
||||
instance.add_tag("an instance key", "some value")
|
||||
image_id = conn.create_image(instance.id, "test-ami", "this is a test ami")
|
||||
image = conn.get_image(image_id)
|
||||
image.add_tag("an image key", "some value")
|
||||
|
||||
tags = conn.get_all_tags(filters={'key': 'an instance key'})
|
||||
tag = tags[0]
|
||||
tags.should.have.length_of(1)
|
||||
tag.res_id.should.equal(instance.id)
|
||||
tag.res_type.should.equal('instance')
|
||||
tag.name.should.equal("an instance key")
|
||||
tag.value.should.equal("some value")
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_get_all_tags_value_filter():
|
||||
conn = boto.connect_ec2('the_key', 'the_secret')
|
||||
reservation = conn.run_instances('ami-1234abcd')
|
||||
instance = reservation.instances[0]
|
||||
instance.add_tag("an instance key", "some value")
|
||||
reservation_b = conn.run_instances('ami-1234abcd')
|
||||
instance_b = reservation_b.instances[0]
|
||||
instance_b.add_tag("an instance key", "some other value")
|
||||
reservation_c = conn.run_instances('ami-1234abcd')
|
||||
instance_c = reservation_c.instances[0]
|
||||
instance_c.add_tag("an instance key", "other value*")
|
||||
reservation_d = conn.run_instances('ami-1234abcd')
|
||||
instance_d = reservation_d.instances[0]
|
||||
instance_d.add_tag("an instance key", "other value**")
|
||||
reservation_e = conn.run_instances('ami-1234abcd')
|
||||
instance_e = reservation_e.instances[0]
|
||||
instance_e.add_tag("an instance key", "other value*?")
|
||||
image_id = conn.create_image(instance.id, "test-ami", "this is a test ami")
|
||||
image = conn.get_image(image_id)
|
||||
image.add_tag("an image key", "some value")
|
||||
|
||||
tags = conn.get_all_tags(filters={'value': 'some value'})
|
||||
tags.should.have.length_of(2)
|
||||
|
||||
tags = conn.get_all_tags(filters={'value': 'some*value'})
|
||||
tags.should.have.length_of(3)
|
||||
|
||||
tags = conn.get_all_tags(filters={'value': '*some*value'})
|
||||
tags.should.have.length_of(3)
|
||||
|
||||
tags = conn.get_all_tags(filters={'value': '*some*value*'})
|
||||
tags.should.have.length_of(3)
|
||||
|
||||
tags = conn.get_all_tags(filters={'value': '*value\*'})
|
||||
tags.should.have.length_of(1)
|
||||
|
||||
tags = conn.get_all_tags(filters={'value': '*value\*\*'})
|
||||
tags.should.have.length_of(1)
|
||||
|
||||
tags = conn.get_all_tags(filters={'value': '*value\*\?'})
|
||||
tags.should.have.length_of(1)
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_retrieved_instances_must_contain_their_tags():
|
||||
tag_key = 'Tag name'
|
||||
tag_value = 'Tag value'
|
||||
tags_to_be_set = {tag_key: tag_value}
|
||||
|
||||
conn = boto.connect_ec2('the_key', 'the_secret')
|
||||
reservation = conn.run_instances('ami-1234abcd')
|
||||
reservation.should.be.a(Reservation)
|
||||
reservation.instances.should.have.length_of(1)
|
||||
instance = reservation.instances[0]
|
||||
|
||||
reservations = conn.get_all_instances()
|
||||
reservations.should.have.length_of(1)
|
||||
reservations[0].id.should.equal(reservation.id)
|
||||
instances = reservations[0].instances
|
||||
instances.should.have.length_of(1)
|
||||
instances[0].id.should.equal(instance.id)
|
||||
|
||||
conn.create_tags([instance.id], tags_to_be_set)
|
||||
reservations = conn.get_all_instances()
|
||||
instance = reservations[0].instances[0]
|
||||
retrieved_tags = instance.tags
|
||||
|
||||
# Cleanup of instance
|
||||
conn.terminate_instances([instances[0].id])
|
||||
|
||||
# Check whether tag is present with correct value
|
||||
retrieved_tags[tag_key].should.equal(tag_value)
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_retrieved_volumes_must_contain_their_tags():
|
||||
tag_key = 'Tag name'
|
||||
tag_value = 'Tag value'
|
||||
tags_to_be_set = {tag_key: tag_value}
|
||||
conn = boto.connect_ec2('the_key', 'the_secret')
|
||||
volume = conn.create_volume(80, "us-east-1a")
|
||||
|
||||
all_volumes = conn.get_all_volumes()
|
||||
volume = all_volumes[0]
|
||||
conn.create_tags([volume.id], tags_to_be_set)
|
||||
|
||||
# Fetch the volume again
|
||||
all_volumes = conn.get_all_volumes()
|
||||
volume = all_volumes[0]
|
||||
retrieved_tags = volume.tags
|
||||
|
||||
volume.delete()
|
||||
|
||||
# Check whether tag is present with correct value
|
||||
retrieved_tags[tag_key].should.equal(tag_value)
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_retrieved_snapshots_must_contain_their_tags():
|
||||
tag_key = 'Tag name'
|
||||
tag_value = 'Tag value'
|
||||
tags_to_be_set = {tag_key: tag_value}
|
||||
conn = boto.connect_ec2(aws_access_key_id='the_key',
|
||||
aws_secret_access_key='the_secret')
|
||||
volume = conn.create_volume(80, "eu-west-1a")
|
||||
snapshot = conn.create_snapshot(volume.id)
|
||||
conn.create_tags([snapshot.id], tags_to_be_set)
|
||||
|
||||
# Fetch the snapshot again
|
||||
all_snapshots = conn.get_all_snapshots()
|
||||
snapshot = [item for item in all_snapshots if item.id == snapshot.id][0]
|
||||
retrieved_tags = snapshot.tags
|
||||
|
||||
conn.delete_snapshot(snapshot.id)
|
||||
volume.delete()
|
||||
|
||||
# Check whether tag is present with correct value
|
||||
retrieved_tags[tag_key].should.equal(tag_value)
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_filter_instances_by_wildcard_tags():
|
||||
conn = boto.connect_ec2(aws_access_key_id='the_key',
|
||||
aws_secret_access_key='the_secret')
|
||||
reservation = conn.run_instances('ami-1234abcd')
|
||||
instance_a = reservation.instances[0]
|
||||
instance_a.add_tag("Key1", "Value1")
|
||||
reservation_b = conn.run_instances('ami-1234abcd')
|
||||
instance_b = reservation_b.instances[0]
|
||||
instance_b.add_tag("Key1", "Value2")
|
||||
|
||||
reservations = conn.get_all_instances(filters={'tag:Key1': 'Value*'})
|
||||
reservations.should.have.length_of(2)
|
||||
|
||||
reservations = conn.get_all_instances(filters={'tag-key': 'Key*'})
|
||||
reservations.should.have.length_of(2)
|
||||
|
||||
reservations = conn.get_all_instances(filters={'tag-value': 'Value*'})
|
||||
reservations.should.have.length_of(2)
|
||||
|
||||
|
||||
@mock_ec2
|
||||
def test_create_volume_with_tags():
|
||||
client = boto3.client('ec2', 'us-west-2')
|
||||
response = client.create_volume(
|
||||
AvailabilityZone='us-west-2',
|
||||
Encrypted=False,
|
||||
Size=40,
|
||||
TagSpecifications=[
|
||||
{
|
||||
'ResourceType': 'volume',
|
||||
'Tags': [
|
||||
{
|
||||
'Key': 'TEST_TAG',
|
||||
'Value': 'TEST_VALUE'
|
||||
}
|
||||
],
|
||||
}
|
||||
]
|
||||
)
|
||||
|
||||
assert response['Tags'][0]['Key'] == 'TEST_TAG'
|
||||
|
||||
|
||||
@mock_ec2
|
||||
def test_create_snapshot_with_tags():
|
||||
client = boto3.client('ec2', 'us-west-2')
|
||||
volume_id = client.create_volume(
|
||||
AvailabilityZone='us-west-2',
|
||||
Encrypted=False,
|
||||
Size=40,
|
||||
TagSpecifications=[
|
||||
{
|
||||
'ResourceType': 'volume',
|
||||
'Tags': [
|
||||
{
|
||||
'Key': 'TEST_TAG',
|
||||
'Value': 'TEST_VALUE'
|
||||
}
|
||||
],
|
||||
}
|
||||
]
|
||||
)['VolumeId']
|
||||
snapshot = client.create_snapshot(
|
||||
VolumeId=volume_id,
|
||||
TagSpecifications=[
|
||||
{
|
||||
'ResourceType': 'snapshot',
|
||||
'Tags': [
|
||||
{
|
||||
'Key': 'TEST_SNAPSHOT_TAG',
|
||||
'Value': 'TEST_SNAPSHOT_VALUE'
|
||||
}
|
||||
],
|
||||
}
|
||||
]
|
||||
)
|
||||
|
||||
expected_tags = [{
|
||||
'Key': 'TEST_SNAPSHOT_TAG',
|
||||
'Value': 'TEST_SNAPSHOT_VALUE'
|
||||
}]
|
||||
|
||||
assert snapshot['Tags'] == expected_tags
|
||||
from __future__ import unicode_literals
|
||||
from nose.tools import assert_raises
|
||||
|
||||
import itertools
|
||||
|
||||
import boto
|
||||
import boto3
|
||||
from boto.exception import EC2ResponseError
|
||||
from boto.ec2.instance import Reservation
|
||||
import sure # noqa
|
||||
|
||||
from moto import mock_ec2_deprecated, mock_ec2
|
||||
from nose.tools import assert_raises
|
||||
|
||||
|
||||
@mock_ec2_deprecated
def test_add_tag():
    """A tag added via ``instance.add_tag`` is visible on a re-fetched instance."""
    ec2 = boto.connect_ec2('the_key', 'the_secret')
    instance = ec2.run_instances('ami-1234abcd').instances[0]

    # Dry-run must be rejected before any tag is actually written.
    with assert_raises(EC2ResponseError) as ex:
        instance.add_tag("a key", "some value", dry_run=True)
    ex.exception.error_code.should.equal('DryRunOperation')
    ex.exception.status.should.equal(400)
    ex.exception.message.should.equal(
        'An error occurred (DryRunOperation) when calling the CreateTags operation: Request would have succeeded, but DryRun flag is set')

    instance.add_tag("a key", "some value")
    flatten = itertools.chain.from_iterable
    fetched = list(flatten(res.instances for res in ec2.get_all_instances()))
    fetched.should.have.length_of(1)
    fetched[0].tags["a key"].should.equal("some value")
|
||||
|
||||
|
||||
@mock_ec2_deprecated
def test_remove_tag():
    """``remove_tag`` deletes a tag; a dry-run removal leaves it untouched."""
    ec2 = boto.connect_ec2('the_key', 'the_secret')
    instance = ec2.run_instances('ami-1234abcd').instances[0]

    instance.add_tag("a key", "some value")

    first = ec2.get_all_tags()[0]
    first.name.should.equal("a key")
    first.value.should.equal("some value")

    # Dry-run removal must fail and keep the tag in place.
    with assert_raises(EC2ResponseError) as ex:
        instance.remove_tag("a key", dry_run=True)
    ex.exception.error_code.should.equal('DryRunOperation')
    ex.exception.status.should.equal(400)
    ex.exception.message.should.equal(
        'An error occurred (DryRunOperation) when calling the DeleteTags operation: Request would have succeeded, but DryRun flag is set')

    instance.remove_tag("a key")
    ec2.get_all_tags().should.have.length_of(0)

    # Removal by key *and* value must also be accepted.
    instance.add_tag("a key", "some value")
    ec2.get_all_tags().should.have.length_of(1)
    instance.remove_tag("a key", "some value")
|
||||
|
||||
|
||||
@mock_ec2_deprecated
def test_get_all_tags():
    """``get_all_tags`` returns tags previously attached to an instance."""
    ec2 = boto.connect_ec2('the_key', 'the_secret')
    instance = ec2.run_instances('ami-1234abcd').instances[0]

    instance.add_tag("a key", "some value")

    retrieved = ec2.get_all_tags()[0]
    retrieved.name.should.equal("a key")
    retrieved.value.should.equal("some value")
|
||||
|
||||
|
||||
@mock_ec2_deprecated
def test_get_all_tags_with_special_characters():
    """Tag values containing XML-special characters round-trip intact."""
    ec2 = boto.connect_ec2('the_key', 'the_secret')
    instance = ec2.run_instances('ami-1234abcd').instances[0]

    instance.add_tag("a key", "some<> value")

    retrieved = ec2.get_all_tags()[0]
    retrieved.name.should.equal("a key")
    retrieved.value.should.equal("some<> value")
|
||||
|
||||
|
||||
@mock_ec2_deprecated
def test_create_tags():
    """``create_tags`` applies several tags at once, including an empty value."""
    ec2 = boto.connect_ec2('the_key', 'the_secret')
    instance = ec2.run_instances('ami-1234abcd').instances[0]
    wanted = {'a key': 'some value',
              'another key': 'some other value',
              'blank key': ''}

    # Dry-run must be rejected before anything is tagged.
    with assert_raises(EC2ResponseError) as ex:
        ec2.create_tags(instance.id, wanted, dry_run=True)
    ex.exception.error_code.should.equal('DryRunOperation')
    ex.exception.status.should.equal(400)
    ex.exception.message.should.equal(
        'An error occurred (DryRunOperation) when calling the CreateTags operation: Request would have succeeded, but DryRun flag is set')

    ec2.create_tags(instance.id, wanted)
    stored = ec2.get_all_tags()
    set(wanted).should.equal({tag.name for tag in stored})
    set(wanted.values()).should.equal({tag.value for tag in stored})
|
||||
|
||||
|
||||
@mock_ec2_deprecated
def test_tag_limit_exceeded():
    """More than 50 tags per resource is rejected with TagLimitExceeded."""
    ec2 = boto.connect_ec2('the_key', 'the_secret')
    instance = ec2.run_instances('ami-1234abcd').instances[0]
    # 51 distinct keys -- one over the per-resource limit of 50.
    too_many = {'{0:02d}'.format(n + 1): '' for n in range(51)}

    with assert_raises(EC2ResponseError) as cm:
        ec2.create_tags(instance.id, too_many)
    cm.exception.code.should.equal('TagLimitExceeded')
    cm.exception.status.should.equal(400)
    cm.exception.request_id.should_not.be.none

    # Even with one tag already present the oversized batch must fail whole.
    instance.add_tag("a key", "a value")
    with assert_raises(EC2ResponseError) as cm:
        ec2.create_tags(instance.id, too_many)
    cm.exception.code.should.equal('TagLimitExceeded')
    cm.exception.status.should.equal(400)
    cm.exception.request_id.should_not.be.none

    remaining = ec2.get_all_tags()
    remaining.should.have.length_of(1)
    remaining[0].name.should.equal("a key")
    remaining[0].value.should.equal("a value")
|
||||
|
||||
|
||||
@mock_ec2_deprecated
def test_invalid_parameter_tag_null():
    """A ``None`` tag value is rejected with InvalidParameterValue."""
    ec2 = boto.connect_ec2('the_key', 'the_secret')
    instance = ec2.run_instances('ami-1234abcd').instances[0]

    with assert_raises(EC2ResponseError) as cm:
        instance.add_tag("a key", None)
    cm.exception.code.should.equal('InvalidParameterValue')
    cm.exception.status.should.equal(400)
    cm.exception.request_id.should_not.be.none
|
||||
|
||||
|
||||
@mock_ec2_deprecated
def test_invalid_id():
    """Tagging an unknown or malformed resource id raises InvalidID."""
    ec2 = boto.connect_ec2('the_key', 'the_secret')

    # Well-formed prefix but nonexistent resource, and a bogus prefix:
    for bad_id in ('ami-blah', 'blah-blah'):
        with assert_raises(EC2ResponseError) as cm:
            ec2.create_tags(bad_id, {'key': 'tag'})
        cm.exception.code.should.equal('InvalidID')
        cm.exception.status.should.equal(400)
        cm.exception.request_id.should_not.be.none
|
||||
|
||||
|
||||
@mock_ec2_deprecated
def test_get_all_tags_resource_id_filter():
    """The ``resource-id`` filter narrows tags to a single resource."""
    ec2 = boto.connect_ec2('the_key', 'the_secret')
    instance = ec2.run_instances('ami-1234abcd').instances[0]
    instance.add_tag("an instance key", "some value")
    image_id = ec2.create_image(instance.id, "test-ami", "this is a test ami")
    ec2.get_image(image_id).add_tag("an image key", "some value")

    # Filter by the instance id: only the instance's tag comes back.
    instance_tags = ec2.get_all_tags(filters={'resource-id': instance.id})
    instance_tags.should.have.length_of(1)
    instance_tags[0].res_id.should.equal(instance.id)
    instance_tags[0].res_type.should.equal('instance')
    instance_tags[0].name.should.equal("an instance key")
    instance_tags[0].value.should.equal("some value")

    # Filter by the image id: only the image's tag comes back.
    image_tags = ec2.get_all_tags(filters={'resource-id': image_id})
    image_tags.should.have.length_of(1)
    image_tags[0].res_id.should.equal(image_id)
    image_tags[0].res_type.should.equal('image')
    image_tags[0].name.should.equal("an image key")
    image_tags[0].value.should.equal("some value")
|
||||
|
||||
|
||||
@mock_ec2_deprecated
def test_get_all_tags_resource_type_filter():
    """The ``resource-type`` filter narrows tags to one resource kind."""
    ec2 = boto.connect_ec2('the_key', 'the_secret')
    instance = ec2.run_instances('ami-1234abcd').instances[0]
    instance.add_tag("an instance key", "some value")
    image_id = ec2.create_image(instance.id, "test-ami", "this is a test ami")
    ec2.get_image(image_id).add_tag("an image key", "some value")

    instance_tags = ec2.get_all_tags(filters={'resource-type': 'instance'})
    instance_tags.should.have.length_of(1)
    instance_tags[0].res_id.should.equal(instance.id)
    instance_tags[0].res_type.should.equal('instance')
    instance_tags[0].name.should.equal("an instance key")
    instance_tags[0].value.should.equal("some value")

    image_tags = ec2.get_all_tags(filters={'resource-type': 'image'})
    image_tags.should.have.length_of(1)
    image_tags[0].res_id.should.equal(image_id)
    image_tags[0].res_type.should.equal('image')
    image_tags[0].name.should.equal("an image key")
    image_tags[0].value.should.equal("some value")
|
||||
|
||||
|
||||
@mock_ec2_deprecated
def test_get_all_tags_key_filter():
    """The ``key`` filter returns only tags with the matching tag key."""
    ec2 = boto.connect_ec2('the_key', 'the_secret')
    instance = ec2.run_instances('ami-1234abcd').instances[0]
    instance.add_tag("an instance key", "some value")
    image_id = ec2.create_image(instance.id, "test-ami", "this is a test ami")
    ec2.get_image(image_id).add_tag("an image key", "some value")

    matched = ec2.get_all_tags(filters={'key': 'an instance key'})
    matched.should.have.length_of(1)
    matched[0].res_id.should.equal(instance.id)
    matched[0].res_type.should.equal('instance')
    matched[0].name.should.equal("an instance key")
    matched[0].value.should.equal("some value")
|
||||
|
||||
|
||||
@mock_ec2_deprecated
def test_get_all_tags_value_filter():
    """The ``value`` filter supports ``*``/``?`` wildcards and backslash escapes."""
    ec2 = boto.connect_ec2('the_key', 'the_secret')

    # One instance per tag value; remember the first for the AMI below.
    values = ["some value", "some other value", "other value*",
              "other value**", "other value*?"]
    first_instance = None
    for value in values:
        inst = ec2.run_instances('ami-1234abcd').instances[0]
        inst.add_tag("an instance key", value)
        if first_instance is None:
            first_instance = inst

    image_id = ec2.create_image(
        first_instance.id, "test-ami", "this is a test ami")
    ec2.get_image(image_id).add_tag("an image key", "some value")

    # (pattern, expected number of matching tags)
    cases = [
        ('some value', 2),
        ('some*value', 3),
        ('*some*value', 3),
        ('*some*value*', 3),
        ('*value\*', 1),
        ('*value\*\*', 1),
        ('*value\*\?', 1),
    ]
    for pattern, expected in cases:
        ec2.get_all_tags(
            filters={'value': pattern}).should.have.length_of(expected)
|
||||
|
||||
|
||||
@mock_ec2_deprecated
def test_retrieved_instances_must_contain_their_tags():
    """Instances fetched with ``get_all_instances`` carry their tags."""
    tag_key = 'Tag name'
    tag_value = 'Tag value'
    wanted_tags = {tag_key: tag_value}

    ec2 = boto.connect_ec2('the_key', 'the_secret')
    reservation = ec2.run_instances('ami-1234abcd')
    reservation.should.be.a(Reservation)
    reservation.instances.should.have.length_of(1)
    launched = reservation.instances[0]

    # Sanity-check the listing before tagging.
    listed = ec2.get_all_instances()
    listed.should.have.length_of(1)
    listed[0].id.should.equal(reservation.id)
    instances = listed[0].instances
    instances.should.have.length_of(1)
    instances[0].id.should.equal(launched.id)

    ec2.create_tags([launched.id], wanted_tags)
    refreshed = ec2.get_all_instances()[0].instances[0]
    retrieved_tags = refreshed.tags

    # Cleanup of instance
    ec2.terminate_instances([instances[0].id])

    # Check whether tag is present with correct value
    retrieved_tags[tag_key].should.equal(tag_value)
|
||||
|
||||
|
||||
@mock_ec2_deprecated
def test_retrieved_volumes_must_contain_their_tags():
    """Volumes fetched with ``get_all_volumes`` carry their tags."""
    tag_key = 'Tag name'
    tag_value = 'Tag value'
    wanted_tags = {tag_key: tag_value}

    ec2 = boto.connect_ec2('the_key', 'the_secret')
    ec2.create_volume(80, "us-east-1a")

    created = ec2.get_all_volumes()[0]
    ec2.create_tags([created.id], wanted_tags)

    # Fetch the volume again so the tags come from the service, not the
    # local object.
    refreshed = ec2.get_all_volumes()[0]
    retrieved_tags = refreshed.tags

    refreshed.delete()

    # Check whether tag is present with correct value
    retrieved_tags[tag_key].should.equal(tag_value)
|
||||
|
||||
|
||||
@mock_ec2_deprecated
def test_retrieved_snapshots_must_contain_their_tags():
    """Snapshots fetched with ``get_all_snapshots`` carry their tags."""
    tag_key = 'Tag name'
    tag_value = 'Tag value'
    wanted_tags = {tag_key: tag_value}

    ec2 = boto.connect_ec2(aws_access_key_id='the_key',
                           aws_secret_access_key='the_secret')
    volume = ec2.create_volume(80, "eu-west-1a")
    snapshot = ec2.create_snapshot(volume.id)
    ec2.create_tags([snapshot.id], wanted_tags)

    # Fetch the snapshot again so the tags come from the service.
    refreshed = next(s for s in ec2.get_all_snapshots()
                     if s.id == snapshot.id)
    retrieved_tags = refreshed.tags

    ec2.delete_snapshot(refreshed.id)
    volume.delete()

    # Check whether tag is present with correct value
    retrieved_tags[tag_key].should.equal(tag_value)
|
||||
|
||||
|
||||
@mock_ec2_deprecated
def test_filter_instances_by_wildcard_tags():
    """Instance filters ``tag:<key>``, ``tag-key`` and ``tag-value`` accept wildcards."""
    ec2 = boto.connect_ec2(aws_access_key_id='the_key',
                           aws_secret_access_key='the_secret')
    first = ec2.run_instances('ami-1234abcd').instances[0]
    first.add_tag("Key1", "Value1")
    second = ec2.run_instances('ami-1234abcd').instances[0]
    second.add_tag("Key1", "Value2")

    # Each wildcard filter should match both reservations.
    for wildcard_filter in ({'tag:Key1': 'Value*'},
                            {'tag-key': 'Key*'},
                            {'tag-value': 'Value*'}):
        ec2.get_all_instances(
            filters=wildcard_filter).should.have.length_of(2)
|
||||
|
||||
|
||||
@mock_ec2
def test_create_volume_with_tags():
    """``create_volume`` honours a TagSpecifications entry of type 'volume'."""
    client = boto3.client('ec2', 'us-west-2')
    tag_spec = [{
        'ResourceType': 'volume',
        'Tags': [{'Key': 'TEST_TAG', 'Value': 'TEST_VALUE'}],
    }]
    response = client.create_volume(
        AvailabilityZone='us-west-2',
        Encrypted=False,
        Size=40,
        TagSpecifications=tag_spec,
    )

    assert response['Tags'][0]['Key'] == 'TEST_TAG'
|
||||
|
||||
|
||||
@mock_ec2
def test_create_snapshot_with_tags():
    """``create_snapshot`` honours a TagSpecifications entry of type 'snapshot'."""
    client = boto3.client('ec2', 'us-west-2')
    volume_id = client.create_volume(
        AvailabilityZone='us-west-2',
        Encrypted=False,
        Size=40,
        TagSpecifications=[{
            'ResourceType': 'volume',
            'Tags': [{'Key': 'TEST_TAG', 'Value': 'TEST_VALUE'}],
        }],
    )['VolumeId']

    snapshot_tags = [{'Key': 'TEST_SNAPSHOT_TAG',
                      'Value': 'TEST_SNAPSHOT_VALUE'}]
    snapshot = client.create_snapshot(
        VolumeId=volume_id,
        TagSpecifications=[{
            'ResourceType': 'snapshot',
            'Tags': snapshot_tags,
        }],
    )

    assert snapshot['Tags'] == snapshot_tags
|
||||
|
|
|
|||
|
|
@ -1,8 +1,8 @@
|
|||
from moto.ec2 import utils
|
||||
|
||||
|
||||
def test_random_key_pair():
    """random_key_pair yields a fingerprint of the expected length and a
    PEM-delimited key body.

    NOTE(review): the opening delimiter uses four dashes while the closing
    one uses five -- this mirrors moto's generated material; confirm against
    moto.ec2.utils.random_key_pair before "fixing" either literal.
    """
    generated = utils.random_key_pair()
    assert len(generated['fingerprint']) == 59
    assert generated['material'].startswith('---- BEGIN RSA PRIVATE KEY ----')
    assert generated['material'].endswith('-----END RSA PRIVATE KEY-----')
|
||||
from moto.ec2 import utils
|
||||
|
||||
|
||||
def test_random_key_pair():
|
||||
key_pair = utils.random_key_pair()
|
||||
assert len(key_pair['fingerprint']) == 59
|
||||
assert key_pair['material'].startswith('---- BEGIN RSA PRIVATE KEY ----')
|
||||
assert key_pair['material'].endswith('-----END RSA PRIVATE KEY-----')
|
||||
|
|
|
|||
|
|
@ -1,105 +1,105 @@
|
|||
from __future__ import unicode_literals
|
||||
import boto
|
||||
import sure # noqa
|
||||
|
||||
from moto import mock_ec2_deprecated
|
||||
|
||||
|
||||
@mock_ec2_deprecated
def test_virtual_private_gateways():
    """create_vpn_gateway returns an available ipsec.1 gateway in the given AZ."""
    vpc_conn = boto.connect_vpc('the_key', 'the_secret')

    gateway = vpc_conn.create_vpn_gateway('ipsec.1', 'us-east-1a')
    gateway.should_not.be.none
    gateway.id.should.match(r'vgw-\w+')
    gateway.type.should.equal('ipsec.1')
    gateway.state.should.equal('available')
    gateway.availability_zone.should.equal('us-east-1a')
|
||||
|
||||
|
||||
@mock_ec2_deprecated
def test_describe_vpn_gateway():
    """get_all_vpn_gateways lists the gateway that was just created."""
    vpc_conn = boto.connect_vpc('the_key', 'the_secret')
    created = vpc_conn.create_vpn_gateway('ipsec.1', 'us-east-1a')

    listed = vpc_conn.get_all_vpn_gateways()
    listed.should.have.length_of(1)

    described = listed[0]
    described.id.should.match(r'vgw-\w+')
    described.id.should.equal(created.id)
    created.type.should.equal('ipsec.1')
    created.state.should.equal('available')
    created.availability_zone.should.equal('us-east-1a')
|
||||
|
||||
|
||||
@mock_ec2_deprecated
def test_vpn_gateway_vpc_attachment():
    """attach_vpn_gateway records an 'attached' attachment to the VPC."""
    vpc_conn = boto.connect_vpc('the_key', 'the_secret')
    vpc = vpc_conn.create_vpc("10.0.0.0/16")
    gateway = vpc_conn.create_vpn_gateway('ipsec.1', 'us-east-1a')

    vpc_conn.attach_vpn_gateway(vpn_gateway_id=gateway.id, vpc_id=vpc.id)

    refreshed = vpc_conn.get_all_vpn_gateways()[0]
    refreshed.attachments.should.have.length_of(1)
    refreshed.attachments[0].vpc_id.should.equal(vpc.id)
    refreshed.attachments[0].state.should.equal('attached')
|
||||
|
||||
|
||||
@mock_ec2_deprecated
def test_delete_vpn_gateway():
    """A deleted VPN gateway no longer appears in get_all_vpn_gateways."""
    vpc_conn = boto.connect_vpc('the_key', 'the_secret')
    gateway = vpc_conn.create_vpn_gateway('ipsec.1', 'us-east-1a')

    vpc_conn.delete_vpn_gateway(gateway.id)
    vpc_conn.get_all_vpn_gateways().should.have.length_of(0)
|
||||
|
||||
|
||||
@mock_ec2_deprecated
def test_vpn_gateway_tagging():
    """Tags on a VPN gateway are listed globally and on the refreshed object."""
    vpc_conn = boto.connect_vpc('the_key', 'the_secret')
    gateway = vpc_conn.create_vpn_gateway('ipsec.1', 'us-east-1a')
    gateway.add_tag("a key", "some value")

    stored = vpc_conn.get_all_tags()[0]
    stored.name.should.equal("a key")
    stored.value.should.equal("some value")

    # Refresh the gateway so the tags come from the service.
    refreshed = vpc_conn.get_all_vpn_gateways()[0]
    refreshed.tags.should.have.length_of(1)
    refreshed.tags["a key"].should.equal("some value")
|
||||
|
||||
|
||||
@mock_ec2_deprecated
def test_detach_vpn_gateway():
    """detach_vpn_gateway removes a previously created VPC attachment."""
    vpc_conn = boto.connect_vpc('the_key', 'the_secret')
    vpc = vpc_conn.create_vpc("10.0.0.0/16")
    gateway = vpc_conn.create_vpn_gateway('ipsec.1', 'us-east-1a')

    vpc_conn.attach_vpn_gateway(vpn_gateway_id=gateway.id, vpc_id=vpc.id)

    attached = vpc_conn.get_all_vpn_gateways()[0]
    attached.attachments.should.have.length_of(1)
    attached.attachments[0].vpc_id.should.equal(vpc.id)
    attached.attachments[0].state.should.equal('attached')

    vpc_conn.detach_vpn_gateway(vpn_gateway_id=gateway.id, vpc_id=vpc.id)

    detached = vpc_conn.get_all_vpn_gateways()[0]
    detached.attachments.should.have.length_of(0)
|
||||
from __future__ import unicode_literals
|
||||
import boto
|
||||
import sure # noqa
|
||||
|
||||
from moto import mock_ec2_deprecated
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_virtual_private_gateways():
|
||||
conn = boto.connect_vpc('the_key', 'the_secret')
|
||||
|
||||
vpn_gateway = conn.create_vpn_gateway('ipsec.1', 'us-east-1a')
|
||||
vpn_gateway.should_not.be.none
|
||||
vpn_gateway.id.should.match(r'vgw-\w+')
|
||||
vpn_gateway.type.should.equal('ipsec.1')
|
||||
vpn_gateway.state.should.equal('available')
|
||||
vpn_gateway.availability_zone.should.equal('us-east-1a')
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_describe_vpn_gateway():
|
||||
conn = boto.connect_vpc('the_key', 'the_secret')
|
||||
vpn_gateway = conn.create_vpn_gateway('ipsec.1', 'us-east-1a')
|
||||
|
||||
vgws = conn.get_all_vpn_gateways()
|
||||
vgws.should.have.length_of(1)
|
||||
|
||||
gateway = vgws[0]
|
||||
gateway.id.should.match(r'vgw-\w+')
|
||||
gateway.id.should.equal(vpn_gateway.id)
|
||||
vpn_gateway.type.should.equal('ipsec.1')
|
||||
vpn_gateway.state.should.equal('available')
|
||||
vpn_gateway.availability_zone.should.equal('us-east-1a')
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_vpn_gateway_vpc_attachment():
|
||||
conn = boto.connect_vpc('the_key', 'the_secret')
|
||||
vpc = conn.create_vpc("10.0.0.0/16")
|
||||
vpn_gateway = conn.create_vpn_gateway('ipsec.1', 'us-east-1a')
|
||||
|
||||
conn.attach_vpn_gateway(
|
||||
vpn_gateway_id=vpn_gateway.id,
|
||||
vpc_id=vpc.id
|
||||
)
|
||||
|
||||
gateway = conn.get_all_vpn_gateways()[0]
|
||||
attachments = gateway.attachments
|
||||
attachments.should.have.length_of(1)
|
||||
attachments[0].vpc_id.should.equal(vpc.id)
|
||||
attachments[0].state.should.equal('attached')
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_delete_vpn_gateway():
|
||||
conn = boto.connect_vpc('the_key', 'the_secret')
|
||||
vpn_gateway = conn.create_vpn_gateway('ipsec.1', 'us-east-1a')
|
||||
|
||||
conn.delete_vpn_gateway(vpn_gateway.id)
|
||||
vgws = conn.get_all_vpn_gateways()
|
||||
vgws.should.have.length_of(0)
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_vpn_gateway_tagging():
|
||||
conn = boto.connect_vpc('the_key', 'the_secret')
|
||||
vpn_gateway = conn.create_vpn_gateway('ipsec.1', 'us-east-1a')
|
||||
vpn_gateway.add_tag("a key", "some value")
|
||||
|
||||
tag = conn.get_all_tags()[0]
|
||||
tag.name.should.equal("a key")
|
||||
tag.value.should.equal("some value")
|
||||
|
||||
# Refresh the subnet
|
||||
vpn_gateway = conn.get_all_vpn_gateways()[0]
|
||||
vpn_gateway.tags.should.have.length_of(1)
|
||||
vpn_gateway.tags["a key"].should.equal("some value")
|
||||
|
||||
|
||||
@mock_ec2_deprecated
|
||||
def test_detach_vpn_gateway():
|
||||
|
||||
conn = boto.connect_vpc('the_key', 'the_secret')
|
||||
vpc = conn.create_vpc("10.0.0.0/16")
|
||||
vpn_gateway = conn.create_vpn_gateway('ipsec.1', 'us-east-1a')
|
||||
|
||||
conn.attach_vpn_gateway(
|
||||
vpn_gateway_id=vpn_gateway.id,
|
||||
vpc_id=vpc.id
|
||||
)
|
||||
|
||||
gateway = conn.get_all_vpn_gateways()[0]
|
||||
attachments = gateway.attachments
|
||||
attachments.should.have.length_of(1)
|
||||
attachments[0].vpc_id.should.equal(vpc.id)
|
||||
attachments[0].state.should.equal('attached')
|
||||
|
||||
conn.detach_vpn_gateway(
|
||||
vpn_gateway_id=vpn_gateway.id,
|
||||
vpc_id=vpc.id
|
||||
)
|
||||
|
||||
gateway = conn.get_all_vpn_gateways()[0]
|
||||
attachments = gateway.attachments
|
||||
attachments.should.have.length_of(0)
|
||||
|
|
|
|||
|
|
@ -1,10 +1,10 @@
|
|||
from __future__ import unicode_literals
|
||||
import boto
|
||||
import sure # noqa
|
||||
|
||||
from moto import mock_ec2
|
||||
|
||||
|
||||
@mock_ec2
def test_vm_export():
    """Placeholder: VM export is not implemented in moto yet."""
    pass
|
||||
from __future__ import unicode_literals
|
||||
import boto
|
||||
import sure # noqa
|
||||
|
||||
from moto import mock_ec2
|
||||
|
||||
|
||||
@mock_ec2
|
||||
def test_vm_export():
|
||||
pass
|
||||
|
|
|
|||
|
|
@ -1,10 +1,10 @@
|
|||
from __future__ import unicode_literals
|
||||
import boto
|
||||
import sure # noqa
|
||||
|
||||
from moto import mock_ec2
|
||||
|
||||
|
||||
@mock_ec2
def test_vm_import():
    """Placeholder: VM import is not implemented in moto yet."""
    pass
|
||||
from __future__ import unicode_literals
|
||||
import boto
|
||||
import sure # noqa
|
||||
|
||||
from moto import mock_ec2
|
||||
|
||||
|
||||
@mock_ec2
|
||||
def test_vm_import():
|
||||
pass
|
||||
|
|
|
|||
|
|
@ -1,132 +1,132 @@
|
|||
from __future__ import unicode_literals
|
||||
# Ensure 'assert_raises' context manager support for Python 2.6
|
||||
import tests.backport_assert_raises
|
||||
from nose.tools import assert_raises
|
||||
from moto.ec2.exceptions import EC2ClientError
|
||||
from botocore.exceptions import ClientError
|
||||
|
||||
import boto3
|
||||
import boto
|
||||
from boto.exception import EC2ResponseError
|
||||
import sure # noqa
|
||||
|
||||
from moto import mock_ec2, mock_ec2_deprecated
|
||||
from tests.helpers import requires_boto_gte
|
||||
|
||||
|
||||
@requires_boto_gte("2.32.0")
@mock_ec2_deprecated
def test_vpc_peering_connections():
    """Creating a peering connection starts in 'initiating-request'.

    Returns the connection so the sibling tests below can reuse it as a
    fixture (they call this function directly).
    """
    vpc_conn = boto.connect_vpc('the_key', 'the_secret')
    requester = vpc_conn.create_vpc("10.0.0.0/16")
    accepter = vpc_conn.create_vpc("11.0.0.0/16")

    pcx = vpc_conn.create_vpc_peering_connection(requester.id, accepter.id)
    pcx._status.code.should.equal('initiating-request')

    return pcx
|
||||
|
||||
|
||||
@requires_boto_gte("2.32.0")
@mock_ec2_deprecated
def test_vpc_peering_connections_get_all():
    """Listing peering connections reports them as 'pending-acceptance'."""
    vpc_conn = boto.connect_vpc('the_key', 'the_secret')
    pcx = test_vpc_peering_connections()
    pcx._status.code.should.equal('initiating-request')

    listed = vpc_conn.get_all_vpc_peering_connections()
    listed.should.have.length_of(1)
    listed[0]._status.code.should.equal('pending-acceptance')
|
||||
|
||||
|
||||
@requires_boto_gte("2.32.0")
@mock_ec2_deprecated
def test_vpc_peering_connections_accept():
    """An accepted peering connection is 'active' and can no longer be rejected."""
    vpc_conn = boto.connect_vpc('the_key', 'the_secret')
    pcx = test_vpc_peering_connections()

    accepted = vpc_conn.accept_vpc_peering_connection(pcx.id)
    accepted._status.code.should.equal('active')

    # Rejecting after acceptance is an invalid state transition.
    with assert_raises(EC2ResponseError) as cm:
        vpc_conn.reject_vpc_peering_connection(accepted.id)
    cm.exception.code.should.equal('InvalidStateTransition')
    cm.exception.status.should.equal(400)
    cm.exception.request_id.should_not.be.none

    listed = vpc_conn.get_all_vpc_peering_connections()
    listed.should.have.length_of(1)
    listed[0]._status.code.should.equal('active')
|
||||
|
||||
|
||||
@requires_boto_gte("2.32.0")
@mock_ec2_deprecated
def test_vpc_peering_connections_reject():
    """A rejected peering connection is 'rejected' and can no longer be accepted."""
    vpc_conn = boto.connect_vpc('the_key', 'the_secret')
    pcx = test_vpc_peering_connections()

    vpc_conn.reject_vpc_peering_connection(pcx.id).should.equal(True)

    # Accepting after rejection is an invalid state transition.
    with assert_raises(EC2ResponseError) as cm:
        vpc_conn.accept_vpc_peering_connection(pcx.id)
    cm.exception.code.should.equal('InvalidStateTransition')
    cm.exception.status.should.equal(400)
    cm.exception.request_id.should_not.be.none

    listed = vpc_conn.get_all_vpc_peering_connections()
    listed.should.have.length_of(1)
    listed[0]._status.code.should.equal('rejected')
|
||||
|
||||
|
||||
@requires_boto_gte("2.32.1")
@mock_ec2_deprecated
def test_vpc_peering_connections_delete():
    """Deleting a peering connection removes it; unknown ids raise NotFound."""
    vpc_conn = boto.connect_vpc('the_key', 'the_secret')
    pcx = test_vpc_peering_connections()

    pcx.delete().should.equal(True)

    vpc_conn.get_all_vpc_peering_connections().should.have.length_of(0)

    with assert_raises(EC2ResponseError) as cm:
        vpc_conn.delete_vpc_peering_connection("pcx-1234abcd")
    cm.exception.code.should.equal('InvalidVpcPeeringConnectionId.NotFound')
    cm.exception.status.should.equal(400)
    cm.exception.request_id.should_not.be.none
|
||||
|
||||
|
||||
@mock_ec2
def test_vpc_peering_connections_cross_region():
    """Cross-region peering links VPCs in us-west-1 and ap-northeast-1."""
    # create vpc in us-west-1 and ap-northeast-1
    ec2_usw1 = boto3.resource('ec2', region_name='us-west-1')
    vpc_usw1 = ec2_usw1.create_vpc(CidrBlock='10.90.0.0/16')
    ec2_apn1 = boto3.resource('ec2', region_name='ap-northeast-1')
    vpc_apn1 = ec2_apn1.create_vpc(CidrBlock='10.20.0.0/16')

    # create peering
    pcx = ec2_usw1.create_vpc_peering_connection(
        VpcId=vpc_usw1.id,
        PeerVpcId=vpc_apn1.id,
        PeerRegion='ap-northeast-1',
    )
    pcx.status['Code'].should.equal('initiating-request')
    pcx.requester_vpc.id.should.equal(vpc_usw1.id)
    pcx.accepter_vpc.id.should.equal(vpc_apn1.id)
|
||||
|
||||
|
||||
@mock_ec2
def test_vpc_peering_connections_cross_region_fail():
    """Peering toward a region where the peer VPC does not exist must fail."""
    ec2_usw1 = boto3.resource('ec2', region_name='us-west-1')
    ec2_apn1 = boto3.resource('ec2', region_name='ap-northeast-1')
    vpc_usw1 = ec2_usw1.create_vpc(CidrBlock='10.90.0.0/16')
    vpc_apn1 = ec2_apn1.create_vpc(CidrBlock='10.20.0.0/16')

    # The peer VPC lives in ap-northeast-1, so pointing PeerRegion at
    # ap-northeast-2 must raise InvalidVpcID.NotFound.
    with assert_raises(ClientError) as cm:
        ec2_usw1.create_vpc_peering_connection(
            VpcId=vpc_usw1.id,
            PeerVpcId=vpc_apn1.id,
            PeerRegion='ap-northeast-2')
    cm.exception.response['Error']['Code'].should.equal('InvalidVpcID.NotFound')
|
||||
from __future__ import unicode_literals
|
||||
# Ensure 'assert_raises' context manager support for Python 2.6
|
||||
import tests.backport_assert_raises
|
||||
from nose.tools import assert_raises
|
||||
from moto.ec2.exceptions import EC2ClientError
|
||||
from botocore.exceptions import ClientError
|
||||
|
||||
import boto3
|
||||
import boto
|
||||
from boto.exception import EC2ResponseError
|
||||
import sure # noqa
|
||||
|
||||
from moto import mock_ec2, mock_ec2_deprecated
|
||||
from tests.helpers import requires_boto_gte
|
||||
|
||||
|
||||
@requires_boto_gte("2.32.0")
@mock_ec2_deprecated
def test_vpc_peering_connections():
    """Create a VPC peering connection; doubles as a fixture for sibling tests."""
    conn = boto.connect_vpc('the_key', 'the_secret')
    requester = conn.create_vpc("10.0.0.0/16")
    accepter = conn.create_vpc("11.0.0.0/16")

    pcx = conn.create_vpc_peering_connection(requester.id, accepter.id)
    pcx._status.code.should.equal('initiating-request')

    # Returned so the other tests in this module can reuse the connection.
    return pcx
|
||||
|
||||
|
||||
@requires_boto_gte("2.32.0")
@mock_ec2_deprecated
def test_vpc_peering_connections_get_all():
    """Listing peering connections returns the single one created by the fixture."""
    conn = boto.connect_vpc('the_key', 'the_secret')
    pcx = test_vpc_peering_connections()
    pcx._status.code.should.equal('initiating-request')

    listed = conn.get_all_vpc_peering_connections()
    listed.should.have.length_of(1)
    # When listed, the connection reports as pending acceptance.
    listed[0]._status.code.should.equal('pending-acceptance')
|
||||
|
||||
|
||||
@requires_boto_gte("2.32.0")
@mock_ec2_deprecated
def test_vpc_peering_connections_accept():
    """Accepting makes the connection 'active'; rejecting it afterwards must fail."""
    conn = boto.connect_vpc('the_key', 'the_secret')
    pcx = test_vpc_peering_connections()

    pcx = conn.accept_vpc_peering_connection(pcx.id)
    pcx._status.code.should.equal('active')

    # An already-accepted connection cannot be rejected.
    with assert_raises(EC2ResponseError) as cm:
        conn.reject_vpc_peering_connection(pcx.id)
    cm.exception.code.should.equal('InvalidStateTransition')
    cm.exception.status.should.equal(400)
    cm.exception.request_id.should_not.be.none

    listed = conn.get_all_vpc_peering_connections()
    listed.should.have.length_of(1)
    listed[0]._status.code.should.equal('active')
|
||||
|
||||
|
||||
@requires_boto_gte("2.32.0")
@mock_ec2_deprecated
def test_vpc_peering_connections_reject():
    """Rejecting makes the connection 'rejected'; accepting it afterwards must fail."""
    conn = boto.connect_vpc('the_key', 'the_secret')
    pcx = test_vpc_peering_connections()

    outcome = conn.reject_vpc_peering_connection(pcx.id)
    outcome.should.equal(True)

    # An already-rejected connection cannot be accepted.
    with assert_raises(EC2ResponseError) as cm:
        conn.accept_vpc_peering_connection(pcx.id)
    cm.exception.code.should.equal('InvalidStateTransition')
    cm.exception.status.should.equal(400)
    cm.exception.request_id.should_not.be.none

    listed = conn.get_all_vpc_peering_connections()
    listed.should.have.length_of(1)
    listed[0]._status.code.should.equal('rejected')
|
||||
|
||||
|
||||
# NOTE(review): the three tests below are a second, identical copy of the
# tests defined earlier in this file -- an artifact of a botched merge.
# The duplicate definitions shadow the earlier ones; consider deduplicating.


@requires_boto_gte("2.32.1")
@mock_ec2_deprecated
def test_vpc_peering_connections_delete():
    """Delete a peering connection; deleting an unknown id raises NotFound."""
    conn = boto.connect_vpc('the_key', 'the_secret')
    vpc_pcx = test_vpc_peering_connections()

    verdict = vpc_pcx.delete()
    verdict.should.equal(True)

    all_vpc_pcxs = conn.get_all_vpc_peering_connections()
    all_vpc_pcxs.should.have.length_of(0)

    with assert_raises(EC2ResponseError) as cm:
        conn.delete_vpc_peering_connection("pcx-1234abcd")
    cm.exception.code.should.equal('InvalidVpcPeeringConnectionId.NotFound')
    cm.exception.status.should.equal(400)
    cm.exception.request_id.should_not.be.none


@mock_ec2
def test_vpc_peering_connections_cross_region():
    """Cross-region peering starts in 'initiating-request'."""
    # create vpc in us-west-1 and ap-northeast-1
    ec2_usw1 = boto3.resource('ec2', region_name='us-west-1')
    vpc_usw1 = ec2_usw1.create_vpc(CidrBlock='10.90.0.0/16')
    ec2_apn1 = boto3.resource('ec2', region_name='ap-northeast-1')
    vpc_apn1 = ec2_apn1.create_vpc(CidrBlock='10.20.0.0/16')
    # create peering
    vpc_pcx = ec2_usw1.create_vpc_peering_connection(
        VpcId=vpc_usw1.id,
        PeerVpcId=vpc_apn1.id,
        PeerRegion='ap-northeast-1',
    )
    vpc_pcx.status['Code'].should.equal('initiating-request')
    vpc_pcx.requester_vpc.id.should.equal(vpc_usw1.id)
    vpc_pcx.accepter_vpc.id.should.equal(vpc_apn1.id)


@mock_ec2
def test_vpc_peering_connections_cross_region_fail():
    """Peering toward a region where the peer VPC does not exist must fail."""
    # create vpc in us-west-1 and ap-northeast-1
    ec2_usw1 = boto3.resource('ec2', region_name='us-west-1')
    vpc_usw1 = ec2_usw1.create_vpc(CidrBlock='10.90.0.0/16')
    ec2_apn1 = boto3.resource('ec2', region_name='ap-northeast-1')
    vpc_apn1 = ec2_apn1.create_vpc(CidrBlock='10.20.0.0/16')
    # create peering wrong region with no vpc
    with assert_raises(ClientError) as cm:
        ec2_usw1.create_vpc_peering_connection(
            VpcId=vpc_usw1.id,
            PeerVpcId=vpc_apn1.id,
            PeerRegion='ap-northeast-2')
    cm.exception.response['Error']['Code'].should.equal('InvalidVpcID.NotFound')
|
||||
|
|
|
|||
File diff suppressed because it is too large
Load diff
|
|
@ -1,51 +1,51 @@
|
|||
from __future__ import unicode_literals
|
||||
import boto
|
||||
from nose.tools import assert_raises
|
||||
import sure # noqa
|
||||
from boto.exception import EC2ResponseError
|
||||
|
||||
from moto import mock_ec2_deprecated
|
||||
|
||||
|
||||
@mock_ec2_deprecated
def test_create_vpn_connections():
    """A new VPN connection gets a vpn-* id and keeps the requested type."""
    vpc_conn = boto.connect_vpc('the_key', 'the_secret')
    vpn = vpc_conn.create_vpn_connection(
        'ipsec.1', 'vgw-0123abcd', 'cgw-0123abcd')

    vpn.should_not.be.none
    vpn.id.should.match(r'vpn-\w+')
    vpn.type.should.equal('ipsec.1')
|
||||
|
||||
|
||||
@mock_ec2_deprecated
def test_delete_vpn_connections():
    """Deleting the only VPN connection leaves the listing empty."""
    vpc_conn = boto.connect_vpc('the_key', 'the_secret')
    vpn = vpc_conn.create_vpn_connection(
        'ipsec.1', 'vgw-0123abcd', 'cgw-0123abcd')
    vpc_conn.get_all_vpn_connections().should.have.length_of(1)

    vpc_conn.delete_vpn_connection(vpn.id)
    vpc_conn.get_all_vpn_connections().should.have.length_of(0)
|
||||
|
||||
|
||||
@mock_ec2_deprecated
def test_delete_vpn_connections_bad_id():
    """Deleting a VPN connection id that was never created raises an EC2 error."""
    vpc_conn = boto.connect_vpc('the_key', 'the_secret')
    with assert_raises(EC2ResponseError):
        vpc_conn.delete_vpn_connection('vpn-0123abcd')
|
||||
|
||||
|
||||
@mock_ec2_deprecated
def test_describe_vpn_connections():
    """Listing tracks each created connection and can filter by id."""
    vpc_conn = boto.connect_vpc('the_key', 'the_secret')
    vpc_conn.get_all_vpn_connections().should.have.length_of(0)

    vpc_conn.create_vpn_connection('ipsec.1', 'vgw-0123abcd', 'cgw-0123abcd')
    vpc_conn.get_all_vpn_connections().should.have.length_of(1)

    second = vpc_conn.create_vpn_connection('ipsec.1', 'vgw-1234abcd', 'cgw-1234abcd')
    vpc_conn.get_all_vpn_connections().should.have.length_of(2)

    # Filtering by id narrows the listing to the matching connection only.
    vpc_conn.get_all_vpn_connections(second.id).should.have.length_of(1)
|
||||
from __future__ import unicode_literals
|
||||
import boto
|
||||
from nose.tools import assert_raises
|
||||
import sure # noqa
|
||||
from boto.exception import EC2ResponseError
|
||||
|
||||
from moto import mock_ec2_deprecated
|
||||
|
||||
|
||||
# NOTE(review): duplicate copy of the four VPN-connection tests above --
# merge artifact; these definitions shadow the earlier ones.


@mock_ec2_deprecated
def test_create_vpn_connections():
    """A new VPN connection gets a vpn-* id and keeps the requested type."""
    conn = boto.connect_vpc('the_key', 'the_secret')
    vpn_connection = conn.create_vpn_connection(
        'ipsec.1', 'vgw-0123abcd', 'cgw-0123abcd')
    vpn_connection.should_not.be.none
    vpn_connection.id.should.match(r'vpn-\w+')
    vpn_connection.type.should.equal('ipsec.1')


@mock_ec2_deprecated
def test_delete_vpn_connections():
    """Deleting the only VPN connection leaves the listing empty."""
    conn = boto.connect_vpc('the_key', 'the_secret')
    vpn_connection = conn.create_vpn_connection(
        'ipsec.1', 'vgw-0123abcd', 'cgw-0123abcd')
    list_of_vpn_connections = conn.get_all_vpn_connections()
    list_of_vpn_connections.should.have.length_of(1)
    conn.delete_vpn_connection(vpn_connection.id)
    list_of_vpn_connections = conn.get_all_vpn_connections()
    list_of_vpn_connections.should.have.length_of(0)


@mock_ec2_deprecated
def test_delete_vpn_connections_bad_id():
    """Deleting an unknown VPN connection id raises an EC2 error."""
    conn = boto.connect_vpc('the_key', 'the_secret')
    with assert_raises(EC2ResponseError):
        conn.delete_vpn_connection('vpn-0123abcd')


@mock_ec2_deprecated
def test_describe_vpn_connections():
    """Listing tracks created connections and can filter by id."""
    conn = boto.connect_vpc('the_key', 'the_secret')
    list_of_vpn_connections = conn.get_all_vpn_connections()
    list_of_vpn_connections.should.have.length_of(0)
    conn.create_vpn_connection('ipsec.1', 'vgw-0123abcd', 'cgw-0123abcd')
    list_of_vpn_connections = conn.get_all_vpn_connections()
    list_of_vpn_connections.should.have.length_of(1)
    vpn = conn.create_vpn_connection('ipsec.1', 'vgw-1234abcd', 'cgw-1234abcd')
    list_of_vpn_connections = conn.get_all_vpn_connections()
    list_of_vpn_connections.should.have.length_of(2)
    list_of_vpn_connections = conn.get_all_vpn_connections(vpn.id)
    list_of_vpn_connections.should.have.length_of(1)
|
||||
|
|
|
|||
|
|
@ -1,10 +1,10 @@
|
|||
from __future__ import unicode_literals
|
||||
import boto
|
||||
import sure # noqa
|
||||
|
||||
from moto import mock_ec2
|
||||
|
||||
|
||||
@mock_ec2
def test_windows():
    # Placeholder: no Windows-instance behavior is exercised yet.
    pass
|
||||
from __future__ import unicode_literals
|
||||
import boto
|
||||
import sure # noqa
|
||||
|
||||
from moto import mock_ec2
|
||||
|
||||
|
||||
# NOTE(review): duplicate of test_windows above (merge artifact).
@mock_ec2
def test_windows():
    # Placeholder: no Windows-instance behavior is exercised yet.
    pass
|
||||
|
|
|
|||
File diff suppressed because it is too large
Load diff
File diff suppressed because it is too large
Load diff
File diff suppressed because it is too large
Load diff
|
|
@ -1,17 +1,17 @@
|
|||
from __future__ import unicode_literals
|
||||
import sure # noqa
|
||||
|
||||
import moto.server as server
|
||||
|
||||
'''
|
||||
Test the different server responses
|
||||
'''
|
||||
|
||||
|
||||
def test_elb_describe_instances():
    """The ELB backend app answers the DescribeLoadBalancers action."""
    app = server.create_backend_app("elb")
    http = app.test_client()

    response = http.get('/?Action=DescribeLoadBalancers&Version=2015-12-01')

    response.data.should.contain(b'DescribeLoadBalancersResponse')
|
||||
from __future__ import unicode_literals
|
||||
import sure # noqa
|
||||
|
||||
import moto.server as server
|
||||
|
||||
'''
|
||||
Test the different server responses
|
||||
'''
|
||||
|
||||
|
||||
# NOTE(review): duplicate of test_elb_describe_instances above (merge artifact).
def test_elb_describe_instances():
    """The ELB backend app answers the DescribeLoadBalancers action."""
    backend = server.create_backend_app("elb")
    test_client = backend.test_client()

    res = test_client.get('/?Action=DescribeLoadBalancers&Version=2015-12-01')

    res.data.should.contain(b'DescribeLoadBalancersResponse')
|
||||
|
|
|
|||
File diff suppressed because it is too large
Load diff
|
|
@ -1,17 +1,17 @@
|
|||
from __future__ import unicode_literals
|
||||
import sure # noqa
|
||||
|
||||
import moto.server as server
|
||||
|
||||
'''
|
||||
Test the different server responses
|
||||
'''
|
||||
|
||||
|
||||
def test_elbv2_describe_load_balancers():
    """The ELBv2 backend app answers the DescribeLoadBalancers action."""
    app = server.create_backend_app("elbv2")
    http = app.test_client()

    response = http.get('/?Action=DescribeLoadBalancers&Version=2015-12-01')

    response.data.should.contain(b'DescribeLoadBalancersResponse')
|
||||
from __future__ import unicode_literals
|
||||
import sure # noqa
|
||||
|
||||
import moto.server as server
|
||||
|
||||
'''
|
||||
Test the different server responses
|
||||
'''
|
||||
|
||||
|
||||
# NOTE(review): duplicate of test_elbv2_describe_load_balancers above (merge artifact).
def test_elbv2_describe_load_balancers():
    """The ELBv2 backend app answers the DescribeLoadBalancers action."""
    backend = server.create_backend_app("elbv2")
    test_client = backend.test_client()

    res = test_client.get('/?Action=DescribeLoadBalancers&Version=2015-12-01')

    res.data.should.contain(b'DescribeLoadBalancersResponse')
|
||||
|
|
|
|||
File diff suppressed because it is too large
Load diff
File diff suppressed because it is too large
Load diff
|
|
@ -1,18 +1,18 @@
|
|||
from __future__ import unicode_literals
|
||||
import sure # noqa
|
||||
|
||||
import moto.server as server
|
||||
|
||||
'''
|
||||
Test the different server responses
|
||||
'''
|
||||
|
||||
|
||||
def test_describe_jobflows():
    """The EMR backend app answers DescribeJobFlows with an empty job list."""
    app = server.create_backend_app("emr")
    http = app.test_client()

    response = http.get('/?Action=DescribeJobFlows')

    response.data.should.contain(b'<DescribeJobFlowsResult>')
    response.data.should.contain(b'<JobFlows>')
|
||||
from __future__ import unicode_literals
|
||||
import sure # noqa
|
||||
|
||||
import moto.server as server
|
||||
|
||||
'''
|
||||
Test the different server responses
|
||||
'''
|
||||
|
||||
|
||||
# NOTE(review): duplicate of test_describe_jobflows above (merge artifact).
def test_describe_jobflows():
    """The EMR backend app answers the DescribeJobFlows action."""
    backend = server.create_backend_app("emr")
    test_client = backend.test_client()

    res = test_client.get('/?Action=DescribeJobFlows')

    res.data.should.contain(b'<DescribeJobFlowsResult>')
    res.data.should.contain(b'<JobFlows>')
|
||||
|
|
|
|||
|
|
@ -1,211 +1,211 @@
|
|||
import random
|
||||
|
||||
import boto3
|
||||
import json
|
||||
|
||||
from moto.events import mock_events
|
||||
from botocore.exceptions import ClientError
|
||||
from nose.tools import assert_raises
|
||||
|
||||
|
||||
# Test fixtures: three rules (two schedule-based, one pattern-based) and six
# lambda targets; each target lists the rules it should be attached to.
RULES = [
    {'Name': 'test1', 'ScheduleExpression': 'rate(5 minutes)'},
    {'Name': 'test2', 'ScheduleExpression': 'rate(1 minute)'},
    {'Name': 'test3', 'EventPattern': '{"source": ["test-source"]}'}
]

TARGETS = {
    'test-target-1': {
        'Id': 'test-target-1',
        'Arn': 'arn:aws:lambda:us-west-2:111111111111:function:test-function-1',
        'Rules': ['test1', 'test2']
    },
    'test-target-2': {
        'Id': 'test-target-2',
        'Arn': 'arn:aws:lambda:us-west-2:111111111111:function:test-function-2',
        'Rules': ['test1', 'test3']
    },
    'test-target-3': {
        'Id': 'test-target-3',
        'Arn': 'arn:aws:lambda:us-west-2:111111111111:function:test-function-3',
        'Rules': ['test1', 'test2']
    },
    'test-target-4': {
        'Id': 'test-target-4',
        'Arn': 'arn:aws:lambda:us-west-2:111111111111:function:test-function-4',
        'Rules': ['test1', 'test3']
    },
    'test-target-5': {
        'Id': 'test-target-5',
        'Arn': 'arn:aws:lambda:us-west-2:111111111111:function:test-function-5',
        'Rules': ['test1', 'test2']
    },
    'test-target-6': {
        'Id': 'test-target-6',
        'Arn': 'arn:aws:lambda:us-west-2:111111111111:function:test-function-6',
        'Rules': ['test1', 'test3']
    }
}
|
||||
|
||||
|
||||
def get_random_rule():
    """Return one of the predefined RULES entries, chosen uniformly at random."""
    # random.choice is the idiomatic equivalent of indexing with
    # random.randint(0, len(RULES) - 1).
    return random.choice(RULES)
|
||||
|
||||
|
||||
def generate_environment():
    """Create an events client in us-west-2 pre-populated from RULES/TARGETS."""
    client = boto3.client('events', 'us-west-2')

    for rule in RULES:
        client.put_rule(
            Name=rule['Name'],
            ScheduleExpression=rule.get('ScheduleExpression', ''),
            EventPattern=rule.get('EventPattern', '')
        )

        # Attach every target that declares this rule in its 'Rules' list.
        attached = [
            {'Id': target_id, 'Arn': TARGETS[target_id]['Arn']}
            for target_id in TARGETS
            if rule['Name'] in TARGETS[target_id].get('Rules')
        ]
        client.put_targets(Rule=rule['Name'], Targets=attached)

    return client
|
||||
|
||||
|
||||
# NOTE(review): a second function named test_list_rules is defined further
# down in this module and shadows this one, so this test never runs under
# nose -- one of the two should be renamed.
@mock_events
def test_list_rules():
    """list_rules returns a non-empty rule list for the generated environment."""
    client = generate_environment()
    response = client.list_rules()

    assert(response is not None)
    assert(len(response['Rules']) > 0)
|
||||
|
||||
|
||||
@mock_events
def test_describe_rule():
    """describe_rule returns the requested rule with its name and an ARN."""
    name = get_random_rule()['Name']
    client = generate_environment()

    described = client.describe_rule(Name=name)

    assert(described is not None)
    assert(described.get('Name') == name)
    assert(described.get('Arn') is not None)
|
||||
|
||||
|
||||
@mock_events
def test_enable_disable_rule():
    """disable_rule and enable_rule toggle a rule's State field."""
    name = get_random_rule()['Name']
    client = generate_environment()

    # Rules should start out enabled in these tests.
    assert(client.describe_rule(Name=name)['State'] == 'ENABLED')

    client.disable_rule(Name=name)
    assert(client.describe_rule(Name=name)['State'] == 'DISABLED')

    client.enable_rule(Name=name)
    assert(client.describe_rule(Name=name)['State'] == 'ENABLED')
|
||||
|
||||
|
||||
@mock_events
def test_list_rule_names_by_target():
    """list_rule_names_by_target returns exactly the rules a target is wired to."""
    client = generate_environment()

    # Check both fixture targets the same way.
    for fixture in (TARGETS['test-target-1'], TARGETS['test-target-2']):
        found = client.list_rule_names_by_target(TargetArn=fixture['Arn'])
        assert(len(found['RuleNames']) == len(fixture['Rules']))
        for name in found['RuleNames']:
            assert(name in fixture['Rules'])
|
||||
|
||||
|
||||
# NOTE(review): this redefinition shadows the earlier test_list_rules in this
# module, so only this version runs -- consider renaming one of the two.
@mock_events
def test_list_rules():
    """list_rules returns every rule defined in the RULES fixture."""
    client = generate_environment()

    rules = client.list_rules()
    assert(len(rules['Rules']) == len(RULES))
|
||||
|
||||
|
||||
@mock_events
def test_delete_rule():
    """delete_rule removes exactly one rule from the listing."""
    client = generate_environment()

    client.delete_rule(Name=RULES[0]['Name'])

    remaining = client.list_rules()
    assert(len(remaining['Rules']) == len(RULES) - 1)
|
||||
|
||||
|
||||
@mock_events
def test_list_targets_by_rule():
    """list_targets_by_rule returns every target wired to the chosen rule."""
    name = get_random_rule()['Name']
    client = generate_environment()

    found = client.list_targets_by_rule(Rule=name)

    expected = [
        target_id for target_id in TARGETS
        if name in TARGETS[target_id].get('Rules')
    ]
    assert(len(found['Targets']) == len(expected))
|
||||
|
||||
|
||||
@mock_events
def test_remove_targets():
    """remove_targets drops exactly the requested target from a rule."""
    name = get_random_rule()['Name']
    client = generate_environment()

    before = client.list_targets_by_rule(Rule=name)['Targets']
    count_before = len(before)
    assert(count_before > 0)

    client.remove_targets(Rule=name, Ids=[before[0]['Id']])

    after = client.list_targets_by_rule(Rule=name)['Targets']
    assert(count_before - 1 == len(after))
|
||||
|
||||
|
||||
@mock_events
def test_permissions():
    """put_permission adds policy statements; remove_permission drops them."""
    client = boto3.client('events', 'eu-central-1')

    client.put_permission(Action='events:PutEvents', Principal='111111111111', StatementId='Account1')
    client.put_permission(Action='events:PutEvents', Principal='222222222222', StatementId='Account2')

    policy = json.loads(client.describe_event_bus()['Policy'])
    assert len(policy['Statement']) == 2

    client.remove_permission(StatementId='Account2')

    policy = json.loads(client.describe_event_bus()['Policy'])
    assert len(policy['Statement']) == 1
    assert policy['Statement'][0]['Sid'] == 'Account1'
|
||||
|
||||
|
||||
@mock_events
def test_put_events():
    """put_events accepts a single entry but rejects an oversized batch."""
    client = boto3.client('events', 'eu-central-1')

    event = {
        "Source": "com.mycompany.myapp",
        "Detail": '{"key1": "value3", "key2": "value4"}',
        "Resources": ["resource1", "resource2"],
        "DetailType": "myDetailType"
    }

    # Boto3 would error if it didn't return 200 OK.
    client.put_events(Entries=[event])

    # A batch of 20 exceeds the per-call entry limit and must fail.
    with assert_raises(ClientError):
        client.put_events(Entries=[event] * 20)
|
||||
import random
|
||||
|
||||
import boto3
|
||||
import json
|
||||
|
||||
from moto.events import mock_events
|
||||
from botocore.exceptions import ClientError
|
||||
from nose.tools import assert_raises
|
||||
|
||||
|
||||
# NOTE(review): everything below is a second, identical copy of the events
# test module above -- an artifact of a botched merge. These definitions
# shadow the earlier ones; the whole duplicated region should be removed.

# Fixture rules and targets (duplicate of the definitions above).
RULES = [
    {'Name': 'test1', 'ScheduleExpression': 'rate(5 minutes)'},
    {'Name': 'test2', 'ScheduleExpression': 'rate(1 minute)'},
    {'Name': 'test3', 'EventPattern': '{"source": ["test-source"]}'}
]

TARGETS = {
    'test-target-1': {
        'Id': 'test-target-1',
        'Arn': 'arn:aws:lambda:us-west-2:111111111111:function:test-function-1',
        'Rules': ['test1', 'test2']
    },
    'test-target-2': {
        'Id': 'test-target-2',
        'Arn': 'arn:aws:lambda:us-west-2:111111111111:function:test-function-2',
        'Rules': ['test1', 'test3']
    },
    'test-target-3': {
        'Id': 'test-target-3',
        'Arn': 'arn:aws:lambda:us-west-2:111111111111:function:test-function-3',
        'Rules': ['test1', 'test2']
    },
    'test-target-4': {
        'Id': 'test-target-4',
        'Arn': 'arn:aws:lambda:us-west-2:111111111111:function:test-function-4',
        'Rules': ['test1', 'test3']
    },
    'test-target-5': {
        'Id': 'test-target-5',
        'Arn': 'arn:aws:lambda:us-west-2:111111111111:function:test-function-5',
        'Rules': ['test1', 'test2']
    },
    'test-target-6': {
        'Id': 'test-target-6',
        'Arn': 'arn:aws:lambda:us-west-2:111111111111:function:test-function-6',
        'Rules': ['test1', 'test3']
    }
}


def get_random_rule():
    """Return a randomly chosen RULES entry."""
    return RULES[random.randint(0, len(RULES) - 1)]


def generate_environment():
    """Create an events client in us-west-2 pre-populated from RULES/TARGETS."""
    client = boto3.client('events', 'us-west-2')

    for rule in RULES:
        client.put_rule(
            Name=rule['Name'],
            ScheduleExpression=rule.get('ScheduleExpression', ''),
            EventPattern=rule.get('EventPattern', '')
        )

        targets = []
        for target in TARGETS:
            if rule['Name'] in TARGETS[target].get('Rules'):
                targets.append({'Id': target, 'Arn': TARGETS[target]['Arn']})

        client.put_targets(Rule=rule['Name'], Targets=targets)

    return client


# NOTE(review): shadowed by the second test_list_rules below.
@mock_events
def test_list_rules():
    client = generate_environment()
    response = client.list_rules()

    assert(response is not None)
    assert(len(response['Rules']) > 0)


@mock_events
def test_describe_rule():
    rule_name = get_random_rule()['Name']
    client = generate_environment()
    response = client.describe_rule(Name=rule_name)

    assert(response is not None)
    assert(response.get('Name') == rule_name)
    assert(response.get('Arn') is not None)


@mock_events
def test_enable_disable_rule():
    rule_name = get_random_rule()['Name']
    client = generate_environment()

    # Rules should start out enabled in these tests.
    rule = client.describe_rule(Name=rule_name)
    assert(rule['State'] == 'ENABLED')

    client.disable_rule(Name=rule_name)
    rule = client.describe_rule(Name=rule_name)
    assert(rule['State'] == 'DISABLED')

    client.enable_rule(Name=rule_name)
    rule = client.describe_rule(Name=rule_name)
    assert(rule['State'] == 'ENABLED')


@mock_events
def test_list_rule_names_by_target():
    test_1_target = TARGETS['test-target-1']
    test_2_target = TARGETS['test-target-2']
    client = generate_environment()

    rules = client.list_rule_names_by_target(TargetArn=test_1_target['Arn'])
    assert(len(rules['RuleNames']) == len(test_1_target['Rules']))
    for rule in rules['RuleNames']:
        assert(rule in test_1_target['Rules'])

    rules = client.list_rule_names_by_target(TargetArn=test_2_target['Arn'])
    assert(len(rules['RuleNames']) == len(test_2_target['Rules']))
    for rule in rules['RuleNames']:
        assert(rule in test_2_target['Rules'])


@mock_events
def test_list_rules():
    client = generate_environment()

    rules = client.list_rules()
    assert(len(rules['Rules']) == len(RULES))


@mock_events
def test_delete_rule():
    client = generate_environment()

    client.delete_rule(Name=RULES[0]['Name'])
    rules = client.list_rules()
    assert(len(rules['Rules']) == len(RULES) - 1)


@mock_events
def test_list_targets_by_rule():
    rule_name = get_random_rule()['Name']
    client = generate_environment()
    targets = client.list_targets_by_rule(Rule=rule_name)

    expected_targets = []
    for target in TARGETS:
        if rule_name in TARGETS[target].get('Rules'):
            expected_targets.append(target)

    assert(len(targets['Targets']) == len(expected_targets))


@mock_events
def test_remove_targets():
    rule_name = get_random_rule()['Name']
    client = generate_environment()

    targets = client.list_targets_by_rule(Rule=rule_name)['Targets']
    targets_before = len(targets)
    assert(targets_before > 0)

    client.remove_targets(Rule=rule_name, Ids=[targets[0]['Id']])

    targets = client.list_targets_by_rule(Rule=rule_name)['Targets']
    targets_after = len(targets)
    assert(targets_before - 1 == targets_after)


@mock_events
def test_permissions():
    client = boto3.client('events', 'eu-central-1')

    client.put_permission(Action='events:PutEvents', Principal='111111111111', StatementId='Account1')
    client.put_permission(Action='events:PutEvents', Principal='222222222222', StatementId='Account2')

    resp = client.describe_event_bus()
    resp_policy = json.loads(resp['Policy'])
    assert len(resp_policy['Statement']) == 2

    client.remove_permission(StatementId='Account2')

    resp = client.describe_event_bus()
    resp_policy = json.loads(resp['Policy'])
    assert len(resp_policy['Statement']) == 1
    assert resp_policy['Statement'][0]['Sid'] == 'Account1'


@mock_events
def test_put_events():
    client = boto3.client('events', 'eu-central-1')

    event = {
        "Source": "com.mycompany.myapp",
        "Detail": '{"key1": "value3", "key2": "value4"}',
        "Resources": ["resource1", "resource2"],
        "DetailType": "myDetailType"
    }

    client.put_events(Entries=[event])
    # Boto3 would error if it didn't return 200 OK

    with assert_raises(ClientError):
        client.put_events(Entries=[event]*20)
|
||||
|
|
|
|||
|
|
@ -1,21 +1,21 @@
|
|||
from __future__ import unicode_literals
|
||||
|
||||
from tempfile import NamedTemporaryFile
|
||||
import boto.glacier
|
||||
import sure # noqa
|
||||
|
||||
from moto import mock_glacier_deprecated
|
||||
|
||||
|
||||
@mock_glacier_deprecated
def test_create_and_delete_archive():
    """An uploaded archive can be deleted from its vault by id."""
    # Write a small payload to a real temp file for the upload API.
    payload_file = NamedTemporaryFile(delete=False)
    payload_file.write(b"some stuff")
    payload_file.close()

    conn = boto.glacier.connect_to_region("us-west-2")
    vault = conn.create_vault("my_vault")

    archive_id = vault.upload_archive(payload_file.name)
    vault.delete_archive(archive_id)
|
||||
from __future__ import unicode_literals
|
||||
|
||||
from tempfile import NamedTemporaryFile
|
||||
import boto.glacier
|
||||
import sure # noqa
|
||||
|
||||
from moto import mock_glacier_deprecated
|
||||
|
||||
|
||||
# NOTE(review): duplicate of test_create_and_delete_archive above (merge artifact).
@mock_glacier_deprecated
def test_create_and_delete_archive():
    """An uploaded archive can be deleted from its vault by id."""
    the_file = NamedTemporaryFile(delete=False)
    the_file.write(b"some stuff")
    the_file.close()

    conn = boto.glacier.connect_to_region("us-west-2")
    vault = conn.create_vault("my_vault")

    archive_id = vault.upload_archive(the_file.name)

    vault.delete_archive(archive_id)
|
||||
|
|
|
|||
|
|
@ -1,90 +1,90 @@
|
|||
from __future__ import unicode_literals
|
||||
|
||||
import json
|
||||
import time
|
||||
|
||||
from boto.glacier.layer1 import Layer1
|
||||
import sure # noqa
|
||||
|
||||
from moto import mock_glacier_deprecated
|
||||
|
||||
|
||||
@mock_glacier_deprecated
def test_init_glacier_job():
    """Initiating a retrieval job reports its location under the vault."""
    conn = Layer1(region_name="us-west-2")
    vault_name = "my_vault"
    conn.create_vault(vault_name)
    archive_id = conn.upload_archive(
        vault_name, "some stuff", "", "", "some description")

    job = conn.initiate_job(vault_name, {
        "ArchiveId": archive_id,
        "Type": "archive-retrieval",
    })

    expected_location = "//vaults/my_vault/jobs/{0}".format(job['JobId'])
    job['Location'].should.equal(expected_location)
|
||||
|
||||
|
||||
@mock_glacier_deprecated
def test_describe_job():
    """describe_job reports tier, in-progress status, and the vault ARN."""
    conn = Layer1(region_name="us-west-2")
    vault_name = "my_vault"
    conn.create_vault(vault_name)
    archive_id = conn.upload_archive(
        vault_name, "some stuff", "", "", "some description")
    job_id = conn.initiate_job(vault_name, {
        "ArchiveId": archive_id,
        "Type": "archive-retrieval",
    })['JobId']

    described = conn.describe_job(vault_name, job_id)
    payload = json.loads(described.read().decode("utf-8"))

    payload.should.have.key('Tier').which.should.equal('Standard')
    payload.should.have.key('StatusCode').which.should.equal('InProgress')
    # NOTE(review): 'RegionInfo:us-west-2' in the ARN looks like a moto
    # formatting quirk rather than a real AWS ARN -- confirm upstream.
    payload.should.have.key('VaultARN').which.should.equal('arn:aws:glacier:RegionInfo:us-west-2:012345678901:vaults/my_vault')
|
||||
|
||||
|
||||
@mock_glacier_deprecated
def test_list_glacier_jobs():
    """list_jobs returns one entry per initiated retrieval job."""
    conn = Layer1(region_name="us-west-2")
    vault_name = "my_vault"
    conn.create_vault(vault_name)
    first_archive = conn.upload_archive(
        vault_name, "some stuff", "", "", "some description")['ArchiveId']
    second_archive = conn.upload_archive(
        vault_name, "some other stuff", "", "", "some description")['ArchiveId']

    # One retrieval job per archive.
    for archive_id in (first_archive, second_archive):
        conn.initiate_job(vault_name, {
            "ArchiveId": archive_id,
            "Type": "archive-retrieval",
        })

    listed = conn.list_jobs(vault_name)
    len(listed['JobList']).should.equal(2)
|
||||
|
||||
|
||||
@mock_glacier_deprecated
def test_get_job_output():
    """After the job completes, get_job_output returns the archive content."""
    conn = Layer1(region_name="us-west-2")
    vault_name = "my_vault"
    conn.create_vault(vault_name)
    archive_id = conn.upload_archive(
        vault_name, "some stuff", "", "", "some description")['ArchiveId']
    job_id = conn.initiate_job(vault_name, {
        "ArchiveId": archive_id,
        "Type": "archive-retrieval",
    })['JobId']

    # Presumably the mocked job only completes after a short delay -- the
    # sleep gives it time to finish before fetching output (TODO confirm).
    time.sleep(6)

    output = conn.get_job_output(vault_name, job_id)
    output.read().decode("utf-8").should.equal("some stuff")
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import json
|
||||
import time
|
||||
|
||||
from boto.glacier.layer1 import Layer1
|
||||
import sure # noqa
|
||||
|
||||
from moto import mock_glacier_deprecated
|
||||
|
||||
|
||||
@mock_glacier_deprecated
|
||||
def test_init_glacier_job():
|
||||
conn = Layer1(region_name="us-west-2")
|
||||
vault_name = "my_vault"
|
||||
conn.create_vault(vault_name)
|
||||
archive_id = conn.upload_archive(
|
||||
vault_name, "some stuff", "", "", "some description")
|
||||
|
||||
job_response = conn.initiate_job(vault_name, {
|
||||
"ArchiveId": archive_id,
|
||||
"Type": "archive-retrieval",
|
||||
})
|
||||
job_id = job_response['JobId']
|
||||
job_response['Location'].should.equal(
|
||||
"//vaults/my_vault/jobs/{0}".format(job_id))
|
||||
|
||||
|
||||
@mock_glacier_deprecated
|
||||
def test_describe_job():
|
||||
conn = Layer1(region_name="us-west-2")
|
||||
vault_name = "my_vault"
|
||||
conn.create_vault(vault_name)
|
||||
archive_id = conn.upload_archive(
|
||||
vault_name, "some stuff", "", "", "some description")
|
||||
job_response = conn.initiate_job(vault_name, {
|
||||
"ArchiveId": archive_id,
|
||||
"Type": "archive-retrieval",
|
||||
})
|
||||
job_id = job_response['JobId']
|
||||
|
||||
job = conn.describe_job(vault_name, job_id)
|
||||
joboutput = json.loads(job.read().decode("utf-8"))
|
||||
|
||||
joboutput.should.have.key('Tier').which.should.equal('Standard')
|
||||
joboutput.should.have.key('StatusCode').which.should.equal('InProgress')
|
||||
joboutput.should.have.key('VaultARN').which.should.equal('arn:aws:glacier:RegionInfo:us-west-2:012345678901:vaults/my_vault')
|
||||
|
||||
|
||||
@mock_glacier_deprecated
|
||||
def test_list_glacier_jobs():
|
||||
conn = Layer1(region_name="us-west-2")
|
||||
vault_name = "my_vault"
|
||||
conn.create_vault(vault_name)
|
||||
archive_id1 = conn.upload_archive(
|
||||
vault_name, "some stuff", "", "", "some description")['ArchiveId']
|
||||
archive_id2 = conn.upload_archive(
|
||||
vault_name, "some other stuff", "", "", "some description")['ArchiveId']
|
||||
|
||||
conn.initiate_job(vault_name, {
|
||||
"ArchiveId": archive_id1,
|
||||
"Type": "archive-retrieval",
|
||||
})
|
||||
conn.initiate_job(vault_name, {
|
||||
"ArchiveId": archive_id2,
|
||||
"Type": "archive-retrieval",
|
||||
})
|
||||
|
||||
jobs = conn.list_jobs(vault_name)
|
||||
len(jobs['JobList']).should.equal(2)
|
||||
|
||||
|
||||
@mock_glacier_deprecated
|
||||
def test_get_job_output():
|
||||
conn = Layer1(region_name="us-west-2")
|
||||
vault_name = "my_vault"
|
||||
conn.create_vault(vault_name)
|
||||
archive_response = conn.upload_archive(
|
||||
vault_name, "some stuff", "", "", "some description")
|
||||
archive_id = archive_response['ArchiveId']
|
||||
job_response = conn.initiate_job(vault_name, {
|
||||
"ArchiveId": archive_id,
|
||||
"Type": "archive-retrieval",
|
||||
})
|
||||
job_id = job_response['JobId']
|
||||
|
||||
time.sleep(6)
|
||||
|
||||
output = conn.get_job_output(vault_name, job_id)
|
||||
output.read().decode("utf-8").should.equal("some stuff")
|
||||
|
|
|
|||
|
|
@ -1,22 +1,22 @@
|
|||
from __future__ import unicode_literals
|
||||
|
||||
import json
|
||||
import sure # noqa
|
||||
|
||||
import moto.server as server
|
||||
from moto import mock_glacier
|
||||
|
||||
'''
|
||||
Test the different server responses
|
||||
'''
|
||||
|
||||
|
||||
@mock_glacier
|
||||
def test_list_vaults():
|
||||
backend = server.create_backend_app("glacier")
|
||||
test_client = backend.test_client()
|
||||
|
||||
res = test_client.get('/1234bcd/vaults')
|
||||
|
||||
json.loads(res.data.decode("utf-8")
|
||||
).should.equal({u'Marker': None, u'VaultList': []})
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import json
|
||||
import sure # noqa
|
||||
|
||||
import moto.server as server
|
||||
from moto import mock_glacier
|
||||
|
||||
'''
|
||||
Test the different server responses
|
||||
'''
|
||||
|
||||
|
||||
@mock_glacier
|
||||
def test_list_vaults():
|
||||
backend = server.create_backend_app("glacier")
|
||||
test_client = backend.test_client()
|
||||
|
||||
res = test_client.get('/1234bcd/vaults')
|
||||
|
||||
json.loads(res.data.decode("utf-8")
|
||||
).should.equal({u'Marker': None, u'VaultList': []})
|
||||
|
|
|
|||
|
|
@ -1,31 +1,31 @@
|
|||
from __future__ import unicode_literals
|
||||
|
||||
import boto.glacier
|
||||
import sure # noqa
|
||||
|
||||
from moto import mock_glacier_deprecated
|
||||
|
||||
|
||||
@mock_glacier_deprecated
|
||||
def test_create_vault():
|
||||
conn = boto.glacier.connect_to_region("us-west-2")
|
||||
|
||||
conn.create_vault("my_vault")
|
||||
|
||||
vaults = conn.list_vaults()
|
||||
vaults.should.have.length_of(1)
|
||||
vaults[0].name.should.equal("my_vault")
|
||||
|
||||
|
||||
@mock_glacier_deprecated
|
||||
def test_delete_vault():
|
||||
conn = boto.glacier.connect_to_region("us-west-2")
|
||||
|
||||
conn.create_vault("my_vault")
|
||||
|
||||
vaults = conn.list_vaults()
|
||||
vaults.should.have.length_of(1)
|
||||
|
||||
conn.delete_vault("my_vault")
|
||||
vaults = conn.list_vaults()
|
||||
vaults.should.have.length_of(0)
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import boto.glacier
|
||||
import sure # noqa
|
||||
|
||||
from moto import mock_glacier_deprecated
|
||||
|
||||
|
||||
@mock_glacier_deprecated
|
||||
def test_create_vault():
|
||||
conn = boto.glacier.connect_to_region("us-west-2")
|
||||
|
||||
conn.create_vault("my_vault")
|
||||
|
||||
vaults = conn.list_vaults()
|
||||
vaults.should.have.length_of(1)
|
||||
vaults[0].name.should.equal("my_vault")
|
||||
|
||||
|
||||
@mock_glacier_deprecated
|
||||
def test_delete_vault():
|
||||
conn = boto.glacier.connect_to_region("us-west-2")
|
||||
|
||||
conn.create_vault("my_vault")
|
||||
|
||||
vaults = conn.list_vaults()
|
||||
vaults.should.have.length_of(1)
|
||||
|
||||
conn.delete_vault("my_vault")
|
||||
vaults = conn.list_vaults()
|
||||
vaults.should.have.length_of(0)
|
||||
|
|
|
|||
|
|
@ -1 +1 @@
|
|||
from __future__ import unicode_literals
|
||||
from __future__ import unicode_literals
|
||||
|
|
|
|||
|
|
@ -1 +1 @@
|
|||
from __future__ import unicode_literals
|
||||
from __future__ import unicode_literals
|
||||
|
|
|
|||
|
|
@ -1,56 +1,56 @@
|
|||
from __future__ import unicode_literals
|
||||
|
||||
TABLE_INPUT = {
|
||||
'Owner': 'a_fake_owner',
|
||||
'Parameters': {
|
||||
'EXTERNAL': 'TRUE',
|
||||
},
|
||||
'Retention': 0,
|
||||
'StorageDescriptor': {
|
||||
'BucketColumns': [],
|
||||
'Compressed': False,
|
||||
'InputFormat': 'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat',
|
||||
'NumberOfBuckets': -1,
|
||||
'OutputFormat': 'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat',
|
||||
'Parameters': {},
|
||||
'SerdeInfo': {
|
||||
'Parameters': {
|
||||
'serialization.format': '1'
|
||||
},
|
||||
'SerializationLibrary': 'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'
|
||||
},
|
||||
'SkewedInfo': {
|
||||
'SkewedColumnNames': [],
|
||||
'SkewedColumnValueLocationMaps': {},
|
||||
'SkewedColumnValues': []
|
||||
},
|
||||
'SortColumns': [],
|
||||
'StoredAsSubDirectories': False
|
||||
},
|
||||
'TableType': 'EXTERNAL_TABLE',
|
||||
}
|
||||
|
||||
|
||||
PARTITION_INPUT = {
|
||||
# 'DatabaseName': 'dbname',
|
||||
'StorageDescriptor': {
|
||||
'BucketColumns': [],
|
||||
'Columns': [],
|
||||
'Compressed': False,
|
||||
'InputFormat': 'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat',
|
||||
'Location': 's3://.../partition=value',
|
||||
'NumberOfBuckets': -1,
|
||||
'OutputFormat': 'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat',
|
||||
'Parameters': {},
|
||||
'SerdeInfo': {
|
||||
'Parameters': {'path': 's3://...', 'serialization.format': '1'},
|
||||
'SerializationLibrary': 'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'},
|
||||
'SkewedInfo': {'SkewedColumnNames': [],
|
||||
'SkewedColumnValueLocationMaps': {},
|
||||
'SkewedColumnValues': []},
|
||||
'SortColumns': [],
|
||||
'StoredAsSubDirectories': False,
|
||||
},
|
||||
# 'TableName': 'source_table',
|
||||
# 'Values': ['2018-06-26'],
|
||||
}
|
||||
from __future__ import unicode_literals
|
||||
|
||||
TABLE_INPUT = {
|
||||
'Owner': 'a_fake_owner',
|
||||
'Parameters': {
|
||||
'EXTERNAL': 'TRUE',
|
||||
},
|
||||
'Retention': 0,
|
||||
'StorageDescriptor': {
|
||||
'BucketColumns': [],
|
||||
'Compressed': False,
|
||||
'InputFormat': 'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat',
|
||||
'NumberOfBuckets': -1,
|
||||
'OutputFormat': 'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat',
|
||||
'Parameters': {},
|
||||
'SerdeInfo': {
|
||||
'Parameters': {
|
||||
'serialization.format': '1'
|
||||
},
|
||||
'SerializationLibrary': 'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'
|
||||
},
|
||||
'SkewedInfo': {
|
||||
'SkewedColumnNames': [],
|
||||
'SkewedColumnValueLocationMaps': {},
|
||||
'SkewedColumnValues': []
|
||||
},
|
||||
'SortColumns': [],
|
||||
'StoredAsSubDirectories': False
|
||||
},
|
||||
'TableType': 'EXTERNAL_TABLE',
|
||||
}
|
||||
|
||||
|
||||
PARTITION_INPUT = {
|
||||
# 'DatabaseName': 'dbname',
|
||||
'StorageDescriptor': {
|
||||
'BucketColumns': [],
|
||||
'Columns': [],
|
||||
'Compressed': False,
|
||||
'InputFormat': 'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat',
|
||||
'Location': 's3://.../partition=value',
|
||||
'NumberOfBuckets': -1,
|
||||
'OutputFormat': 'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat',
|
||||
'Parameters': {},
|
||||
'SerdeInfo': {
|
||||
'Parameters': {'path': 's3://...', 'serialization.format': '1'},
|
||||
'SerializationLibrary': 'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'},
|
||||
'SkewedInfo': {'SkewedColumnNames': [],
|
||||
'SkewedColumnValueLocationMaps': {},
|
||||
'SkewedColumnValues': []},
|
||||
'SortColumns': [],
|
||||
'StoredAsSubDirectories': False,
|
||||
},
|
||||
# 'TableName': 'source_table',
|
||||
# 'Values': ['2018-06-26'],
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,119 +1,119 @@
|
|||
from __future__ import unicode_literals
|
||||
|
||||
import copy
|
||||
|
||||
from .fixtures.datacatalog import TABLE_INPUT, PARTITION_INPUT
|
||||
|
||||
|
||||
def create_database(client, database_name):
|
||||
return client.create_database(
|
||||
DatabaseInput={
|
||||
'Name': database_name
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
def get_database(client, database_name):
|
||||
return client.get_database(Name=database_name)
|
||||
|
||||
|
||||
def create_table_input(database_name, table_name, columns=[], partition_keys=[]):
|
||||
table_input = copy.deepcopy(TABLE_INPUT)
|
||||
table_input['Name'] = table_name
|
||||
table_input['PartitionKeys'] = partition_keys
|
||||
table_input['StorageDescriptor']['Columns'] = columns
|
||||
table_input['StorageDescriptor']['Location'] = 's3://my-bucket/{database_name}/{table_name}'.format(
|
||||
database_name=database_name,
|
||||
table_name=table_name
|
||||
)
|
||||
return table_input
|
||||
|
||||
|
||||
def create_table(client, database_name, table_name, table_input=None, **kwargs):
|
||||
if table_input is None:
|
||||
table_input = create_table_input(database_name, table_name, **kwargs)
|
||||
|
||||
return client.create_table(
|
||||
DatabaseName=database_name,
|
||||
TableInput=table_input
|
||||
)
|
||||
|
||||
|
||||
def update_table(client, database_name, table_name, table_input=None, **kwargs):
|
||||
if table_input is None:
|
||||
table_input = create_table_input(database_name, table_name, **kwargs)
|
||||
|
||||
return client.update_table(
|
||||
DatabaseName=database_name,
|
||||
TableInput=table_input,
|
||||
)
|
||||
|
||||
|
||||
def get_table(client, database_name, table_name):
|
||||
return client.get_table(
|
||||
DatabaseName=database_name,
|
||||
Name=table_name
|
||||
)
|
||||
|
||||
|
||||
def get_tables(client, database_name):
|
||||
return client.get_tables(
|
||||
DatabaseName=database_name
|
||||
)
|
||||
|
||||
|
||||
def get_table_versions(client, database_name, table_name):
|
||||
return client.get_table_versions(
|
||||
DatabaseName=database_name,
|
||||
TableName=table_name
|
||||
)
|
||||
|
||||
|
||||
def get_table_version(client, database_name, table_name, version_id):
|
||||
return client.get_table_version(
|
||||
DatabaseName=database_name,
|
||||
TableName=table_name,
|
||||
VersionId=version_id,
|
||||
)
|
||||
|
||||
|
||||
def create_partition_input(database_name, table_name, values=[], columns=[]):
|
||||
root_path = 's3://my-bucket/{database_name}/{table_name}'.format(
|
||||
database_name=database_name,
|
||||
table_name=table_name
|
||||
)
|
||||
|
||||
part_input = copy.deepcopy(PARTITION_INPUT)
|
||||
part_input['Values'] = values
|
||||
part_input['StorageDescriptor']['Columns'] = columns
|
||||
part_input['StorageDescriptor']['SerdeInfo']['Parameters']['path'] = root_path
|
||||
return part_input
|
||||
|
||||
|
||||
def create_partition(client, database_name, table_name, partiton_input=None, **kwargs):
|
||||
if partiton_input is None:
|
||||
partiton_input = create_partition_input(database_name, table_name, **kwargs)
|
||||
return client.create_partition(
|
||||
DatabaseName=database_name,
|
||||
TableName=table_name,
|
||||
PartitionInput=partiton_input
|
||||
)
|
||||
|
||||
|
||||
def update_partition(client, database_name, table_name, old_values=[], partiton_input=None, **kwargs):
|
||||
if partiton_input is None:
|
||||
partiton_input = create_partition_input(database_name, table_name, **kwargs)
|
||||
return client.update_partition(
|
||||
DatabaseName=database_name,
|
||||
TableName=table_name,
|
||||
PartitionInput=partiton_input,
|
||||
PartitionValueList=old_values,
|
||||
)
|
||||
|
||||
|
||||
def get_partition(client, database_name, table_name, values):
|
||||
return client.get_partition(
|
||||
DatabaseName=database_name,
|
||||
TableName=table_name,
|
||||
PartitionValues=values,
|
||||
)
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import copy
|
||||
|
||||
from .fixtures.datacatalog import TABLE_INPUT, PARTITION_INPUT
|
||||
|
||||
|
||||
def create_database(client, database_name):
|
||||
return client.create_database(
|
||||
DatabaseInput={
|
||||
'Name': database_name
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
def get_database(client, database_name):
|
||||
return client.get_database(Name=database_name)
|
||||
|
||||
|
||||
def create_table_input(database_name, table_name, columns=[], partition_keys=[]):
|
||||
table_input = copy.deepcopy(TABLE_INPUT)
|
||||
table_input['Name'] = table_name
|
||||
table_input['PartitionKeys'] = partition_keys
|
||||
table_input['StorageDescriptor']['Columns'] = columns
|
||||
table_input['StorageDescriptor']['Location'] = 's3://my-bucket/{database_name}/{table_name}'.format(
|
||||
database_name=database_name,
|
||||
table_name=table_name
|
||||
)
|
||||
return table_input
|
||||
|
||||
|
||||
def create_table(client, database_name, table_name, table_input=None, **kwargs):
|
||||
if table_input is None:
|
||||
table_input = create_table_input(database_name, table_name, **kwargs)
|
||||
|
||||
return client.create_table(
|
||||
DatabaseName=database_name,
|
||||
TableInput=table_input
|
||||
)
|
||||
|
||||
|
||||
def update_table(client, database_name, table_name, table_input=None, **kwargs):
|
||||
if table_input is None:
|
||||
table_input = create_table_input(database_name, table_name, **kwargs)
|
||||
|
||||
return client.update_table(
|
||||
DatabaseName=database_name,
|
||||
TableInput=table_input,
|
||||
)
|
||||
|
||||
|
||||
def get_table(client, database_name, table_name):
|
||||
return client.get_table(
|
||||
DatabaseName=database_name,
|
||||
Name=table_name
|
||||
)
|
||||
|
||||
|
||||
def get_tables(client, database_name):
|
||||
return client.get_tables(
|
||||
DatabaseName=database_name
|
||||
)
|
||||
|
||||
|
||||
def get_table_versions(client, database_name, table_name):
|
||||
return client.get_table_versions(
|
||||
DatabaseName=database_name,
|
||||
TableName=table_name
|
||||
)
|
||||
|
||||
|
||||
def get_table_version(client, database_name, table_name, version_id):
|
||||
return client.get_table_version(
|
||||
DatabaseName=database_name,
|
||||
TableName=table_name,
|
||||
VersionId=version_id,
|
||||
)
|
||||
|
||||
|
||||
def create_partition_input(database_name, table_name, values=[], columns=[]):
|
||||
root_path = 's3://my-bucket/{database_name}/{table_name}'.format(
|
||||
database_name=database_name,
|
||||
table_name=table_name
|
||||
)
|
||||
|
||||
part_input = copy.deepcopy(PARTITION_INPUT)
|
||||
part_input['Values'] = values
|
||||
part_input['StorageDescriptor']['Columns'] = columns
|
||||
part_input['StorageDescriptor']['SerdeInfo']['Parameters']['path'] = root_path
|
||||
return part_input
|
||||
|
||||
|
||||
def create_partition(client, database_name, table_name, partiton_input=None, **kwargs):
|
||||
if partiton_input is None:
|
||||
partiton_input = create_partition_input(database_name, table_name, **kwargs)
|
||||
return client.create_partition(
|
||||
DatabaseName=database_name,
|
||||
TableName=table_name,
|
||||
PartitionInput=partiton_input
|
||||
)
|
||||
|
||||
|
||||
def update_partition(client, database_name, table_name, old_values=[], partiton_input=None, **kwargs):
|
||||
if partiton_input is None:
|
||||
partiton_input = create_partition_input(database_name, table_name, **kwargs)
|
||||
return client.update_partition(
|
||||
DatabaseName=database_name,
|
||||
TableName=table_name,
|
||||
PartitionInput=partiton_input,
|
||||
PartitionValueList=old_values,
|
||||
)
|
||||
|
||||
|
||||
def get_partition(client, database_name, table_name, values):
|
||||
return client.get_partition(
|
||||
DatabaseName=database_name,
|
||||
TableName=table_name,
|
||||
PartitionValues=values,
|
||||
)
|
||||
|
|
|
|||
|
|
@ -1,426 +1,426 @@
|
|||
from __future__ import unicode_literals
|
||||
|
||||
import sure # noqa
|
||||
import re
|
||||
from nose.tools import assert_raises
|
||||
import boto3
|
||||
from botocore.client import ClientError
|
||||
|
||||
|
||||
from datetime import datetime
|
||||
import pytz
|
||||
|
||||
from moto import mock_glue
|
||||
from . import helpers
|
||||
|
||||
|
||||
@mock_glue
|
||||
def test_create_database():
|
||||
client = boto3.client('glue', region_name='us-east-1')
|
||||
database_name = 'myspecialdatabase'
|
||||
helpers.create_database(client, database_name)
|
||||
|
||||
response = helpers.get_database(client, database_name)
|
||||
database = response['Database']
|
||||
|
||||
database.should.equal({'Name': database_name})
|
||||
|
||||
|
||||
@mock_glue
|
||||
def test_create_database_already_exists():
|
||||
client = boto3.client('glue', region_name='us-east-1')
|
||||
database_name = 'cantcreatethisdatabasetwice'
|
||||
helpers.create_database(client, database_name)
|
||||
|
||||
with assert_raises(ClientError) as exc:
|
||||
helpers.create_database(client, database_name)
|
||||
|
||||
exc.exception.response['Error']['Code'].should.equal('AlreadyExistsException')
|
||||
|
||||
|
||||
@mock_glue
|
||||
def test_get_database_not_exits():
|
||||
client = boto3.client('glue', region_name='us-east-1')
|
||||
database_name = 'nosuchdatabase'
|
||||
|
||||
with assert_raises(ClientError) as exc:
|
||||
helpers.get_database(client, database_name)
|
||||
|
||||
exc.exception.response['Error']['Code'].should.equal('EntityNotFoundException')
|
||||
exc.exception.response['Error']['Message'].should.match('Database nosuchdatabase not found')
|
||||
|
||||
|
||||
@mock_glue
|
||||
def test_create_table():
|
||||
client = boto3.client('glue', region_name='us-east-1')
|
||||
database_name = 'myspecialdatabase'
|
||||
helpers.create_database(client, database_name)
|
||||
|
||||
table_name = 'myspecialtable'
|
||||
table_input = helpers.create_table_input(database_name, table_name)
|
||||
helpers.create_table(client, database_name, table_name, table_input)
|
||||
|
||||
response = helpers.get_table(client, database_name, table_name)
|
||||
table = response['Table']
|
||||
|
||||
table['Name'].should.equal(table_input['Name'])
|
||||
table['StorageDescriptor'].should.equal(table_input['StorageDescriptor'])
|
||||
table['PartitionKeys'].should.equal(table_input['PartitionKeys'])
|
||||
|
||||
|
||||
@mock_glue
|
||||
def test_create_table_already_exists():
|
||||
client = boto3.client('glue', region_name='us-east-1')
|
||||
database_name = 'myspecialdatabase'
|
||||
helpers.create_database(client, database_name)
|
||||
|
||||
table_name = 'cantcreatethistabletwice'
|
||||
helpers.create_table(client, database_name, table_name)
|
||||
|
||||
with assert_raises(ClientError) as exc:
|
||||
helpers.create_table(client, database_name, table_name)
|
||||
|
||||
exc.exception.response['Error']['Code'].should.equal('AlreadyExistsException')
|
||||
|
||||
|
||||
@mock_glue
|
||||
def test_get_tables():
|
||||
client = boto3.client('glue', region_name='us-east-1')
|
||||
database_name = 'myspecialdatabase'
|
||||
helpers.create_database(client, database_name)
|
||||
|
||||
table_names = ['myfirsttable', 'mysecondtable', 'mythirdtable']
|
||||
table_inputs = {}
|
||||
|
||||
for table_name in table_names:
|
||||
table_input = helpers.create_table_input(database_name, table_name)
|
||||
table_inputs[table_name] = table_input
|
||||
helpers.create_table(client, database_name, table_name, table_input)
|
||||
|
||||
response = helpers.get_tables(client, database_name)
|
||||
|
||||
tables = response['TableList']
|
||||
|
||||
tables.should.have.length_of(3)
|
||||
|
||||
for table in tables:
|
||||
table_name = table['Name']
|
||||
table_name.should.equal(table_inputs[table_name]['Name'])
|
||||
table['StorageDescriptor'].should.equal(table_inputs[table_name]['StorageDescriptor'])
|
||||
table['PartitionKeys'].should.equal(table_inputs[table_name]['PartitionKeys'])
|
||||
|
||||
|
||||
@mock_glue
|
||||
def test_get_table_versions():
|
||||
client = boto3.client('glue', region_name='us-east-1')
|
||||
database_name = 'myspecialdatabase'
|
||||
helpers.create_database(client, database_name)
|
||||
|
||||
table_name = 'myfirsttable'
|
||||
version_inputs = {}
|
||||
|
||||
table_input = helpers.create_table_input(database_name, table_name)
|
||||
helpers.create_table(client, database_name, table_name, table_input)
|
||||
version_inputs["1"] = table_input
|
||||
|
||||
columns = [{'Name': 'country', 'Type': 'string'}]
|
||||
table_input = helpers.create_table_input(database_name, table_name, columns=columns)
|
||||
helpers.update_table(client, database_name, table_name, table_input)
|
||||
version_inputs["2"] = table_input
|
||||
|
||||
# Updateing with an indentical input should still create a new version
|
||||
helpers.update_table(client, database_name, table_name, table_input)
|
||||
version_inputs["3"] = table_input
|
||||
|
||||
response = helpers.get_table_versions(client, database_name, table_name)
|
||||
|
||||
vers = response['TableVersions']
|
||||
|
||||
vers.should.have.length_of(3)
|
||||
vers[0]['Table']['StorageDescriptor']['Columns'].should.equal([])
|
||||
vers[-1]['Table']['StorageDescriptor']['Columns'].should.equal(columns)
|
||||
|
||||
for n, ver in enumerate(vers):
|
||||
n = str(n + 1)
|
||||
ver['VersionId'].should.equal(n)
|
||||
ver['Table']['Name'].should.equal(table_name)
|
||||
ver['Table']['StorageDescriptor'].should.equal(version_inputs[n]['StorageDescriptor'])
|
||||
ver['Table']['PartitionKeys'].should.equal(version_inputs[n]['PartitionKeys'])
|
||||
|
||||
response = helpers.get_table_version(client, database_name, table_name, "3")
|
||||
ver = response['TableVersion']
|
||||
|
||||
ver['VersionId'].should.equal("3")
|
||||
ver['Table']['Name'].should.equal(table_name)
|
||||
ver['Table']['StorageDescriptor']['Columns'].should.equal(columns)
|
||||
|
||||
|
||||
@mock_glue
|
||||
def test_get_table_version_not_found():
|
||||
client = boto3.client('glue', region_name='us-east-1')
|
||||
database_name = 'myspecialdatabase'
|
||||
table_name = 'myfirsttable'
|
||||
helpers.create_database(client, database_name)
|
||||
helpers.create_table(client, database_name, table_name)
|
||||
|
||||
with assert_raises(ClientError) as exc:
|
||||
helpers.get_table_version(client, database_name, 'myfirsttable', "20")
|
||||
|
||||
exc.exception.response['Error']['Code'].should.equal('EntityNotFoundException')
|
||||
exc.exception.response['Error']['Message'].should.match('version', re.I)
|
||||
|
||||
|
||||
@mock_glue
|
||||
def test_get_table_version_invalid_input():
|
||||
client = boto3.client('glue', region_name='us-east-1')
|
||||
database_name = 'myspecialdatabase'
|
||||
table_name = 'myfirsttable'
|
||||
helpers.create_database(client, database_name)
|
||||
helpers.create_table(client, database_name, table_name)
|
||||
|
||||
with assert_raises(ClientError) as exc:
|
||||
helpers.get_table_version(client, database_name, 'myfirsttable', "10not-an-int")
|
||||
|
||||
exc.exception.response['Error']['Code'].should.equal('InvalidInputException')
|
||||
|
||||
|
||||
@mock_glue
|
||||
def test_get_table_not_exits():
|
||||
client = boto3.client('glue', region_name='us-east-1')
|
||||
database_name = 'myspecialdatabase'
|
||||
helpers.create_database(client, database_name)
|
||||
|
||||
with assert_raises(ClientError) as exc:
|
||||
helpers.get_table(client, database_name, 'myfirsttable')
|
||||
|
||||
exc.exception.response['Error']['Code'].should.equal('EntityNotFoundException')
|
||||
exc.exception.response['Error']['Message'].should.match('Table myfirsttable not found')
|
||||
|
||||
|
||||
@mock_glue
|
||||
def test_get_table_when_database_not_exits():
|
||||
client = boto3.client('glue', region_name='us-east-1')
|
||||
database_name = 'nosuchdatabase'
|
||||
|
||||
with assert_raises(ClientError) as exc:
|
||||
helpers.get_table(client, database_name, 'myfirsttable')
|
||||
|
||||
exc.exception.response['Error']['Code'].should.equal('EntityNotFoundException')
|
||||
exc.exception.response['Error']['Message'].should.match('Database nosuchdatabase not found')
|
||||
|
||||
|
||||
@mock_glue
|
||||
def test_get_partitions_empty():
|
||||
client = boto3.client('glue', region_name='us-east-1')
|
||||
database_name = 'myspecialdatabase'
|
||||
table_name = 'myfirsttable'
|
||||
helpers.create_database(client, database_name)
|
||||
|
||||
helpers.create_table(client, database_name, table_name)
|
||||
|
||||
response = client.get_partitions(DatabaseName=database_name, TableName=table_name)
|
||||
|
||||
response['Partitions'].should.have.length_of(0)
|
||||
|
||||
|
||||
@mock_glue
|
||||
def test_create_partition():
|
||||
client = boto3.client('glue', region_name='us-east-1')
|
||||
database_name = 'myspecialdatabase'
|
||||
table_name = 'myfirsttable'
|
||||
values = ['2018-10-01']
|
||||
helpers.create_database(client, database_name)
|
||||
|
||||
helpers.create_table(client, database_name, table_name)
|
||||
|
||||
before = datetime.now(pytz.utc)
|
||||
|
||||
part_input = helpers.create_partition_input(database_name, table_name, values=values)
|
||||
helpers.create_partition(client, database_name, table_name, part_input)
|
||||
|
||||
after = datetime.now(pytz.utc)
|
||||
|
||||
response = client.get_partitions(DatabaseName=database_name, TableName=table_name)
|
||||
|
||||
partitions = response['Partitions']
|
||||
|
||||
partitions.should.have.length_of(1)
|
||||
|
||||
partition = partitions[0]
|
||||
|
||||
partition['TableName'].should.equal(table_name)
|
||||
partition['StorageDescriptor'].should.equal(part_input['StorageDescriptor'])
|
||||
partition['Values'].should.equal(values)
|
||||
partition['CreationTime'].should.be.greater_than(before)
|
||||
partition['CreationTime'].should.be.lower_than(after)
|
||||
|
||||
|
||||
@mock_glue
|
||||
def test_create_partition_already_exist():
|
||||
client = boto3.client('glue', region_name='us-east-1')
|
||||
database_name = 'myspecialdatabase'
|
||||
table_name = 'myfirsttable'
|
||||
values = ['2018-10-01']
|
||||
helpers.create_database(client, database_name)
|
||||
|
||||
helpers.create_table(client, database_name, table_name)
|
||||
|
||||
helpers.create_partition(client, database_name, table_name, values=values)
|
||||
|
||||
with assert_raises(ClientError) as exc:
|
||||
helpers.create_partition(client, database_name, table_name, values=values)
|
||||
|
||||
exc.exception.response['Error']['Code'].should.equal('AlreadyExistsException')
|
||||
|
||||
|
||||
@mock_glue
|
||||
def test_get_partition_not_found():
|
||||
client = boto3.client('glue', region_name='us-east-1')
|
||||
database_name = 'myspecialdatabase'
|
||||
table_name = 'myfirsttable'
|
||||
values = ['2018-10-01']
|
||||
helpers.create_database(client, database_name)
|
||||
|
||||
helpers.create_table(client, database_name, table_name)
|
||||
|
||||
with assert_raises(ClientError) as exc:
|
||||
helpers.get_partition(client, database_name, table_name, values)
|
||||
|
||||
exc.exception.response['Error']['Code'].should.equal('EntityNotFoundException')
|
||||
exc.exception.response['Error']['Message'].should.match('partition')
|
||||
|
||||
|
||||
@mock_glue
|
||||
def test_get_partition():
|
||||
client = boto3.client('glue', region_name='us-east-1')
|
||||
database_name = 'myspecialdatabase'
|
||||
table_name = 'myfirsttable'
|
||||
helpers.create_database(client, database_name)
|
||||
|
||||
helpers.create_table(client, database_name, table_name)
|
||||
|
||||
values = [['2018-10-01'], ['2018-09-01']]
|
||||
|
||||
helpers.create_partition(client, database_name, table_name, values=values[0])
|
||||
helpers.create_partition(client, database_name, table_name, values=values[1])
|
||||
|
||||
response = client.get_partition(DatabaseName=database_name, TableName=table_name, PartitionValues=values[1])
|
||||
|
||||
partition = response['Partition']
|
||||
|
||||
partition['TableName'].should.equal(table_name)
|
||||
partition['Values'].should.equal(values[1])
|
||||
|
||||
|
||||
@mock_glue
|
||||
def test_update_partition_not_found_moving():
|
||||
client = boto3.client('glue', region_name='us-east-1')
|
||||
database_name = 'myspecialdatabase'
|
||||
table_name = 'myfirsttable'
|
||||
|
||||
helpers.create_database(client, database_name)
|
||||
helpers.create_table(client, database_name, table_name)
|
||||
|
||||
with assert_raises(ClientError) as exc:
|
||||
helpers.update_partition(client, database_name, table_name, old_values=['0000-00-00'], values=['2018-10-02'])
|
||||
|
||||
exc.exception.response['Error']['Code'].should.equal('EntityNotFoundException')
|
||||
exc.exception.response['Error']['Message'].should.match('partition')
|
||||
|
||||
|
||||
@mock_glue
|
||||
def test_update_partition_not_found_change_in_place():
|
||||
client = boto3.client('glue', region_name='us-east-1')
|
||||
database_name = 'myspecialdatabase'
|
||||
table_name = 'myfirsttable'
|
||||
values = ['2018-10-01']
|
||||
|
||||
helpers.create_database(client, database_name)
|
||||
helpers.create_table(client, database_name, table_name)
|
||||
|
||||
with assert_raises(ClientError) as exc:
|
||||
helpers.update_partition(client, database_name, table_name, old_values=values, values=values)
|
||||
|
||||
exc.exception.response['Error']['Code'].should.equal('EntityNotFoundException')
|
||||
exc.exception.response['Error']['Message'].should.match('partition')
|
||||
|
||||
|
||||
@mock_glue
def test_update_partition_cannot_overwrite():
    """Renaming a partition onto values held by another partition raises AlreadyExistsException."""
    glue = boto3.client('glue', region_name='us-east-1')
    db_name = 'myspecialdatabase'
    tbl_name = 'myfirsttable'

    helpers.create_database(glue, db_name)
    helpers.create_table(glue, db_name, tbl_name)

    first, second = ['2018-10-01'], ['2018-09-01']
    helpers.create_partition(glue, db_name, tbl_name, values=first)
    helpers.create_partition(glue, db_name, tbl_name, values=second)

    with assert_raises(ClientError) as exc:
        helpers.update_partition(glue, db_name, tbl_name, old_values=first, values=second)

    exc.exception.response['Error']['Code'].should.equal('AlreadyExistsException')
||||
|
||||
@mock_glue
def test_update_partition():
    """An in-place column update must be visible when the partition is fetched again."""
    glue = boto3.client('glue', region_name='us-east-1')
    db_name = 'myspecialdatabase'
    tbl_name = 'myfirsttable'
    part_values = ['2018-10-01']

    helpers.create_database(glue, db_name)
    helpers.create_table(glue, db_name, tbl_name)
    helpers.create_partition(glue, db_name, tbl_name, values=part_values)

    helpers.update_partition(
        glue,
        db_name,
        tbl_name,
        old_values=part_values,
        values=part_values,
        columns=[{'Name': 'country', 'Type': 'string'}],
    )

    partition = glue.get_partition(
        DatabaseName=db_name, TableName=tbl_name, PartitionValues=part_values)['Partition']

    partition['TableName'].should.equal(tbl_name)
    partition['StorageDescriptor']['Columns'].should.equal([{'Name': 'country', 'Type': 'string'}])
||||
|
||||
@mock_glue
def test_update_partition_move():
    """Moving a partition removes the old values and registers the new ones."""
    glue = boto3.client('glue', region_name='us-east-1')
    db_name = 'myspecialdatabase'
    tbl_name = 'myfirsttable'
    old_part = ['2018-10-01']
    new_part = ['2018-09-01']

    helpers.create_database(glue, db_name)
    helpers.create_table(glue, db_name, tbl_name)
    helpers.create_partition(glue, db_name, tbl_name, values=old_part)

    helpers.update_partition(
        glue,
        db_name,
        tbl_name,
        old_values=old_part,
        values=new_part,
        columns=[{'Name': 'country', 'Type': 'string'}],
    )

    # Old partition shouldn't exist anymore
    with assert_raises(ClientError) as exc:
        helpers.get_partition(glue, db_name, tbl_name, old_part)
    exc.exception.response['Error']['Code'].should.equal('EntityNotFoundException')

    partition = glue.get_partition(
        DatabaseName=db_name, TableName=tbl_name, PartitionValues=new_part)['Partition']

    partition['TableName'].should.equal(tbl_name)
    partition['StorageDescriptor']['Columns'].should.equal([{'Name': 'country', 'Type': 'string'}])
||||
from __future__ import unicode_literals
|
||||
|
||||
import sure # noqa
|
||||
import re
|
||||
from nose.tools import assert_raises
|
||||
import boto3
|
||||
from botocore.client import ClientError
|
||||
|
||||
|
||||
from datetime import datetime
|
||||
import pytz
|
||||
|
||||
from moto import mock_glue
|
||||
from . import helpers
|
||||
|
||||
|
||||
@mock_glue
def test_create_database():
    """A freshly created database is retrievable and carries only its name."""
    glue = boto3.client('glue', region_name='us-east-1')
    db_name = 'myspecialdatabase'

    helpers.create_database(glue, db_name)

    database = helpers.get_database(glue, db_name)['Database']
    database.should.equal({'Name': db_name})
||||
|
||||
@mock_glue
def test_create_database_already_exists():
    """Creating the same database twice must raise AlreadyExistsException."""
    glue = boto3.client('glue', region_name='us-east-1')
    db_name = 'cantcreatethisdatabasetwice'
    helpers.create_database(glue, db_name)

    with assert_raises(ClientError) as exc:
        helpers.create_database(glue, db_name)

    exc.exception.response['Error']['Code'].should.equal('AlreadyExistsException')
||||
|
||||
@mock_glue
def test_get_database_not_exits():
    """Fetching an unknown database raises EntityNotFoundException naming the database."""
    glue = boto3.client('glue', region_name='us-east-1')

    with assert_raises(ClientError) as exc:
        helpers.get_database(glue, 'nosuchdatabase')

    error = exc.exception.response['Error']
    error['Code'].should.equal('EntityNotFoundException')
    error['Message'].should.match('Database nosuchdatabase not found')
||||
|
||||
@mock_glue
def test_create_table():
    """A created table round-trips its name, storage descriptor and partition keys."""
    glue = boto3.client('glue', region_name='us-east-1')
    db_name = 'myspecialdatabase'
    helpers.create_database(glue, db_name)

    tbl_name = 'myspecialtable'
    tbl_input = helpers.create_table_input(db_name, tbl_name)
    helpers.create_table(glue, db_name, tbl_name, tbl_input)

    table = helpers.get_table(glue, db_name, tbl_name)['Table']

    table['Name'].should.equal(tbl_input['Name'])
    table['StorageDescriptor'].should.equal(tbl_input['StorageDescriptor'])
    table['PartitionKeys'].should.equal(tbl_input['PartitionKeys'])
||||
|
||||
@mock_glue
def test_create_table_already_exists():
    """Creating the same table twice must raise AlreadyExistsException."""
    glue = boto3.client('glue', region_name='us-east-1')
    db_name = 'myspecialdatabase'
    helpers.create_database(glue, db_name)

    tbl_name = 'cantcreatethistabletwice'
    helpers.create_table(glue, db_name, tbl_name)

    with assert_raises(ClientError) as exc:
        helpers.create_table(glue, db_name, tbl_name)

    exc.exception.response['Error']['Code'].should.equal('AlreadyExistsException')
||||
|
||||
@mock_glue
def test_get_tables():
    """get_tables returns every table created in the database, with full metadata."""
    glue = boto3.client('glue', region_name='us-east-1')
    db_name = 'myspecialdatabase'
    helpers.create_database(glue, db_name)

    inputs_by_name = {}
    for name in ('myfirsttable', 'mysecondtable', 'mythirdtable'):
        tbl_input = helpers.create_table_input(db_name, name)
        inputs_by_name[name] = tbl_input
        helpers.create_table(glue, db_name, name, tbl_input)

    tables = helpers.get_tables(glue, db_name)['TableList']

    tables.should.have.length_of(3)

    for table in tables:
        expected = inputs_by_name[table['Name']]
        table['Name'].should.equal(expected['Name'])
        table['StorageDescriptor'].should.equal(expected['StorageDescriptor'])
        table['PartitionKeys'].should.equal(expected['PartitionKeys'])
||||
|
||||
@mock_glue
def test_get_table_versions():
    """Every update_table call creates a new, sequentially numbered table version."""
    glue = boto3.client('glue', region_name='us-east-1')
    db_name = 'myspecialdatabase'
    helpers.create_database(glue, db_name)

    tbl_name = 'myfirsttable'
    inputs_by_version = {}

    tbl_input = helpers.create_table_input(db_name, tbl_name)
    helpers.create_table(glue, db_name, tbl_name, tbl_input)
    inputs_by_version["1"] = tbl_input

    columns = [{'Name': 'country', 'Type': 'string'}]
    tbl_input = helpers.create_table_input(db_name, tbl_name, columns=columns)
    helpers.update_table(glue, db_name, tbl_name, tbl_input)
    inputs_by_version["2"] = tbl_input

    # Updating with an identical input should still create a new version
    helpers.update_table(glue, db_name, tbl_name, tbl_input)
    inputs_by_version["3"] = tbl_input

    vers = helpers.get_table_versions(glue, db_name, tbl_name)['TableVersions']

    vers.should.have.length_of(3)
    vers[0]['Table']['StorageDescriptor']['Columns'].should.equal([])
    vers[-1]['Table']['StorageDescriptor']['Columns'].should.equal(columns)

    for idx, ver in enumerate(vers, start=1):
        version_id = str(idx)
        expected = inputs_by_version[version_id]
        ver['VersionId'].should.equal(version_id)
        ver['Table']['Name'].should.equal(tbl_name)
        ver['Table']['StorageDescriptor'].should.equal(expected['StorageDescriptor'])
        ver['Table']['PartitionKeys'].should.equal(expected['PartitionKeys'])

    ver = helpers.get_table_version(glue, db_name, tbl_name, "3")['TableVersion']

    ver['VersionId'].should.equal("3")
    ver['Table']['Name'].should.equal(tbl_name)
    ver['Table']['StorageDescriptor']['Columns'].should.equal(columns)
|
||||
|
||||
@mock_glue
def test_get_table_version_not_found():
    """Requesting a version id that was never created raises EntityNotFoundException."""
    glue = boto3.client('glue', region_name='us-east-1')
    db_name = 'myspecialdatabase'
    helpers.create_database(glue, db_name)
    helpers.create_table(glue, db_name, 'myfirsttable')

    with assert_raises(ClientError) as exc:
        helpers.get_table_version(glue, db_name, 'myfirsttable', "20")

    error = exc.exception.response['Error']
    error['Code'].should.equal('EntityNotFoundException')
    error['Message'].should.match('version', re.I)
|
||||
|
||||
@mock_glue
def test_get_table_version_invalid_input():
    """A non-numeric version id raises InvalidInputException."""
    glue = boto3.client('glue', region_name='us-east-1')
    db_name = 'myspecialdatabase'
    helpers.create_database(glue, db_name)
    helpers.create_table(glue, db_name, 'myfirsttable')

    with assert_raises(ClientError) as exc:
        helpers.get_table_version(glue, db_name, 'myfirsttable', "10not-an-int")

    exc.exception.response['Error']['Code'].should.equal('InvalidInputException')
||||
|
||||
@mock_glue
def test_get_table_not_exits():
    """Fetching a missing table from an existing database raises EntityNotFoundException."""
    glue = boto3.client('glue', region_name='us-east-1')
    db_name = 'myspecialdatabase'
    helpers.create_database(glue, db_name)

    with assert_raises(ClientError) as exc:
        helpers.get_table(glue, db_name, 'myfirsttable')

    error = exc.exception.response['Error']
    error['Code'].should.equal('EntityNotFoundException')
    error['Message'].should.match('Table myfirsttable not found')
||||
|
||||
@mock_glue
def test_get_table_when_database_not_exits():
    """Fetching a table from a missing database reports the database, not the table."""
    glue = boto3.client('glue', region_name='us-east-1')

    with assert_raises(ClientError) as exc:
        helpers.get_table(glue, 'nosuchdatabase', 'myfirsttable')

    error = exc.exception.response['Error']
    error['Code'].should.equal('EntityNotFoundException')
    error['Message'].should.match('Database nosuchdatabase not found')
||||
|
||||
@mock_glue
def test_get_partitions_empty():
    """A table with no partitions yields an empty Partitions list."""
    glue = boto3.client('glue', region_name='us-east-1')
    db_name = 'myspecialdatabase'
    tbl_name = 'myfirsttable'
    helpers.create_database(glue, db_name)
    helpers.create_table(glue, db_name, tbl_name)

    resp = glue.get_partitions(DatabaseName=db_name, TableName=tbl_name)

    resp['Partitions'].should.have.length_of(0)
||||
|
||||
@mock_glue
def test_create_partition():
    """A created partition round-trips its values, storage descriptor and creation time."""
    glue = boto3.client('glue', region_name='us-east-1')
    db_name = 'myspecialdatabase'
    tbl_name = 'myfirsttable'
    part_values = ['2018-10-01']
    helpers.create_database(glue, db_name)
    helpers.create_table(glue, db_name, tbl_name)

    before = datetime.now(pytz.utc)

    part_input = helpers.create_partition_input(db_name, tbl_name, values=part_values)
    helpers.create_partition(glue, db_name, tbl_name, part_input)

    after = datetime.now(pytz.utc)

    partitions = glue.get_partitions(DatabaseName=db_name, TableName=tbl_name)['Partitions']

    partitions.should.have.length_of(1)

    partition = partitions[0]

    partition['TableName'].should.equal(tbl_name)
    partition['StorageDescriptor'].should.equal(part_input['StorageDescriptor'])
    partition['Values'].should.equal(part_values)
    # CreationTime must fall inside the [before, after] window taken around the call
    partition['CreationTime'].should.be.greater_than(before)
    partition['CreationTime'].should.be.lower_than(after)
||||
|
||||
@mock_glue
def test_create_partition_already_exist():
    """Creating the same partition twice must raise AlreadyExistsException."""
    glue = boto3.client('glue', region_name='us-east-1')
    db_name = 'myspecialdatabase'
    tbl_name = 'myfirsttable'
    part_values = ['2018-10-01']
    helpers.create_database(glue, db_name)
    helpers.create_table(glue, db_name, tbl_name)

    helpers.create_partition(glue, db_name, tbl_name, values=part_values)

    with assert_raises(ClientError) as exc:
        helpers.create_partition(glue, db_name, tbl_name, values=part_values)

    exc.exception.response['Error']['Code'].should.equal('AlreadyExistsException')
||||
|
||||
@mock_glue
def test_get_partition_not_found():
    """Fetching a partition that was never created raises EntityNotFoundException."""
    glue = boto3.client('glue', region_name='us-east-1')
    db_name = 'myspecialdatabase'
    tbl_name = 'myfirsttable'
    helpers.create_database(glue, db_name)
    helpers.create_table(glue, db_name, tbl_name)

    with assert_raises(ClientError) as exc:
        helpers.get_partition(glue, db_name, tbl_name, ['2018-10-01'])

    error = exc.exception.response['Error']
    error['Code'].should.equal('EntityNotFoundException')
    error['Message'].should.match('partition')
||||
|
||||
@mock_glue
def test_get_partition():
    """get_partition returns exactly the partition matching the requested values."""
    glue = boto3.client('glue', region_name='us-east-1')
    db_name = 'myspecialdatabase'
    tbl_name = 'myfirsttable'
    helpers.create_database(glue, db_name)
    helpers.create_table(glue, db_name, tbl_name)

    first, second = ['2018-10-01'], ['2018-09-01']
    helpers.create_partition(glue, db_name, tbl_name, values=first)
    helpers.create_partition(glue, db_name, tbl_name, values=second)

    partition = glue.get_partition(
        DatabaseName=db_name, TableName=tbl_name, PartitionValues=second)['Partition']

    partition['TableName'].should.equal(tbl_name)
    partition['Values'].should.equal(second)
||||
|
||||
@mock_glue
|
||||
def test_update_partition_not_found_moving():
|
||||
client = boto3.client('glue', region_name='us-east-1')
|
||||
database_name = 'myspecialdatabase'
|
||||
table_name = 'myfirsttable'
|
||||
|
||||
helpers.create_database(client, database_name)
|
||||
helpers.create_table(client, database_name, table_name)
|
||||
|
||||
with assert_raises(ClientError) as exc:
|
||||
helpers.update_partition(client, database_name, table_name, old_values=['0000-00-00'], values=['2018-10-02'])
|
||||
|
||||
exc.exception.response['Error']['Code'].should.equal('EntityNotFoundException')
|
||||
exc.exception.response['Error']['Message'].should.match('partition')
|
||||
|
||||
|
||||
@mock_glue
|
||||
def test_update_partition_not_found_change_in_place():
|
||||
client = boto3.client('glue', region_name='us-east-1')
|
||||
database_name = 'myspecialdatabase'
|
||||
table_name = 'myfirsttable'
|
||||
values = ['2018-10-01']
|
||||
|
||||
helpers.create_database(client, database_name)
|
||||
helpers.create_table(client, database_name, table_name)
|
||||
|
||||
with assert_raises(ClientError) as exc:
|
||||
helpers.update_partition(client, database_name, table_name, old_values=values, values=values)
|
||||
|
||||
exc.exception.response['Error']['Code'].should.equal('EntityNotFoundException')
|
||||
exc.exception.response['Error']['Message'].should.match('partition')
|
||||
|
||||
|
||||
@mock_glue
|
||||
def test_update_partition_cannot_overwrite():
|
||||
client = boto3.client('glue', region_name='us-east-1')
|
||||
database_name = 'myspecialdatabase'
|
||||
table_name = 'myfirsttable'
|
||||
helpers.create_database(client, database_name)
|
||||
|
||||
helpers.create_table(client, database_name, table_name)
|
||||
|
||||
values = [['2018-10-01'], ['2018-09-01']]
|
||||
|
||||
helpers.create_partition(client, database_name, table_name, values=values[0])
|
||||
helpers.create_partition(client, database_name, table_name, values=values[1])
|
||||
|
||||
with assert_raises(ClientError) as exc:
|
||||
helpers.update_partition(client, database_name, table_name, old_values=values[0], values=values[1])
|
||||
|
||||
exc.exception.response['Error']['Code'].should.equal('AlreadyExistsException')
|
||||
|
||||
|
||||
@mock_glue
|
||||
def test_update_partition():
|
||||
client = boto3.client('glue', region_name='us-east-1')
|
||||
database_name = 'myspecialdatabase'
|
||||
table_name = 'myfirsttable'
|
||||
values = ['2018-10-01']
|
||||
|
||||
helpers.create_database(client, database_name)
|
||||
helpers.create_table(client, database_name, table_name)
|
||||
helpers.create_partition(client, database_name, table_name, values=values)
|
||||
|
||||
response = helpers.update_partition(
|
||||
client,
|
||||
database_name,
|
||||
table_name,
|
||||
old_values=values,
|
||||
values=values,
|
||||
columns=[{'Name': 'country', 'Type': 'string'}],
|
||||
)
|
||||
|
||||
response = client.get_partition(DatabaseName=database_name, TableName=table_name, PartitionValues=values)
|
||||
partition = response['Partition']
|
||||
|
||||
partition['TableName'].should.equal(table_name)
|
||||
partition['StorageDescriptor']['Columns'].should.equal([{'Name': 'country', 'Type': 'string'}])
|
||||
|
||||
|
||||
@mock_glue
|
||||
def test_update_partition_move():
|
||||
client = boto3.client('glue', region_name='us-east-1')
|
||||
database_name = 'myspecialdatabase'
|
||||
table_name = 'myfirsttable'
|
||||
values = ['2018-10-01']
|
||||
new_values = ['2018-09-01']
|
||||
|
||||
helpers.create_database(client, database_name)
|
||||
helpers.create_table(client, database_name, table_name)
|
||||
helpers.create_partition(client, database_name, table_name, values=values)
|
||||
|
||||
response = helpers.update_partition(
|
||||
client,
|
||||
database_name,
|
||||
table_name,
|
||||
old_values=values,
|
||||
values=new_values,
|
||||
columns=[{'Name': 'country', 'Type': 'string'}],
|
||||
)
|
||||
|
||||
with assert_raises(ClientError) as exc:
|
||||
helpers.get_partition(client, database_name, table_name, values)
|
||||
|
||||
# Old partition shouldn't exist anymore
|
||||
exc.exception.response['Error']['Code'].should.equal('EntityNotFoundException')
|
||||
|
||||
response = client.get_partition(DatabaseName=database_name, TableName=table_name, PartitionValues=new_values)
|
||||
partition = response['Partition']
|
||||
|
||||
partition['TableName'].should.equal(table_name)
|
||||
partition['StorageDescriptor']['Columns'].should.equal([{'Name': 'country', 'Type': 'string'}])
|
||||
|
|
|
|||
File diff suppressed because it is too large
Load diff
|
|
@ -1,20 +1,20 @@
|
|||
import boto3
|
||||
import sure # noqa
|
||||
from moto import mock_iam
|
||||
|
||||
|
||||
@mock_iam()
def test_account_aliases():
    """Account aliases can be created, listed and deleted."""
    iam = boto3.client('iam', region_name='us-east-1')
    alias = 'my-account-name'

    # No aliases exist initially
    iam.list_account_aliases().should.have.key('AccountAliases').which.should.equal([])

    iam.create_account_alias(AccountAlias=alias)
    iam.list_account_aliases().should.have.key('AccountAliases').which.should.equal([alias])

    iam.delete_account_alias(AccountAlias=alias)
    iam.list_account_aliases().should.have.key('AccountAliases').which.should.equal([])
||||
import boto3
|
||||
import sure # noqa
|
||||
from moto import mock_iam
|
||||
|
||||
|
||||
@mock_iam()
|
||||
def test_account_aliases():
|
||||
client = boto3.client('iam', region_name='us-east-1')
|
||||
|
||||
alias = 'my-account-name'
|
||||
aliases = client.list_account_aliases()
|
||||
aliases.should.have.key('AccountAliases').which.should.equal([])
|
||||
|
||||
client.create_account_alias(AccountAlias=alias)
|
||||
aliases = client.list_account_aliases()
|
||||
aliases.should.have.key('AccountAliases').which.should.equal([alias])
|
||||
|
||||
client.delete_account_alias(AccountAlias=alias)
|
||||
aliases = client.list_account_aliases()
|
||||
aliases.should.have.key('AccountAliases').which.should.equal([])
|
||||
|
|
|
|||
|
|
@ -1,155 +1,155 @@
|
|||
from __future__ import unicode_literals
|
||||
|
||||
from datetime import datetime
|
||||
|
||||
import boto
|
||||
import boto3
|
||||
import sure # noqa
|
||||
|
||||
from nose.tools import assert_raises
|
||||
from boto.exception import BotoServerError
|
||||
from moto import mock_iam, mock_iam_deprecated
|
||||
|
||||
|
||||
@mock_iam_deprecated()
def test_create_group():
    """A second create_group with the same name must fail."""
    iam = boto.connect_iam()
    iam.create_group('my-group')
    with assert_raises(BotoServerError):
        iam.create_group('my-group')
|
||||
@mock_iam_deprecated()
def test_get_group():
    """get_group succeeds for an existing group and fails for an unknown one."""
    iam = boto.connect_iam()
    iam.create_group('my-group')
    iam.get_group('my-group')
    with assert_raises(BotoServerError):
        iam.get_group('not-group')
|
||||
@mock_iam()
def test_get_group_current():
    """get_group returns path, name, id, creation date and ARN; a new group has no users."""
    iam = boto3.client('iam', region_name='us-east-1')
    iam.create_group(GroupName='my-group')
    result = iam.get_group(GroupName='my-group')

    group = result['Group']
    assert group['Path'] == '/'
    assert group['GroupName'] == 'my-group'
    assert isinstance(group['CreateDate'], datetime)
    assert group['GroupId']
    assert group['Arn'] == 'arn:aws:iam::123456789012:group/my-group'
    assert not result['Users']

    # Make a group with a different path:
    other = iam.create_group(GroupName='my-other-group', Path='some/location')
    assert other['Group']['Path'] == 'some/location'
    assert other['Group']['Arn'] == 'arn:aws:iam::123456789012:group/some/location/my-other-group'
||||
|
||||
@mock_iam_deprecated()
def test_get_all_groups():
    """get_all_groups lists every created group."""
    iam = boto.connect_iam()
    for name in ('my-group1', 'my-group2'):
        iam.create_group(name)
    listing = iam.get_all_groups()['list_groups_response'][
        'list_groups_result']['groups']
    listing.should.have.length_of(2)
||||
|
||||
@mock_iam_deprecated()
def test_add_user_to_group():
    """add_user_to_group fails until both the group and the user exist."""
    iam = boto.connect_iam()
    # Neither group nor user exists yet
    with assert_raises(BotoServerError):
        iam.add_user_to_group('my-group', 'my-user')
    iam.create_group('my-group')
    # Group exists, user is still missing
    with assert_raises(BotoServerError):
        iam.add_user_to_group('my-group', 'my-user')
    iam.create_user('my-user')
    iam.add_user_to_group('my-group', 'my-user')
||||
|
||||
@mock_iam_deprecated()
def test_remove_user_from_group():
    """remove_user_from_group fails unless the user is actually a member."""
    iam = boto.connect_iam()
    # Neither group nor user exists yet
    with assert_raises(BotoServerError):
        iam.remove_user_from_group('my-group', 'my-user')
    iam.create_group('my-group')
    iam.create_user('my-user')
    # User exists but has not been added to the group
    with assert_raises(BotoServerError):
        iam.remove_user_from_group('my-group', 'my-user')
    iam.add_user_to_group('my-group', 'my-user')
    iam.remove_user_from_group('my-group', 'my-user')
||||
|
||||
@mock_iam_deprecated()
def test_get_groups_for_user():
    """Only the groups the user was added to are returned, not all groups."""
    iam = boto.connect_iam()
    for name in ('my-group1', 'my-group2', 'other-group'):
        iam.create_group(name)
    iam.create_user('my-user')
    iam.add_user_to_group('my-group1', 'my-user')
    iam.add_user_to_group('my-group2', 'my-user')

    memberships = iam.get_groups_for_user(
        'my-user')['list_groups_for_user_response']['list_groups_for_user_result']['groups']
    memberships.should.have.length_of(2)
||||
|
||||
@mock_iam_deprecated()
def test_put_group_policy():
    """An inline policy can be attached to an existing group."""
    iam = boto.connect_iam()
    iam.create_group('my-group')
    iam.put_group_policy('my-group', 'my-policy', '{"some": "json"}')
||||
|
||||
@mock_iam
def test_attach_group_policies():
    """Managed policies can be attached to and detached from a group."""
    iam = boto3.client('iam', region_name='us-east-1')
    iam.create_group(GroupName='my-group')
    iam.list_attached_group_policies(GroupName='my-group')['AttachedPolicies'].should.be.empty
    policy_arn = 'arn:aws:iam::aws:policy/service-role/AmazonElasticMapReduceforEC2Role'
    iam.list_attached_group_policies(GroupName='my-group')['AttachedPolicies'].should.be.empty

    iam.attach_group_policy(GroupName='my-group', PolicyArn=policy_arn)
    expected = [
        {
            'PolicyName': 'AmazonElasticMapReduceforEC2Role',
            'PolicyArn': policy_arn,
        }
    ]
    iam.list_attached_group_policies(GroupName='my-group')['AttachedPolicies'].should.equal(expected)

    iam.detach_group_policy(GroupName='my-group', PolicyArn=policy_arn)
    iam.list_attached_group_policies(GroupName='my-group')['AttachedPolicies'].should.be.empty
||||
|
||||
@mock_iam_deprecated()
def test_get_group_policy():
    """get_group_policy fails before the policy is put, succeeds afterwards."""
    iam = boto.connect_iam()
    iam.create_group('my-group')
    with assert_raises(BotoServerError):
        iam.get_group_policy('my-group', 'my-policy')

    iam.put_group_policy('my-group', 'my-policy', '{"some": "json"}')
    iam.get_group_policy('my-group', 'my-policy')
||||
|
||||
@mock_iam_deprecated()
def test_get_all_group_policies():
    """The policy-name listing is empty until an inline policy is put."""
    iam = boto.connect_iam()
    iam.create_group('my-group')

    def policy_names():
        # Unwrap the deprecated-boto response envelope down to the name list
        resp = iam.get_all_group_policies('my-group')
        return resp['list_group_policies_response']['list_group_policies_result']['policy_names']

    assert policy_names() == []
    iam.put_group_policy('my-group', 'my-policy', '{"some": "json"}')
    assert policy_names() == ['my-policy']
||||
|
||||
@mock_iam()
def test_list_group_policies():
    """list_group_policies reflects inline policies added via put_group_policy."""
    iam = boto3.client('iam', region_name='us-east-1')
    iam.create_group(GroupName='my-group')
    iam.list_group_policies(GroupName='my-group')['PolicyNames'].should.be.empty
    iam.put_group_policy(GroupName='my-group', PolicyName='my-policy', PolicyDocument='{"some": "json"}')
    iam.list_group_policies(GroupName='my-group')['PolicyNames'].should.equal(['my-policy'])
||||
from __future__ import unicode_literals
|
||||
|
||||
from datetime import datetime
|
||||
|
||||
import boto
|
||||
import boto3
|
||||
import sure # noqa
|
||||
|
||||
from nose.tools import assert_raises
|
||||
from boto.exception import BotoServerError
|
||||
from moto import mock_iam, mock_iam_deprecated
|
||||
|
||||
|
||||
@mock_iam_deprecated()
|
||||
def test_create_group():
|
||||
conn = boto.connect_iam()
|
||||
conn.create_group('my-group')
|
||||
with assert_raises(BotoServerError):
|
||||
conn.create_group('my-group')
|
||||
|
||||
|
||||
@mock_iam_deprecated()
|
||||
def test_get_group():
|
||||
conn = boto.connect_iam()
|
||||
conn.create_group('my-group')
|
||||
conn.get_group('my-group')
|
||||
with assert_raises(BotoServerError):
|
||||
conn.get_group('not-group')
|
||||
|
||||
|
||||
@mock_iam()
|
||||
def test_get_group_current():
|
||||
conn = boto3.client('iam', region_name='us-east-1')
|
||||
conn.create_group(GroupName='my-group')
|
||||
result = conn.get_group(GroupName='my-group')
|
||||
|
||||
assert result['Group']['Path'] == '/'
|
||||
assert result['Group']['GroupName'] == 'my-group'
|
||||
assert isinstance(result['Group']['CreateDate'], datetime)
|
||||
assert result['Group']['GroupId']
|
||||
assert result['Group']['Arn'] == 'arn:aws:iam::123456789012:group/my-group'
|
||||
assert not result['Users']
|
||||
|
||||
# Make a group with a different path:
|
||||
other_group = conn.create_group(GroupName='my-other-group', Path='some/location')
|
||||
assert other_group['Group']['Path'] == 'some/location'
|
||||
assert other_group['Group']['Arn'] == 'arn:aws:iam::123456789012:group/some/location/my-other-group'
|
||||
|
||||
|
||||
@mock_iam_deprecated()
|
||||
def test_get_all_groups():
|
||||
conn = boto.connect_iam()
|
||||
conn.create_group('my-group1')
|
||||
conn.create_group('my-group2')
|
||||
groups = conn.get_all_groups()['list_groups_response'][
|
||||
'list_groups_result']['groups']
|
||||
groups.should.have.length_of(2)
|
||||
|
||||
|
||||
@mock_iam_deprecated()
|
||||
def test_add_user_to_group():
|
||||
conn = boto.connect_iam()
|
||||
with assert_raises(BotoServerError):
|
||||
conn.add_user_to_group('my-group', 'my-user')
|
||||
conn.create_group('my-group')
|
||||
with assert_raises(BotoServerError):
|
||||
conn.add_user_to_group('my-group', 'my-user')
|
||||
conn.create_user('my-user')
|
||||
conn.add_user_to_group('my-group', 'my-user')
|
||||
|
||||
|
||||
@mock_iam_deprecated()
|
||||
def test_remove_user_from_group():
|
||||
conn = boto.connect_iam()
|
||||
with assert_raises(BotoServerError):
|
||||
conn.remove_user_from_group('my-group', 'my-user')
|
||||
conn.create_group('my-group')
|
||||
conn.create_user('my-user')
|
||||
with assert_raises(BotoServerError):
|
||||
conn.remove_user_from_group('my-group', 'my-user')
|
||||
conn.add_user_to_group('my-group', 'my-user')
|
||||
conn.remove_user_from_group('my-group', 'my-user')
|
||||
|
||||
|
||||
@mock_iam_deprecated()
|
||||
def test_get_groups_for_user():
|
||||
conn = boto.connect_iam()
|
||||
conn.create_group('my-group1')
|
||||
conn.create_group('my-group2')
|
||||
conn.create_group('other-group')
|
||||
conn.create_user('my-user')
|
||||
conn.add_user_to_group('my-group1', 'my-user')
|
||||
conn.add_user_to_group('my-group2', 'my-user')
|
||||
|
||||
groups = conn.get_groups_for_user(
|
||||
'my-user')['list_groups_for_user_response']['list_groups_for_user_result']['groups']
|
||||
groups.should.have.length_of(2)
|
||||
|
||||
|
||||
@mock_iam_deprecated()
|
||||
def test_put_group_policy():
|
||||
conn = boto.connect_iam()
|
||||
conn.create_group('my-group')
|
||||
conn.put_group_policy('my-group', 'my-policy', '{"some": "json"}')
|
||||
|
||||
|
||||
@mock_iam
|
||||
def test_attach_group_policies():
|
||||
conn = boto3.client('iam', region_name='us-east-1')
|
||||
conn.create_group(GroupName='my-group')
|
||||
conn.list_attached_group_policies(GroupName='my-group')['AttachedPolicies'].should.be.empty
|
||||
policy_arn = 'arn:aws:iam::aws:policy/service-role/AmazonElasticMapReduceforEC2Role'
|
||||
conn.list_attached_group_policies(GroupName='my-group')['AttachedPolicies'].should.be.empty
|
||||
conn.attach_group_policy(GroupName='my-group', PolicyArn=policy_arn)
|
||||
conn.list_attached_group_policies(GroupName='my-group')['AttachedPolicies'].should.equal(
|
||||
[
|
||||
{
|
||||
'PolicyName': 'AmazonElasticMapReduceforEC2Role',
|
||||
'PolicyArn': policy_arn,
|
||||
}
|
||||
])
|
||||
|
||||
conn.detach_group_policy(GroupName='my-group', PolicyArn=policy_arn)
|
||||
conn.list_attached_group_policies(GroupName='my-group')['AttachedPolicies'].should.be.empty
|
||||
|
||||
|
||||
@mock_iam_deprecated()
|
||||
def test_get_group_policy():
|
||||
conn = boto.connect_iam()
|
||||
conn.create_group('my-group')
|
||||
with assert_raises(BotoServerError):
|
||||
conn.get_group_policy('my-group', 'my-policy')
|
||||
|
||||
conn.put_group_policy('my-group', 'my-policy', '{"some": "json"}')
|
||||
conn.get_group_policy('my-group', 'my-policy')
|
||||
|
||||
|
||||
@mock_iam_deprecated()
|
||||
def test_get_all_group_policies():
|
||||
conn = boto.connect_iam()
|
||||
conn.create_group('my-group')
|
||||
policies = conn.get_all_group_policies('my-group')['list_group_policies_response']['list_group_policies_result']['policy_names']
|
||||
assert policies == []
|
||||
conn.put_group_policy('my-group', 'my-policy', '{"some": "json"}')
|
||||
policies = conn.get_all_group_policies('my-group')['list_group_policies_response']['list_group_policies_result']['policy_names']
|
||||
assert policies == ['my-policy']
|
||||
|
||||
|
||||
@mock_iam()
|
||||
def test_list_group_policies():
|
||||
conn = boto3.client('iam', region_name='us-east-1')
|
||||
conn.create_group(GroupName='my-group')
|
||||
conn.list_group_policies(GroupName='my-group')['PolicyNames'].should.be.empty
|
||||
conn.put_group_policy(GroupName='my-group', PolicyName='my-policy', PolicyDocument='{"some": "json"}')
|
||||
conn.list_group_policies(GroupName='my-group')['PolicyNames'].should.equal(['my-policy'])
|
||||
|
|
|
|||
|
|
@ -1,26 +1,26 @@
|
|||
from __future__ import unicode_literals
|
||||
|
||||
import json
|
||||
|
||||
import re
|
||||
import sure # noqa
|
||||
|
||||
import moto.server as server
|
||||
|
||||
'''
|
||||
Test the different server responses
|
||||
'''
|
||||
|
||||
|
||||
def test_iam_server_get():
|
||||
backend = server.create_backend_app("iam")
|
||||
test_client = backend.test_client()
|
||||
|
||||
group_data = test_client.action_data(
|
||||
"CreateGroup", GroupName="test group", Path="/")
|
||||
group_id = re.search("<GroupId>(.*)</GroupId>", group_data).groups()[0]
|
||||
|
||||
groups_data = test_client.action_data("ListGroups")
|
||||
groups_ids = re.findall("<GroupId>(.*)</GroupId>", groups_data)
|
||||
|
||||
assert group_id in groups_ids
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import json
|
||||
|
||||
import re
|
||||
import sure # noqa
|
||||
|
||||
import moto.server as server
|
||||
|
||||
'''
|
||||
Test the different server responses
|
||||
'''
|
||||
|
||||
|
||||
def test_iam_server_get():
|
||||
backend = server.create_backend_app("iam")
|
||||
test_client = backend.test_client()
|
||||
|
||||
group_data = test_client.action_data(
|
||||
"CreateGroup", GroupName="test group", Path="/")
|
||||
group_id = re.search("<GroupId>(.*)</GroupId>", group_data).groups()[0]
|
||||
|
||||
groups_data = test_client.action_data("ListGroups")
|
||||
groups_ids = re.findall("<GroupId>(.*)</GroupId>", groups_data)
|
||||
|
||||
assert group_id in groups_ids
|
||||
|
|
|
|||
File diff suppressed because it is too large
Load diff
|
|
@ -1,19 +1,19 @@
|
|||
from __future__ import unicode_literals
|
||||
|
||||
import sure # noqa
|
||||
|
||||
import moto.server as server
|
||||
from moto import mock_iot
|
||||
|
||||
'''
|
||||
Test the different server responses
|
||||
'''
|
||||
|
||||
@mock_iot
|
||||
def test_iot_list():
|
||||
backend = server.create_backend_app("iot")
|
||||
test_client = backend.test_client()
|
||||
|
||||
# just making sure that server is up
|
||||
res = test_client.get('/things')
|
||||
res.status_code.should.equal(404)
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import sure # noqa
|
||||
|
||||
import moto.server as server
|
||||
from moto import mock_iot
|
||||
|
||||
'''
|
||||
Test the different server responses
|
||||
'''
|
||||
|
||||
@mock_iot
|
||||
def test_iot_list():
|
||||
backend = server.create_backend_app("iot")
|
||||
test_client = backend.test_client()
|
||||
|
||||
# just making sure that server is up
|
||||
res = test_client.get('/things')
|
||||
res.status_code.should.equal(404)
|
||||
|
|
|
|||
|
|
@ -1,93 +1,93 @@
|
|||
from __future__ import unicode_literals
|
||||
|
||||
import json
|
||||
import boto3
|
||||
import sure # noqa
|
||||
from nose.tools import assert_raises
|
||||
from botocore.exceptions import ClientError
|
||||
from moto import mock_iotdata, mock_iot
|
||||
|
||||
|
||||
@mock_iot
|
||||
@mock_iotdata
|
||||
def test_basic():
|
||||
iot_client = boto3.client('iot', region_name='ap-northeast-1')
|
||||
client = boto3.client('iot-data', region_name='ap-northeast-1')
|
||||
name = 'my-thing'
|
||||
raw_payload = b'{"state": {"desired": {"led": "on"}}}'
|
||||
iot_client.create_thing(thingName=name)
|
||||
|
||||
with assert_raises(ClientError):
|
||||
client.get_thing_shadow(thingName=name)
|
||||
|
||||
res = client.update_thing_shadow(thingName=name, payload=raw_payload)
|
||||
|
||||
payload = json.loads(res['payload'].read())
|
||||
expected_state = '{"desired": {"led": "on"}}'
|
||||
payload.should.have.key('state').which.should.equal(json.loads(expected_state))
|
||||
payload.should.have.key('metadata').which.should.have.key('desired').which.should.have.key('led')
|
||||
payload.should.have.key('version').which.should.equal(1)
|
||||
payload.should.have.key('timestamp')
|
||||
|
||||
res = client.get_thing_shadow(thingName=name)
|
||||
payload = json.loads(res['payload'].read())
|
||||
expected_state = b'{"desired": {"led": "on"}, "delta": {"led": "on"}}'
|
||||
payload.should.have.key('state').which.should.equal(json.loads(expected_state))
|
||||
payload.should.have.key('metadata').which.should.have.key('desired').which.should.have.key('led')
|
||||
payload.should.have.key('version').which.should.equal(1)
|
||||
payload.should.have.key('timestamp')
|
||||
|
||||
client.delete_thing_shadow(thingName=name)
|
||||
with assert_raises(ClientError):
|
||||
client.get_thing_shadow(thingName=name)
|
||||
|
||||
|
||||
@mock_iot
|
||||
@mock_iotdata
|
||||
def test_update():
|
||||
iot_client = boto3.client('iot', region_name='ap-northeast-1')
|
||||
client = boto3.client('iot-data', region_name='ap-northeast-1')
|
||||
name = 'my-thing'
|
||||
raw_payload = b'{"state": {"desired": {"led": "on"}}}'
|
||||
iot_client.create_thing(thingName=name)
|
||||
|
||||
# first update
|
||||
res = client.update_thing_shadow(thingName=name, payload=raw_payload)
|
||||
payload = json.loads(res['payload'].read())
|
||||
expected_state = '{"desired": {"led": "on"}}'
|
||||
payload.should.have.key('state').which.should.equal(json.loads(expected_state))
|
||||
payload.should.have.key('metadata').which.should.have.key('desired').which.should.have.key('led')
|
||||
payload.should.have.key('version').which.should.equal(1)
|
||||
payload.should.have.key('timestamp')
|
||||
|
||||
res = client.get_thing_shadow(thingName=name)
|
||||
payload = json.loads(res['payload'].read())
|
||||
expected_state = b'{"desired": {"led": "on"}, "delta": {"led": "on"}}'
|
||||
payload.should.have.key('state').which.should.equal(json.loads(expected_state))
|
||||
payload.should.have.key('metadata').which.should.have.key('desired').which.should.have.key('led')
|
||||
payload.should.have.key('version').which.should.equal(1)
|
||||
payload.should.have.key('timestamp')
|
||||
|
||||
# reporting new state
|
||||
new_payload = b'{"state": {"reported": {"led": "on"}}}'
|
||||
res = client.update_thing_shadow(thingName=name, payload=new_payload)
|
||||
payload = json.loads(res['payload'].read())
|
||||
expected_state = '{"reported": {"led": "on"}}'
|
||||
payload.should.have.key('state').which.should.equal(json.loads(expected_state))
|
||||
payload.should.have.key('metadata').which.should.have.key('reported').which.should.have.key('led')
|
||||
payload.should.have.key('version').which.should.equal(2)
|
||||
payload.should.have.key('timestamp')
|
||||
|
||||
res = client.get_thing_shadow(thingName=name)
|
||||
payload = json.loads(res['payload'].read())
|
||||
expected_state = b'{"desired": {"led": "on"}, "reported": {"led": "on"}}'
|
||||
payload.should.have.key('state').which.should.equal(json.loads(expected_state))
|
||||
payload.should.have.key('metadata').which.should.have.key('desired').which.should.have.key('led')
|
||||
payload.should.have.key('version').which.should.equal(2)
|
||||
payload.should.have.key('timestamp')
|
||||
|
||||
|
||||
@mock_iotdata
|
||||
def test_publish():
|
||||
client = boto3.client('iot-data', region_name='ap-northeast-1')
|
||||
client.publish(topic='test/topic', qos=1, payload=b'')
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import json
|
||||
import boto3
|
||||
import sure # noqa
|
||||
from nose.tools import assert_raises
|
||||
from botocore.exceptions import ClientError
|
||||
from moto import mock_iotdata, mock_iot
|
||||
|
||||
|
||||
@mock_iot
|
||||
@mock_iotdata
|
||||
def test_basic():
|
||||
iot_client = boto3.client('iot', region_name='ap-northeast-1')
|
||||
client = boto3.client('iot-data', region_name='ap-northeast-1')
|
||||
name = 'my-thing'
|
||||
raw_payload = b'{"state": {"desired": {"led": "on"}}}'
|
||||
iot_client.create_thing(thingName=name)
|
||||
|
||||
with assert_raises(ClientError):
|
||||
client.get_thing_shadow(thingName=name)
|
||||
|
||||
res = client.update_thing_shadow(thingName=name, payload=raw_payload)
|
||||
|
||||
payload = json.loads(res['payload'].read())
|
||||
expected_state = '{"desired": {"led": "on"}}'
|
||||
payload.should.have.key('state').which.should.equal(json.loads(expected_state))
|
||||
payload.should.have.key('metadata').which.should.have.key('desired').which.should.have.key('led')
|
||||
payload.should.have.key('version').which.should.equal(1)
|
||||
payload.should.have.key('timestamp')
|
||||
|
||||
res = client.get_thing_shadow(thingName=name)
|
||||
payload = json.loads(res['payload'].read())
|
||||
expected_state = b'{"desired": {"led": "on"}, "delta": {"led": "on"}}'
|
||||
payload.should.have.key('state').which.should.equal(json.loads(expected_state))
|
||||
payload.should.have.key('metadata').which.should.have.key('desired').which.should.have.key('led')
|
||||
payload.should.have.key('version').which.should.equal(1)
|
||||
payload.should.have.key('timestamp')
|
||||
|
||||
client.delete_thing_shadow(thingName=name)
|
||||
with assert_raises(ClientError):
|
||||
client.get_thing_shadow(thingName=name)
|
||||
|
||||
|
||||
@mock_iot
|
||||
@mock_iotdata
|
||||
def test_update():
|
||||
iot_client = boto3.client('iot', region_name='ap-northeast-1')
|
||||
client = boto3.client('iot-data', region_name='ap-northeast-1')
|
||||
name = 'my-thing'
|
||||
raw_payload = b'{"state": {"desired": {"led": "on"}}}'
|
||||
iot_client.create_thing(thingName=name)
|
||||
|
||||
# first update
|
||||
res = client.update_thing_shadow(thingName=name, payload=raw_payload)
|
||||
payload = json.loads(res['payload'].read())
|
||||
expected_state = '{"desired": {"led": "on"}}'
|
||||
payload.should.have.key('state').which.should.equal(json.loads(expected_state))
|
||||
payload.should.have.key('metadata').which.should.have.key('desired').which.should.have.key('led')
|
||||
payload.should.have.key('version').which.should.equal(1)
|
||||
payload.should.have.key('timestamp')
|
||||
|
||||
res = client.get_thing_shadow(thingName=name)
|
||||
payload = json.loads(res['payload'].read())
|
||||
expected_state = b'{"desired": {"led": "on"}, "delta": {"led": "on"}}'
|
||||
payload.should.have.key('state').which.should.equal(json.loads(expected_state))
|
||||
payload.should.have.key('metadata').which.should.have.key('desired').which.should.have.key('led')
|
||||
payload.should.have.key('version').which.should.equal(1)
|
||||
payload.should.have.key('timestamp')
|
||||
|
||||
# reporting new state
|
||||
new_payload = b'{"state": {"reported": {"led": "on"}}}'
|
||||
res = client.update_thing_shadow(thingName=name, payload=new_payload)
|
||||
payload = json.loads(res['payload'].read())
|
||||
expected_state = '{"reported": {"led": "on"}}'
|
||||
payload.should.have.key('state').which.should.equal(json.loads(expected_state))
|
||||
payload.should.have.key('metadata').which.should.have.key('reported').which.should.have.key('led')
|
||||
payload.should.have.key('version').which.should.equal(2)
|
||||
payload.should.have.key('timestamp')
|
||||
|
||||
res = client.get_thing_shadow(thingName=name)
|
||||
payload = json.loads(res['payload'].read())
|
||||
expected_state = b'{"desired": {"led": "on"}, "reported": {"led": "on"}}'
|
||||
payload.should.have.key('state').which.should.equal(json.loads(expected_state))
|
||||
payload.should.have.key('metadata').which.should.have.key('desired').which.should.have.key('led')
|
||||
payload.should.have.key('version').which.should.equal(2)
|
||||
payload.should.have.key('timestamp')
|
||||
|
||||
|
||||
@mock_iotdata
|
||||
def test_publish():
|
||||
client = boto3.client('iot-data', region_name='ap-northeast-1')
|
||||
client.publish(topic='test/topic', qos=1, payload=b'')
|
||||
|
|
|
|||
|
|
@ -1,20 +1,20 @@
|
|||
from __future__ import unicode_literals
|
||||
|
||||
import sure # noqa
|
||||
|
||||
import moto.server as server
|
||||
from moto import mock_iotdata
|
||||
|
||||
'''
|
||||
Test the different server responses
|
||||
'''
|
||||
|
||||
@mock_iotdata
|
||||
def test_iotdata_list():
|
||||
backend = server.create_backend_app("iot-data")
|
||||
test_client = backend.test_client()
|
||||
|
||||
# just making sure that server is up
|
||||
thing_name = 'nothing'
|
||||
res = test_client.get('/things/{}/shadow'.format(thing_name))
|
||||
res.status_code.should.equal(404)
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import sure # noqa
|
||||
|
||||
import moto.server as server
|
||||
from moto import mock_iotdata
|
||||
|
||||
'''
|
||||
Test the different server responses
|
||||
'''
|
||||
|
||||
@mock_iotdata
|
||||
def test_iotdata_list():
|
||||
backend = server.create_backend_app("iot-data")
|
||||
test_client = backend.test_client()
|
||||
|
||||
# just making sure that server is up
|
||||
thing_name = 'nothing'
|
||||
res = test_client.get('/things/{}/shadow'.format(thing_name))
|
||||
res.status_code.should.equal(404)
|
||||
|
|
|
|||
|
|
@ -1,188 +1,188 @@
|
|||
from __future__ import unicode_literals
|
||||
|
||||
import datetime
|
||||
|
||||
from botocore.exceptions import ClientError
|
||||
import boto3
|
||||
import sure # noqa
|
||||
|
||||
from moto import mock_kinesis
|
||||
|
||||
|
||||
def create_stream(client, stream_name):
|
||||
return client.create_delivery_stream(
|
||||
DeliveryStreamName=stream_name,
|
||||
RedshiftDestinationConfiguration={
|
||||
'RoleARN': 'arn:aws:iam::123456789012:role/firehose_delivery_role',
|
||||
'ClusterJDBCURL': 'jdbc:redshift://host.amazonaws.com:5439/database',
|
||||
'CopyCommand': {
|
||||
'DataTableName': 'outputTable',
|
||||
'CopyOptions': "CSV DELIMITER ',' NULL '\\0'"
|
||||
},
|
||||
'Username': 'username',
|
||||
'Password': 'password',
|
||||
'S3Configuration': {
|
||||
'RoleARN': 'arn:aws:iam::123456789012:role/firehose_delivery_role',
|
||||
'BucketARN': 'arn:aws:s3:::kinesis-test',
|
||||
'Prefix': 'myFolder/',
|
||||
'BufferingHints': {
|
||||
'SizeInMBs': 123,
|
||||
'IntervalInSeconds': 124
|
||||
},
|
||||
'CompressionFormat': 'UNCOMPRESSED',
|
||||
}
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
@mock_kinesis
|
||||
def test_create_stream():
|
||||
client = boto3.client('firehose', region_name='us-east-1')
|
||||
|
||||
response = create_stream(client, 'stream1')
|
||||
stream_arn = response['DeliveryStreamARN']
|
||||
|
||||
response = client.describe_delivery_stream(DeliveryStreamName='stream1')
|
||||
stream_description = response['DeliveryStreamDescription']
|
||||
|
||||
# Sure and Freezegun don't play nicely together
|
||||
_ = stream_description.pop('CreateTimestamp')
|
||||
_ = stream_description.pop('LastUpdateTimestamp')
|
||||
|
||||
stream_description.should.equal({
|
||||
'DeliveryStreamName': 'stream1',
|
||||
'DeliveryStreamARN': stream_arn,
|
||||
'DeliveryStreamStatus': 'ACTIVE',
|
||||
'VersionId': 'string',
|
||||
'Destinations': [
|
||||
{
|
||||
'DestinationId': 'string',
|
||||
'RedshiftDestinationDescription': {
|
||||
'RoleARN': 'arn:aws:iam::123456789012:role/firehose_delivery_role',
|
||||
'ClusterJDBCURL': 'jdbc:redshift://host.amazonaws.com:5439/database',
|
||||
'CopyCommand': {
|
||||
'DataTableName': 'outputTable',
|
||||
'CopyOptions': "CSV DELIMITER ',' NULL '\\0'"
|
||||
},
|
||||
'Username': 'username',
|
||||
'S3DestinationDescription': {
|
||||
'RoleARN': 'arn:aws:iam::123456789012:role/firehose_delivery_role',
|
||||
'BucketARN': 'arn:aws:s3:::kinesis-test',
|
||||
'Prefix': 'myFolder/',
|
||||
'BufferingHints': {
|
||||
'SizeInMBs': 123,
|
||||
'IntervalInSeconds': 124
|
||||
},
|
||||
'CompressionFormat': 'UNCOMPRESSED',
|
||||
}
|
||||
}
|
||||
},
|
||||
],
|
||||
"HasMoreDestinations": False,
|
||||
})
|
||||
|
||||
|
||||
@mock_kinesis
|
||||
def test_create_stream_without_redshift():
|
||||
client = boto3.client('firehose', region_name='us-east-1')
|
||||
|
||||
response = client.create_delivery_stream(
|
||||
DeliveryStreamName="stream1",
|
||||
S3DestinationConfiguration={
|
||||
'RoleARN': 'arn:aws:iam::123456789012:role/firehose_delivery_role',
|
||||
'BucketARN': 'arn:aws:s3:::kinesis-test',
|
||||
'Prefix': 'myFolder/',
|
||||
'BufferingHints': {
|
||||
'SizeInMBs': 123,
|
||||
'IntervalInSeconds': 124
|
||||
},
|
||||
'CompressionFormat': 'UNCOMPRESSED',
|
||||
}
|
||||
)
|
||||
stream_arn = response['DeliveryStreamARN']
|
||||
|
||||
response = client.describe_delivery_stream(DeliveryStreamName='stream1')
|
||||
stream_description = response['DeliveryStreamDescription']
|
||||
|
||||
# Sure and Freezegun don't play nicely together
|
||||
_ = stream_description.pop('CreateTimestamp')
|
||||
_ = stream_description.pop('LastUpdateTimestamp')
|
||||
|
||||
stream_description.should.equal({
|
||||
'DeliveryStreamName': 'stream1',
|
||||
'DeliveryStreamARN': stream_arn,
|
||||
'DeliveryStreamStatus': 'ACTIVE',
|
||||
'VersionId': 'string',
|
||||
'Destinations': [
|
||||
{
|
||||
'DestinationId': 'string',
|
||||
'S3DestinationDescription': {
|
||||
'RoleARN': 'arn:aws:iam::123456789012:role/firehose_delivery_role',
|
||||
'RoleARN': 'arn:aws:iam::123456789012:role/firehose_delivery_role',
|
||||
'BucketARN': 'arn:aws:s3:::kinesis-test',
|
||||
'Prefix': 'myFolder/',
|
||||
'BufferingHints': {
|
||||
'SizeInMBs': 123,
|
||||
'IntervalInSeconds': 124
|
||||
},
|
||||
'CompressionFormat': 'UNCOMPRESSED',
|
||||
}
|
||||
},
|
||||
],
|
||||
"HasMoreDestinations": False,
|
||||
})
|
||||
|
||||
|
||||
@mock_kinesis
|
||||
def test_deescribe_non_existant_stream():
|
||||
client = boto3.client('firehose', region_name='us-east-1')
|
||||
|
||||
client.describe_delivery_stream.when.called_with(
|
||||
DeliveryStreamName='not-a-stream').should.throw(ClientError)
|
||||
|
||||
|
||||
@mock_kinesis
|
||||
def test_list_and_delete_stream():
|
||||
client = boto3.client('firehose', region_name='us-east-1')
|
||||
|
||||
create_stream(client, 'stream1')
|
||||
create_stream(client, 'stream2')
|
||||
|
||||
set(client.list_delivery_streams()['DeliveryStreamNames']).should.equal(
|
||||
set(['stream1', 'stream2']))
|
||||
|
||||
client.delete_delivery_stream(DeliveryStreamName='stream1')
|
||||
|
||||
set(client.list_delivery_streams()[
|
||||
'DeliveryStreamNames']).should.equal(set(['stream2']))
|
||||
|
||||
|
||||
@mock_kinesis
|
||||
def test_put_record():
|
||||
client = boto3.client('firehose', region_name='us-east-1')
|
||||
|
||||
create_stream(client, 'stream1')
|
||||
client.put_record(
|
||||
DeliveryStreamName='stream1',
|
||||
Record={
|
||||
'Data': 'some data'
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
@mock_kinesis
|
||||
def test_put_record_batch():
|
||||
client = boto3.client('firehose', region_name='us-east-1')
|
||||
|
||||
create_stream(client, 'stream1')
|
||||
client.put_record_batch(
|
||||
DeliveryStreamName='stream1',
|
||||
Records=[
|
||||
{
|
||||
'Data': 'some data1'
|
||||
},
|
||||
{
|
||||
'Data': 'some data2'
|
||||
},
|
||||
]
|
||||
)
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import datetime
|
||||
|
||||
from botocore.exceptions import ClientError
|
||||
import boto3
|
||||
import sure # noqa
|
||||
|
||||
from moto import mock_kinesis
|
||||
|
||||
|
||||
def create_stream(client, stream_name):
|
||||
return client.create_delivery_stream(
|
||||
DeliveryStreamName=stream_name,
|
||||
RedshiftDestinationConfiguration={
|
||||
'RoleARN': 'arn:aws:iam::123456789012:role/firehose_delivery_role',
|
||||
'ClusterJDBCURL': 'jdbc:redshift://host.amazonaws.com:5439/database',
|
||||
'CopyCommand': {
|
||||
'DataTableName': 'outputTable',
|
||||
'CopyOptions': "CSV DELIMITER ',' NULL '\\0'"
|
||||
},
|
||||
'Username': 'username',
|
||||
'Password': 'password',
|
||||
'S3Configuration': {
|
||||
'RoleARN': 'arn:aws:iam::123456789012:role/firehose_delivery_role',
|
||||
'BucketARN': 'arn:aws:s3:::kinesis-test',
|
||||
'Prefix': 'myFolder/',
|
||||
'BufferingHints': {
|
||||
'SizeInMBs': 123,
|
||||
'IntervalInSeconds': 124
|
||||
},
|
||||
'CompressionFormat': 'UNCOMPRESSED',
|
||||
}
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
@mock_kinesis
|
||||
def test_create_stream():
|
||||
client = boto3.client('firehose', region_name='us-east-1')
|
||||
|
||||
response = create_stream(client, 'stream1')
|
||||
stream_arn = response['DeliveryStreamARN']
|
||||
|
||||
response = client.describe_delivery_stream(DeliveryStreamName='stream1')
|
||||
stream_description = response['DeliveryStreamDescription']
|
||||
|
||||
# Sure and Freezegun don't play nicely together
|
||||
_ = stream_description.pop('CreateTimestamp')
|
||||
_ = stream_description.pop('LastUpdateTimestamp')
|
||||
|
||||
stream_description.should.equal({
|
||||
'DeliveryStreamName': 'stream1',
|
||||
'DeliveryStreamARN': stream_arn,
|
||||
'DeliveryStreamStatus': 'ACTIVE',
|
||||
'VersionId': 'string',
|
||||
'Destinations': [
|
||||
{
|
||||
'DestinationId': 'string',
|
||||
'RedshiftDestinationDescription': {
|
||||
'RoleARN': 'arn:aws:iam::123456789012:role/firehose_delivery_role',
|
||||
'ClusterJDBCURL': 'jdbc:redshift://host.amazonaws.com:5439/database',
|
||||
'CopyCommand': {
|
||||
'DataTableName': 'outputTable',
|
||||
'CopyOptions': "CSV DELIMITER ',' NULL '\\0'"
|
||||
},
|
||||
'Username': 'username',
|
||||
'S3DestinationDescription': {
|
||||
'RoleARN': 'arn:aws:iam::123456789012:role/firehose_delivery_role',
|
||||
'BucketARN': 'arn:aws:s3:::kinesis-test',
|
||||
'Prefix': 'myFolder/',
|
||||
'BufferingHints': {
|
||||
'SizeInMBs': 123,
|
||||
'IntervalInSeconds': 124
|
||||
},
|
||||
'CompressionFormat': 'UNCOMPRESSED',
|
||||
}
|
||||
}
|
||||
},
|
||||
],
|
||||
"HasMoreDestinations": False,
|
||||
})
|
||||
|
||||
|
||||
@mock_kinesis
|
||||
def test_create_stream_without_redshift():
|
||||
client = boto3.client('firehose', region_name='us-east-1')
|
||||
|
||||
response = client.create_delivery_stream(
|
||||
DeliveryStreamName="stream1",
|
||||
S3DestinationConfiguration={
|
||||
'RoleARN': 'arn:aws:iam::123456789012:role/firehose_delivery_role',
|
||||
'BucketARN': 'arn:aws:s3:::kinesis-test',
|
||||
'Prefix': 'myFolder/',
|
||||
'BufferingHints': {
|
||||
'SizeInMBs': 123,
|
||||
'IntervalInSeconds': 124
|
||||
},
|
||||
'CompressionFormat': 'UNCOMPRESSED',
|
||||
}
|
||||
)
|
||||
stream_arn = response['DeliveryStreamARN']
|
||||
|
||||
response = client.describe_delivery_stream(DeliveryStreamName='stream1')
|
||||
stream_description = response['DeliveryStreamDescription']
|
||||
|
||||
# Sure and Freezegun don't play nicely together
|
||||
_ = stream_description.pop('CreateTimestamp')
|
||||
_ = stream_description.pop('LastUpdateTimestamp')
|
||||
|
||||
stream_description.should.equal({
|
||||
'DeliveryStreamName': 'stream1',
|
||||
'DeliveryStreamARN': stream_arn,
|
||||
'DeliveryStreamStatus': 'ACTIVE',
|
||||
'VersionId': 'string',
|
||||
'Destinations': [
|
||||
{
|
||||
'DestinationId': 'string',
|
||||
'S3DestinationDescription': {
|
||||
'RoleARN': 'arn:aws:iam::123456789012:role/firehose_delivery_role',
|
||||
'RoleARN': 'arn:aws:iam::123456789012:role/firehose_delivery_role',
|
||||
'BucketARN': 'arn:aws:s3:::kinesis-test',
|
||||
'Prefix': 'myFolder/',
|
||||
'BufferingHints': {
|
||||
'SizeInMBs': 123,
|
||||
'IntervalInSeconds': 124
|
||||
},
|
||||
'CompressionFormat': 'UNCOMPRESSED',
|
||||
}
|
||||
},
|
||||
],
|
||||
"HasMoreDestinations": False,
|
||||
})
|
||||
|
||||
|
||||
@mock_kinesis
|
||||
def test_deescribe_non_existant_stream():
|
||||
client = boto3.client('firehose', region_name='us-east-1')
|
||||
|
||||
client.describe_delivery_stream.when.called_with(
|
||||
DeliveryStreamName='not-a-stream').should.throw(ClientError)
|
||||
|
||||
|
||||
@mock_kinesis
|
||||
def test_list_and_delete_stream():
|
||||
client = boto3.client('firehose', region_name='us-east-1')
|
||||
|
||||
create_stream(client, 'stream1')
|
||||
create_stream(client, 'stream2')
|
||||
|
||||
set(client.list_delivery_streams()['DeliveryStreamNames']).should.equal(
|
||||
set(['stream1', 'stream2']))
|
||||
|
||||
client.delete_delivery_stream(DeliveryStreamName='stream1')
|
||||
|
||||
set(client.list_delivery_streams()[
|
||||
'DeliveryStreamNames']).should.equal(set(['stream2']))
|
||||
|
||||
|
||||
@mock_kinesis
|
||||
def test_put_record():
|
||||
client = boto3.client('firehose', region_name='us-east-1')
|
||||
|
||||
create_stream(client, 'stream1')
|
||||
client.put_record(
|
||||
DeliveryStreamName='stream1',
|
||||
Record={
|
||||
'Data': 'some data'
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
@mock_kinesis
|
||||
def test_put_record_batch():
|
||||
client = boto3.client('firehose', region_name='us-east-1')
|
||||
|
||||
create_stream(client, 'stream1')
|
||||
client.put_record_batch(
|
||||
DeliveryStreamName='stream1',
|
||||
Records=[
|
||||
{
|
||||
'Data': 'some data1'
|
||||
},
|
||||
{
|
||||
'Data': 'some data2'
|
||||
},
|
||||
]
|
||||
)
|
||||
|
|
|
|||
Some files were not shown because too many files have changed in this diff Show more
Loading…
Add table
Add a link
Reference in a new issue