moto/moto/datapipeline/models.py
jweite 49d92861c0
IAM CloudFormation update, significant CloudFormation refactoring (#3218)
* IAM User Cloudformation Enhancements: update, delete, getatt.

* AWS::IAM::Policy Support

* Added unit tests for AWS:IAM:Policy for roles and groups.  Fixed bug related to groups.

* AWS:IAM:AccessKey CloudFormation support.

* Refactor of CloudFormation parsing.py methods to simplify and standardize how they call to the models.  Adjusted some models accordingly.

* Further model CloudFormation support changes to align with revised CloudFormation logic.  Mostly avoidance of getting resource name from properties.

* Support for Kinesis Stream RetentionPeriodHours param.

* Kinesis Stream Cloudformation Tag Support.

* Added omitted 'region' param to boto3.client() calls in new tests.

Co-authored-by: Joseph Weitekamp <jweite@amazon.com>
2020-08-27 10:11:47 +01:00

162 lines
5.4 KiB
Python

from __future__ import unicode_literals
import datetime
from boto3 import Session
from moto.compat import OrderedDict
from moto.core import BaseBackend, BaseModel, CloudFormationModel
from .utils import get_random_pipeline_id, remove_capitalization_of_dict_keys
class PipelineObject(BaseModel):
    """A single object (component, instance, or attempt) belonging to a pipeline."""

    def __init__(self, object_id, name, fields):
        self.object_id = object_id
        self.name = name
        self.fields = fields

    def to_json(self):
        """Serialize this object in the shape used by API responses."""
        return {
            "fields": self.fields,
            "id": self.object_id,
            "name": self.name,
        }
class Pipeline(CloudFormationModel):
    """In-memory model of a Data Pipeline, with CloudFormation support.

    Accepted keyword args: ``description`` (str, default "") and ``tags``
    (list of {"key": ..., "value": ...} dicts, default []).
    """

    def __init__(self, name, unique_id, **kwargs):
        self.name = name
        self.unique_id = unique_id
        self.description = kwargs.get("description", "")
        self.pipeline_id = get_random_pipeline_id()
        self.creation_time = datetime.datetime.utcnow()
        self.objects = []
        # Pipelines start PENDING; activate() moves them to SCHEDULED.
        self.status = "PENDING"
        self.tags = kwargs.get("tags", [])

    @property
    def physical_resource_id(self):
        # CloudFormation identifies this resource by its pipeline id.
        return self.pipeline_id

    def to_meta_json(self):
        """Minimal representation used by ListPipelines."""
        return {"id": self.pipeline_id, "name": self.name}

    def to_json(self):
        """Full representation used by DescribePipelines."""
        return {
            "description": self.description,
            "fields": [
                {"key": "@pipelineState", "stringValue": self.status},
                {"key": "description", "stringValue": self.description},
                {"key": "name", "stringValue": self.name},
                {
                    "key": "@creationTime",
                    "stringValue": datetime.datetime.strftime(
                        self.creation_time, "%Y-%m-%dT%H-%M-%S"
                    ),
                },
                {"key": "@id", "stringValue": self.pipeline_id},
                {"key": "@sphere", "stringValue": "PIPELINE"},
                {"key": "@version", "stringValue": "1"},
                {"key": "@userId", "stringValue": "924374875933"},
                {"key": "@accountId", "stringValue": "924374875933"},
                {"key": "uniqueId", "stringValue": self.unique_id},
            ],
            "name": self.name,
            "pipelineId": self.pipeline_id,
            "tags": self.tags,
        }

    def set_pipeline_objects(self, pipeline_objects):
        """Replace this pipeline's definition with *pipeline_objects*.

        Incoming dicts use capitalized keys (Id/Name/Fields); they are
        normalized to lowercase before building PipelineObject models.
        """
        self.objects = [
            PipelineObject(
                pipeline_object["id"],
                pipeline_object["name"],
                pipeline_object["fields"],
            )
            for pipeline_object in remove_capitalization_of_dict_keys(pipeline_objects)
        ]

    def activate(self):
        self.status = "SCHEDULED"

    @staticmethod
    def cloudformation_name_type():
        return "Name"

    @staticmethod
    def cloudformation_type():
        # https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-datapipeline-pipeline.html
        return "AWS::DataPipeline::Pipeline"

    @classmethod
    def create_from_cloudformation_json(
        cls, resource_name, cloudformation_json, region_name
    ):
        """Create a pipeline from its CloudFormation resource definition."""
        datapipeline_backend = datapipeline_backends[region_name]
        properties = cloudformation_json["Properties"]

        cloudformation_unique_id = "cf-" + resource_name
        pipeline = datapipeline_backend.create_pipeline(
            resource_name, cloudformation_unique_id
        )
        datapipeline_backend.put_pipeline_definition(
            pipeline.pipeline_id, properties["PipelineObjects"]
        )

        # "Activate" is an optional template property; previously a plain
        # properties["Activate"] lookup raised KeyError when it was omitted.
        if properties.get("Activate", False):
            pipeline.activate()

        return pipeline
class DataPipelineBackend(BaseBackend):
    """Region-scoped in-memory store of Data Pipeline state."""

    def __init__(self):
        # Insertion order is preserved so listings are deterministic.
        self.pipelines = OrderedDict()

    def create_pipeline(self, name, unique_id, **kwargs):
        """Create, register, and return a new Pipeline model."""
        new_pipeline = Pipeline(name, unique_id, **kwargs)
        self.pipelines[new_pipeline.pipeline_id] = new_pipeline
        return new_pipeline

    def list_pipelines(self):
        return self.pipelines.values()

    def describe_pipelines(self, pipeline_ids):
        """Return the pipeline models whose ids appear in *pipeline_ids*."""
        return [
            candidate
            for candidate in self.pipelines.values()
            if candidate.pipeline_id in pipeline_ids
        ]

    def get_pipeline(self, pipeline_id):
        # Raises KeyError for unknown ids; callers handle translation.
        return self.pipelines[pipeline_id]

    def delete_pipeline(self, pipeline_id):
        # Deleting an unknown pipeline is a silent no-op.
        self.pipelines.pop(pipeline_id, None)

    def put_pipeline_definition(self, pipeline_id, pipeline_objects):
        self.get_pipeline(pipeline_id).set_pipeline_objects(pipeline_objects)

    def get_pipeline_definition(self, pipeline_id):
        return self.get_pipeline(pipeline_id).objects

    def describe_objects(self, object_ids, pipeline_id):
        """Return the subset of a pipeline's objects whose ids match."""
        return [
            candidate
            for candidate in self.get_pipeline(pipeline_id).objects
            if candidate.object_id in object_ids
        ]

    def activate_pipeline(self, pipeline_id):
        self.get_pipeline(pipeline_id).activate()
# Map every region that supports Data Pipeline (across the aws, aws-us-gov,
# and aws-cn partitions) to its own independent backend instance.
datapipeline_backends = {}
for region in Session().get_available_regions("datapipeline"):
    datapipeline_backends[region] = DataPipelineBackend()
for region in Session().get_available_regions(
    "datapipeline", partition_name="aws-us-gov"
):
    datapipeline_backends[region] = DataPipelineBackend()
for region in Session().get_available_regions("datapipeline", partition_name="aws-cn"):
    # Bug fix: DataPipelineBackend.__init__ takes no region argument;
    # passing `region` here raised TypeError at import time.
    datapipeline_backends[region] = DataPipelineBackend()