From f5ab5509a285b48602d7f89ca23a502f83d82f24 Mon Sep 17 00:00:00 2001 From: DrLuke Date: Thu, 24 Jan 2019 23:02:05 +0100 Subject: [PATCH 01/62] Add types --- troposphere-gen/types.py | 84 ++++++++++++++++++++++++++++++++++++++++ 1 file changed, 84 insertions(+) create mode 100644 troposphere-gen/types.py diff --git a/troposphere-gen/types.py b/troposphere-gen/types.py new file mode 100644 index 000000000..5f9cd5856 --- /dev/null +++ b/troposphere-gen/types.py @@ -0,0 +1,84 @@ +"""Types for Properties used in specification + +Documentation: https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/cfn-resource-specification-format.html + +Type classes take in type string from JSON specifications and outputs the +correct python-type corresponding to the specification-type. Subproperties +are resolved by the parser and the actual generated python class is filled in. +""" + +from typing import Union + +type_map = { + "String": str, + "Long": int, + "Integer": int, + "Double": float, + "Boolean": bool, + "Timestamp": str, + "Json": dict +} + + +class BaseType(): + """Base Type all types inherit from""" + + def __init__(self): + raise NotImplementedError + + +class PrimitiveType(BaseType): + """Primitive type + + Primitive types are String, Long, Integer, Double, Boolean, or Timestamp. + """ + + def __init__(self, type: str): + super(PrimitiveType, self).__init__() + if type in type_map: + self.type = type_map[type] # type: type + else: + raise ValueError("Unknown type: %s" % type) + + def __str__(self) -> str: + return self.type.__name__ + + +class Subproperty(BaseType): + """Subproperty type defined in other part of specification""" + + def __init__(self, type: str) -> None: + super(Subproperty, self).__init__() + self.type = type # type: str + self.print_class = None # type: type + + def __str__(self) -> str: + return self.print_class.__name__ + + +class Map(BaseType): + """Map type + + Map of subproperties or primitives. The keys are always strings. + """ + + def __init__(self, itemtype: Union[Subproperty, PrimitiveType]) -> None: + super(Map, self).__init__() + self.itemtype = itemtype # type: Union[Subproperty, PrimitiveType] + + def __str__(self) -> str: + return "Dict[str, %s]" % self.itemtype + + +class List(BaseType): + """List type + + List of subproperties or primitives. + """ + + def __init__(self, itemtype: Union[Subproperty, PrimitiveType]) -> None: + super(List, self).__init__() + self.itemtype = itemtype # type: Union[Subproperty, PrimitiveType] + + def __str__(self) -> str: + return "List[%s]" % self.itemtype From 5482b814a3f82f6fd52532ee4da718d892ada797 Mon Sep 17 00:00:00 2001 From: DrLuke Date: Fri, 25 Jan 2019 19:44:49 +0100 Subject: [PATCH 02/62] Rename Map and List classes to reduce ambiguity with builtin typing --- {troposphere-gen => troposphere_gen}/types.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) rename {troposphere-gen => troposphere_gen}/types.py (94%) diff --git a/troposphere-gen/types.py b/troposphere_gen/types.py similarity index 94% rename from troposphere-gen/types.py rename to troposphere_gen/types.py index 5f9cd5856..487ebbf85 100644 --- a/troposphere-gen/types.py +++ b/troposphere_gen/types.py @@ -56,28 +56,28 @@ def __str__(self) -> str: return self.print_class.__name__ -class Map(BaseType): +class MapType(BaseType): """Map type Map of subproperties or primitives. The keys are always strings. 
""" def __init__(self, itemtype: Union[Subproperty, PrimitiveType]) -> None: - super(Map, self).__init__() + super(MapType, self).__init__() self.itemtype = itemtype # type: Union[Subproperty, PrimitiveType] def __str__(self) -> str: return "Dict[str, %s]" % self.itemtype -class List(BaseType): +class ListType(BaseType): """List type List of subproperties or primitives. """ def __init__(self, itemtype: Union[Subproperty, PrimitiveType]) -> None: - super(List, self).__init__() + super(ListType, self).__init__() self.itemtype = itemtype # type: Union[Subproperty, PrimitiveType] def __str__(self) -> str: From cfe69cdb74c3e281efbe0642e8c34434983ac10b Mon Sep 17 00:00:00 2001 From: DrLuke Date: Fri, 25 Jan 2019 19:45:07 +0100 Subject: [PATCH 03/62] Make exception more verbose --- troposphere_gen/types.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/troposphere_gen/types.py b/troposphere_gen/types.py index 487ebbf85..1cf4b5fb7 100644 --- a/troposphere_gen/types.py +++ b/troposphere_gen/types.py @@ -38,7 +38,7 @@ def __init__(self, type: str): if type in type_map: self.type = type_map[type] # type: type else: - raise ValueError("Unknown type: %s" % type) + raise ValueError("Invalid primitive type: %s" % type) def __str__(self) -> str: return self.type.__name__ From 927d1a2015a96a5564dc40e663b72eea18296dc8 Mon Sep 17 00:00:00 2001 From: DrLuke Date: Fri, 25 Jan 2019 19:45:40 +0100 Subject: [PATCH 04/62] Remove unnecessary init --- troposphere_gen/types.py | 8 +------- 1 file changed, 1 insertion(+), 7 deletions(-) diff --git a/troposphere_gen/types.py b/troposphere_gen/types.py index 1cf4b5fb7..d87c31c1d 100644 --- a/troposphere_gen/types.py +++ b/troposphere_gen/types.py @@ -22,9 +22,7 @@ class BaseType(): """Base Type all types inherit from""" - - def __init__(self): - raise NotImplementedError + pass class PrimitiveType(BaseType): @@ -34,7 +32,6 @@ class PrimitiveType(BaseType): """ def __init__(self, type: str): - super(PrimitiveType, self).__init__() if type in type_map: self.type = type_map[type] # type: type else: @@ -48,7 +45,6 @@ class Subproperty(BaseType): """Subproperty type defined in other part of specification""" def __init__(self, type: str) -> None: - super(Subproperty, self).__init__() self.type = type # type: str self.print_class = None # type: type @@ -63,7 +59,6 @@ class MapType(BaseType): """ def __init__(self, itemtype: Union[Subproperty, PrimitiveType]) -> None: - super(MapType, self).__init__() self.itemtype = itemtype # type: Union[Subproperty, PrimitiveType] def __str__(self) -> str: @@ -77,7 +72,6 @@ class ListType(BaseType): """ def __init__(self, itemtype: Union[Subproperty, PrimitiveType]) -> None: - super(ListType, self).__init__() self.itemtype = itemtype # type: Union[Subproperty, PrimitiveType] def __str__(self) -> str: From 6bb593b738968724c1f1e6fe5d22b47ceba3128c Mon Sep 17 00:00:00 2001 From: DrLuke Date: Fri, 25 Jan 2019 19:46:20 +0100 Subject: [PATCH 05/62] Implement Property --- troposphere_gen/specification.py | 63 ++++++++++++++++++++++++++++++++ 1 file changed, 63 insertions(+) create mode 100644 troposphere_gen/specification.py diff --git a/troposphere_gen/specification.py b/troposphere_gen/specification.py new file mode 100644 index 000000000..43fafbeea --- /dev/null +++ b/troposphere_gen/specification.py @@ -0,0 +1,63 @@ +"""AWS Specification Resource Class + +These classes parse an AWS CF specification as documented here: +https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/cfn-resource-specification-format.html +""" + 
+from typing import Dict, List, Union +from .types import PrimitiveType, Subproperty, MapType, ListType + + +class Property(): + """Parsed property""" + + def __init__(self, name: str, propertydict: Dict) -> None: + self.name = name # type: str + self.documentation = None # type: str + self.duplicate_allowed = None # type: bool + self.item_type = None # type: Subproperty + self.primitive_item_type = None # type: PrimitiveType + self.primitive_type = None # type: PrimitiveType + self.required = None # type: bool + self.type = None # type: Union[Subproperty, ListType, MapType] + self._update_type = None # type: str + + self.parse(propertydict) + + def parse(self, propertydict: Dict) -> None: + """Parse JSON property definition""" + # Required fields for every property + self.documentation = propertydict["Documentation"] + self.update_type = propertydict["UpdateType"] + self.required = propertydict["Required"] + + # Determine type + if "PrimitiveType" in propertydict: + self.primitive_type = PrimitiveType(propertydict["PrimitiveType"]) + elif "Type" in propertydict: + if propertydict["Type"] == "List": + self.type = ListType(propertydict["Type"]) + elif propertydict["Type"] == "Map": + self.type = MapType(propertydict["Type"]) + else: + self.type = Subproperty(propertydict["Type"]) + + # Can only happen if Type is 'List' or 'Map' + if "PrimitiveItemType" in propertydict: + self.type.itemtype = PrimitiveType(propertydict["PrimitiveItemType"]) + elif "ItemType" in propertydict["ItemType"]: + self.type.itemtype = Subproperty(propertydict["ItemType"]) + + if "DuplicatesAllowed" in propertydict: + self.duplicate_allowed = propertydict["DuplicatesAllowed"] + + @property + def update_type(self) -> str: + return self._update_type + + @update_type.setter + def update_type(self, update_type: str) -> None: + if update_type in ["Immutable", "Mutable", "Conditional"]: + self._update_type = update_type + else: + raise ValueError("Invalid update type: %s" % update_type) From 6239bb072db49d4dde4645aa50143c9d9da7fa16 Mon Sep 17 00:00:00 2001 From: DrLuke Date: Fri, 25 Jan 2019 20:16:01 +0100 Subject: [PATCH 06/62] Correctly initialize Map and List --- troposphere_gen/specification.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/troposphere_gen/specification.py b/troposphere_gen/specification.py index 43fafbeea..57486c4bb 100644 --- a/troposphere_gen/specification.py +++ b/troposphere_gen/specification.py @@ -36,18 +36,18 @@ def parse(self, propertydict: Dict) -> None: self.primitive_type = PrimitiveType(propertydict["PrimitiveType"]) elif "Type" in propertydict: if propertydict["Type"] == "List": - self.type = ListType(propertydict["Type"]) + if "PrimitiveItemType" in propertydict: + self.type = ListType(PrimitiveType(propertydict["PrimitiveItemType"])) + elif "ItemType" in propertydict: + self.type = ListType(Subproperty(propertydict["ItemType"])) elif propertydict["Type"] == "Map": - self.type = MapType(propertydict["Type"]) + if "PrimitiveItemType" in propertydict: + self.type = MapType(PrimitiveType(propertydict["PrimitiveItemType"])) + elif "ItemType" in propertydict: + self.type = MapType(Subproperty(propertydict["ItemType"])) else: self.type = Subproperty(propertydict["Type"]) - # Can only happen if Type is 'List' or 'Map' - if "PrimitiveItemType" in propertydict: - self.type.itemtype = PrimitiveType(propertydict["PrimitiveItemType"]) - elif "ItemType" in propertydict["ItemType"]: - self.type.itemtype = Subproperty(propertydict["ItemType"]) - if "DuplicatesAllowed" 
in propertydict: self.duplicate_allowed = propertydict["DuplicatesAllowed"] From b979ab86c01e31be091db5ccab15c07223b004ed Mon Sep 17 00:00:00 2001 From: DrLuke Date: Fri, 25 Jan 2019 20:17:38 +0100 Subject: [PATCH 07/62] Add unittests for Property --- tests/generator/__init__.py | 0 .../generator/test_specification_property.py | 109 ++++++++++++++++++ 2 files changed, 109 insertions(+) create mode 100644 tests/generator/__init__.py create mode 100644 tests/generator/test_specification_property.py diff --git a/tests/generator/__init__.py b/tests/generator/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/generator/test_specification_property.py b/tests/generator/test_specification_property.py new file mode 100644 index 000000000..5afdc8977 --- /dev/null +++ b/tests/generator/test_specification_property.py @@ -0,0 +1,109 @@ +import unittest +from troposphere_gen.specification import Property +from troposphere_gen.types import * + + +class TestProperty(unittest.TestCase): + def test_primitive(self): + propertydict = { + "Documentation": "http://example.com/foo", + "PrimitiveType": "String", + "Required": False, + "UpdateType": "Mutable" + } + + prop = Property("TestProperty", propertydict) + + self.assertEqual(PrimitiveType, type(prop.primitive_type)) + self.assertEqual(None, prop.type) + self.assertEqual(None, prop.primitive_item_type) + self.assertEqual(None, prop.item_type) + + def test_map_primitive(self): + propertydict = { + "Documentation": "http://example.com/foo", + "Type": "Map", + "PrimitiveItemType": "String", + "Required": False, + "UpdateType": "Mutable" + } + + prop = Property("TestProperty", propertydict) + + self.assertEqual(None, prop.primitive_type) + self.assertEqual(MapType, type(prop.type)) + self.assertEqual(None, prop.primitive_item_type) + self.assertEqual(None, prop.item_type) + self.assertEqual(PrimitiveType, type(prop.type.itemtype)) + + def test_map_subproperty(self): + propertydict = { + "Documentation": "http://example.com/foo", + "Type": "Map", + "ItemType": "SomeType", + "Required": False, + "UpdateType": "Mutable" + } + + prop = Property("TestProperty", propertydict) + + self.assertEqual(None, prop.primitive_type) + self.assertEqual(MapType, type(prop.type)) + self.assertEqual(None, prop.primitive_item_type) + self.assertEqual(None, prop.item_type) + self.assertEqual(Subproperty, type(prop.type.itemtype)) + + def test_list_primitive(self): + propertydict = { + "Documentation": "http://example.com/foo", + "Type": "List", + "PrimitiveItemType": "String", + "Required": False, + "UpdateType": "Mutable" + } + + prop = Property("TestProperty", propertydict) + + self.assertEqual(None, prop.primitive_type) + self.assertEqual(ListType, type(prop.type)) + self.assertEqual(None, prop.primitive_item_type) + self.assertEqual(None, prop.item_type) + self.assertEqual(PrimitiveType, type(prop.type.itemtype)) + + def test_list_subproperty(self): + propertydict = { + "Documentation": "http://example.com/foo", + "Type": "List", + "ItemType": "SomeType", + "Required": False, + "UpdateType": "Mutable" + } + + prop = Property("TestProperty", propertydict) + + self.assertEqual(None, prop.primitive_type) + self.assertEqual(ListType, type(prop.type)) + self.assertEqual(None, prop.primitive_item_type) + self.assertEqual(None, prop.item_type) + self.assertEqual(Subproperty, type(prop.type.itemtype)) + + def test_subproperty(self): + propertydict = { + "Documentation": "http://example.com/foo", + "Type": "SomeSubProperty", + "PrimitiveItemType": "String", + 
"Required": False, + "UpdateType": "Mutable" + } + + prop = Property("TestProperty", propertydict) + + self.assertEqual(None, prop.primitive_type) + self.assertEqual(Subproperty, type(prop.type)) + self.assertEqual(None, prop.type.print_class) + self.assertEqual(None, prop.primitive_item_type) + self.assertEqual(None, prop.item_type) + + +if __name__ == '__main__': + unittest.main() From 4a651ca9ba84619c8f493d35f31c44e6914a613c Mon Sep 17 00:00:00 2001 From: DrLuke Date: Fri, 25 Jan 2019 20:20:44 +0100 Subject: [PATCH 08/62] Convert type annotations to python 3.7 --- troposphere_gen/specification.py | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/troposphere_gen/specification.py b/troposphere_gen/specification.py index 57486c4bb..bae426c50 100644 --- a/troposphere_gen/specification.py +++ b/troposphere_gen/specification.py @@ -12,15 +12,15 @@ class Property(): """Parsed property""" def __init__(self, name: str, propertydict: Dict) -> None: - self.name = name # type: str - self.documentation = None # type: str - self.duplicate_allowed = None # type: bool - self.item_type = None # type: Subproperty - self.primitive_item_type = None # type: PrimitiveType - self.primitive_type = None # type: PrimitiveType - self.required = None # type: bool - self.type = None # type: Union[Subproperty, ListType, MapType] - self._update_type = None # type: str + self.name: str = name + self.documentation: str = None + self.duplicate_allowed: bool = None + self.item_type: Subproperty = None + self.primitive_item_type: PrimitiveType = None + self.primitive_type: PrimitiveType = None + self.required: bool = None + self.type: Union[Subproperty, ListType, MapType] = None + self._update_type: str = None self.parse(propertydict) From 62dcc0a5fd4a981ae6d4e2220515b624b7804db7 Mon Sep 17 00:00:00 2001 From: DrLuke Date: Fri, 25 Jan 2019 21:56:56 +0100 Subject: [PATCH 09/62] Remove reduntant item type fields --- tests/generator/test_specification_property.py | 12 ------------ troposphere_gen/specification.py | 2 -- 2 files changed, 14 deletions(-) diff --git a/tests/generator/test_specification_property.py b/tests/generator/test_specification_property.py index 5afdc8977..39b4fb770 100644 --- a/tests/generator/test_specification_property.py +++ b/tests/generator/test_specification_property.py @@ -16,8 +16,6 @@ def test_primitive(self): self.assertEqual(PrimitiveType, type(prop.primitive_type)) self.assertEqual(None, prop.type) - self.assertEqual(None, prop.primitive_item_type) - self.assertEqual(None, prop.item_type) def test_map_primitive(self): propertydict = { @@ -32,8 +30,6 @@ def test_map_primitive(self): self.assertEqual(None, prop.primitive_type) self.assertEqual(MapType, type(prop.type)) - self.assertEqual(None, prop.primitive_item_type) - self.assertEqual(None, prop.item_type) self.assertEqual(PrimitiveType, type(prop.type.itemtype)) def test_map_subproperty(self): @@ -49,8 +45,6 @@ def test_map_subproperty(self): self.assertEqual(None, prop.primitive_type) self.assertEqual(MapType, type(prop.type)) - self.assertEqual(None, prop.primitive_item_type) - self.assertEqual(None, prop.item_type) self.assertEqual(Subproperty, type(prop.type.itemtype)) def test_list_primitive(self): @@ -66,8 +60,6 @@ def test_list_primitive(self): self.assertEqual(None, prop.primitive_type) self.assertEqual(ListType, type(prop.type)) - self.assertEqual(None, prop.primitive_item_type) - self.assertEqual(None, prop.item_type) self.assertEqual(PrimitiveType, type(prop.type.itemtype)) def 
test_list_subproperty(self): @@ -83,8 +75,6 @@ def test_list_subproperty(self): self.assertEqual(None, prop.primitive_type) self.assertEqual(ListType, type(prop.type)) - self.assertEqual(None, prop.primitive_item_type) - self.assertEqual(None, prop.item_type) self.assertEqual(Subproperty, type(prop.type.itemtype)) def test_subproperty(self): @@ -101,8 +91,6 @@ def test_subproperty(self): self.assertEqual(None, prop.primitive_type) self.assertEqual(Subproperty, type(prop.type)) self.assertEqual(None, prop.type.print_class) - self.assertEqual(None, prop.primitive_item_type) - self.assertEqual(None, prop.item_type) if __name__ == '__main__': diff --git a/troposphere_gen/specification.py b/troposphere_gen/specification.py index bae426c50..738aac00d 100644 --- a/troposphere_gen/specification.py +++ b/troposphere_gen/specification.py @@ -15,8 +15,6 @@ def __init__(self, name: str, propertydict: Dict) -> None: self.name: str = name self.documentation: str = None self.duplicate_allowed: bool = None - self.item_type: Subproperty = None - self.primitive_item_type: PrimitiveType = None self.primitive_type: PrimitiveType = None self.required: bool = None self.type: Union[Subproperty, ListType, MapType] = None From 82cc23be590bc5196e946ec6abd3c1878fdb6eaa Mon Sep 17 00:00:00 2001 From: DrLuke Date: Fri, 25 Jan 2019 22:01:31 +0100 Subject: [PATCH 10/62] Add Attribute --- troposphere_gen/specification.py | 32 ++++++++++++++++++++++++++++++++ 1 file changed, 32 insertions(+) diff --git a/troposphere_gen/specification.py b/troposphere_gen/specification.py index 738aac00d..df51236d0 100644 --- a/troposphere_gen/specification.py +++ b/troposphere_gen/specification.py @@ -59,3 +59,35 @@ def update_type(self, update_type: str) -> None: self._update_type = update_type else: raise ValueError("Invalid update type: %s" % update_type) + + +class Attribute(): + """Parsed attribute""" + + def __init__(self, name: str, attributedict: Dict) -> None: + self.name: str = name + self.item_type: Subproperty = None + self.primitive_item_type: PrimitiveType = None + self.primitive_type: PrimitiveType = None + self.type: Union[Subproperty, ListType, MapType] = None + + self.parse(attributedict) + + def parse(self, attributedict: Dict) -> None: + """Parse JSON attribute definition""" + # Determine type + if "PrimitiveType" in attributedict: + self.primitive_type = PrimitiveType(attributedict["PrimitiveType"]) + elif "Type" in attributedict: + if attributedict["Type"] == "List": + if "PrimitiveItemType" in attributedict: + self.type = ListType(PrimitiveType(attributedict["PrimitiveItemType"])) + elif "ItemType" in attributedict: + self.type = ListType(Subproperty(attributedict["ItemType"])) + elif attributedict["Type"] == "Map": + if "PrimitiveItemType" in attributedict: + self.type = MapType(PrimitiveType(attributedict["PrimitiveItemType"])) + elif "ItemType" in attributedict: + self.type = MapType(Subproperty(attributedict["ItemType"])) + else: + self.type = Subproperty(attributedict["Type"]) From 9529660d6d238a2c249597f9aa9ca03a646ad507 Mon Sep 17 00:00:00 2001 From: DrLuke Date: Fri, 25 Jan 2019 22:01:43 +0100 Subject: [PATCH 11/62] Add unit tests for Attribute --- .../generator/test_specification_attribute.py | 67 +++++++++++++++++++ 1 file changed, 67 insertions(+) create mode 100644 tests/generator/test_specification_attribute.py diff --git a/tests/generator/test_specification_attribute.py b/tests/generator/test_specification_attribute.py new file mode 100644 index 000000000..7d848f0b3 --- /dev/null +++ 
b/tests/generator/test_specification_attribute.py @@ -0,0 +1,67 @@ +import unittest +from troposphere_gen.specification import Attribute +from troposphere_gen.types import * + + +class TestProperty(unittest.TestCase): + def test_primitive(self): + attributedict = { + "PrimitiveType": "String" + } + + prop = Attribute("TestAttribute", attributedict) + + self.assertEqual(PrimitiveType, type(prop.primitive_type)) + self.assertEqual(None, prop.type) + + def test_map_primitive(self): + attributedict = { + "Type": "Map", + "PrimitiveItemType": "String", + } + + prop = Attribute("TestAttribute", attributedict) + + self.assertEqual(None, prop.primitive_type) + self.assertEqual(MapType, type(prop.type)) + self.assertEqual(PrimitiveType, type(prop.type.itemtype)) + + def test_map_subproperty(self): + attributedict = { + "Type": "Map", + "ItemType": "SomeType", + } + + prop = Attribute("TestAttribute", attributedict) + + self.assertEqual(None, prop.primitive_type) + self.assertEqual(MapType, type(prop.type)) + self.assertEqual(Subproperty, type(prop.type.itemtype)) + + def test_list_primitive(self): + attributedict = { + "Type": "List", + "PrimitiveItemType": "String", + } + + prop = Attribute("TestAttribute", attributedict) + + self.assertEqual(None, prop.primitive_type) + self.assertEqual(ListType, type(prop.type)) + self.assertEqual(PrimitiveType, type(prop.type.itemtype)) + + def test_list_subproperty(self): + attributedict = { + "Type": "List", + "ItemType": "SomeType", + } + + prop = Attribute("TestAttribute", attributedict) + + self.assertEqual(None, prop.primitive_type) + self.assertEqual(ListType, type(prop.type)) + self.assertEqual(Subproperty, type(prop.type.itemtype)) + + +if __name__ == '__main__': + unittest.main() From 5e5b62c1e88735a7e06741b082cd64d01745d481 Mon Sep 17 00:00:00 2001 From: DrLuke Date: Fri, 25 Jan 2019 22:10:45 +0100 Subject: [PATCH 12/62] Refactor to reuse code --- troposphere_gen/specification.py | 89 +++++++++++++------------------- 1 file changed, 36 insertions(+), 53 deletions(-) diff --git a/troposphere_gen/specification.py b/troposphere_gen/specification.py index df51236d0..d9fc4cb89 100644 --- a/troposphere_gen/specification.py +++ b/troposphere_gen/specification.py @@ -8,59 +8,6 @@ from .types import PrimitiveType, Subproperty, MapType, ListType -class Property(): - """Parsed property""" - - def __init__(self, name: str, propertydict: Dict) -> None: - self.name: str = name - self.documentation: str = None - self.duplicate_allowed: bool = None - self.primitive_type: PrimitiveType = None - self.required: bool = None - self.type: Union[Subproperty, ListType, MapType] = None - self._update_type: str = None - - self.parse(propertydict) - - def parse(self, propertydict: Dict) -> None: - """Parse JSON property definition""" - # Required fields for every property - self.documentation = propertydict["Documentation"] - self.update_type = propertydict["UpdateType"] - self.required = propertydict["Required"] - - # Determine type - if "PrimitiveType" in propertydict: - self.primitive_type = PrimitiveType(propertydict["PrimitiveType"]) - elif "Type" in propertydict: - if propertydict["Type"] == "List": - if "PrimitiveItemType" in propertydict: - self.type = ListType(PrimitiveType(propertydict["PrimitiveItemType"])) - elif "ItemType" in propertydict: - self.type = ListType(Subproperty(propertydict["ItemType"])) - elif propertydict["Type"] == "Map": - if "PrimitiveItemType" in propertydict: - self.type = MapType(PrimitiveType(propertydict["PrimitiveItemType"])) - elif 
"ItemType" in propertydict: - self.type = MapType(Subproperty(propertydict["ItemType"])) - else: - self.type = Subproperty(propertydict["Type"]) - - if "DuplicatesAllowed" in propertydict: - self.duplicate_allowed = propertydict["DuplicatesAllowed"] - - @property - def update_type(self) -> str: - return self._update_type - - @update_type.setter - def update_type(self, update_type: str) -> None: - if update_type in ["Immutable", "Mutable", "Conditional"]: - self._update_type = update_type - else: - raise ValueError("Invalid update type: %s" % update_type) - - class Attribute(): """Parsed attribute""" @@ -91,3 +38,39 @@ def parse(self, attributedict: Dict) -> None: self.type = MapType(Subproperty(attributedict["ItemType"])) else: self.type = Subproperty(attributedict["Type"]) + + +class Property(Attribute): + """Parsed property""" + + def __init__(self, name: str, propertydict: Dict) -> None: + super(Property, self).__init__(name, propertydict) + self.documentation: str = None + self.duplicate_allowed: bool = None + self.required: bool = None + self._update_type: str = None + + self.parse(propertydict) + + def parse(self, propertydict: Dict) -> None: + """Parse JSON property definition""" + super(Property, self).parse(propertydict) + + # Required fields for every property + self.documentation = propertydict["Documentation"] + self.update_type = propertydict["UpdateType"] + self.required = propertydict["Required"] + + if "DuplicatesAllowed" in propertydict: + self.duplicate_allowed = propertydict["DuplicatesAllowed"] + + @property + def update_type(self) -> str: + return self._update_type + + @update_type.setter + def update_type(self, update_type: str) -> None: + if update_type in ["Immutable", "Mutable", "Conditional"]: + self._update_type = update_type + else: + raise ValueError("Invalid update type: %s" % update_type) From a1daf74a3af9a8d61af5ce8531b941d1e1860659 Mon Sep 17 00:00:00 2001 From: DrLuke Date: Fri, 25 Jan 2019 22:11:00 +0100 Subject: [PATCH 13/62] Add more tests --- .../generator/test_specification_property.py | 28 +++++++++++++++++++ 1 file changed, 28 insertions(+) diff --git a/tests/generator/test_specification_property.py b/tests/generator/test_specification_property.py index 39b4fb770..27a7000f0 100644 --- a/tests/generator/test_specification_property.py +++ b/tests/generator/test_specification_property.py @@ -4,6 +4,34 @@ class TestProperty(unittest.TestCase): + def test_fields(self): + propertydict = { + "Documentation": "http://example.com/foo", + "Type": "List", + "PrimitiveItemType": "String", + "Required": True, + "UpdateType": "Mutable", + "DuplicatesAllowed": True + } + + prop = Property("TestProperty", propertydict) + + self.assertEqual(propertydict["Documentation"], prop.documentation) + self.assertTrue(prop.required) + self.assertEqual(propertydict["UpdateType"], prop.update_type) + self.assertTrue(prop.duplicate_allowed) + + def test_updatetype_setter_exception(self): + propertydict = { + "Documentation": "http://example.com/foo", + "PrimitiveType": "String", + "Required": True, + "UpdateType": "Bogus", + } + + with self.assertRaises(ValueError): + prop = Property("TestProperty", propertydict) + def test_primitive(self): propertydict = { "Documentation": "http://example.com/foo", From 2073364faa197af66313f271c8c897241b560be1 Mon Sep 17 00:00:00 2001 From: DrLuke Date: Fri, 25 Jan 2019 23:19:18 +0100 Subject: [PATCH 14/62] Update import --- troposphere_gen/specification.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git 
a/troposphere_gen/specification.py b/troposphere_gen/specification.py index d9fc4cb89..fbe53c092 100644 --- a/troposphere_gen/specification.py +++ b/troposphere_gen/specification.py @@ -4,8 +4,8 @@ https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/cfn-resource-specification-format.html """ -from typing import Dict, List, Union -from .types import PrimitiveType, Subproperty, MapType, ListType +from typing import Dict, Union +from troposphere_gen.types import PrimitiveType, Subproperty, MapType, ListType class Attribute(): From cc7f7529d779c871214972c23f9308430394f281 Mon Sep 17 00:00:00 2001 From: DrLuke Date: Fri, 25 Jan 2019 23:25:49 +0100 Subject: [PATCH 15/62] Name variable correctly --- .../generator/test_specification_attribute.py | 38 +++++++++---------- 1 file changed, 19 insertions(+), 19 deletions(-) diff --git a/tests/generator/test_specification_attribute.py b/tests/generator/test_specification_attribute.py index 7d848f0b3..feef0d97e 100644 --- a/tests/generator/test_specification_attribute.py +++ b/tests/generator/test_specification_attribute.py @@ -9,10 +9,10 @@ def test_primitive(self): "PrimitiveType": "String" } - prop = Attribute("TestAttribute", attributedict) + attrib = Attribute("TestAttribute", attributedict) - self.assertEqual(PrimitiveType, type(prop.primitive_type)) - self.assertEqual(None, prop.type) + self.assertEqual(PrimitiveType, type(attrib.primitive_type)) + self.assertEqual(None, attrib.type) def test_map_primitive(self): attributedict = { @@ -20,11 +20,11 @@ def test_map_primitive(self): "PrimitiveItemType": "String", } - prop = Attribute("TestAttribute", attributedict) + attrib = Attribute("TestAttribute", attributedict) - self.assertEqual(None, prop.primitive_type) - self.assertEqual(MapType, type(prop.type)) - self.assertEqual(PrimitiveType, type(prop.type.itemtype)) + self.assertEqual(None, attrib.primitive_type) + self.assertEqual(MapType, type(attrib.type)) + self.assertEqual(PrimitiveType, type(attrib.type.itemtype)) def test_map_subproperty(self): attributedict = { @@ -32,11 +32,11 @@ def test_map_subproperty(self): "ItemType": "SomeType", } - prop = Attribute("TestAttribute", attributedict) + attrib = Attribute("TestAttribute", attributedict) - self.assertEqual(None, prop.primitive_type) - self.assertEqual(MapType, type(prop.type)) - self.assertEqual(Subproperty, type(prop.type.itemtype)) + self.assertEqual(None, attrib.primitive_type) + self.assertEqual(MapType, type(attrib.type)) + self.assertEqual(Subproperty, type(attrib.type.itemtype)) def test_list_primitive(self): attributedict = { @@ -44,11 +44,11 @@ def test_list_primitive(self): "PrimitiveItemType": "String", } - prop = Attribute("TestAttribute", attributedict) + attrib = Attribute("TestAttribute", attributedict) - self.assertEqual(None, prop.primitive_type) - self.assertEqual(ListType, type(prop.type)) - self.assertEqual(PrimitiveType, type(prop.type.itemtype)) + self.assertEqual(None, attrib.primitive_type) + self.assertEqual(ListType, type(attrib.type)) + self.assertEqual(PrimitiveType, type(attrib.type.itemtype)) def test_list_subproperty(self): attributedict = { @@ -56,11 +56,11 @@ def test_list_subproperty(self): "ItemType": "SomeType", } - prop = Attribute("TestAttribute", attributedict) + attrib = Attribute("TestAttribute", attributedict) - self.assertEqual(None, prop.primitive_type) - self.assertEqual(ListType, type(prop.type)) - self.assertEqual(Subproperty, type(prop.type.itemtype)) + self.assertEqual(None, attrib.primitive_type) + self.assertEqual(ListType, 
type(attrib.type))
+        self.assertEqual(Subproperty, type(attrib.type.itemtype))
 
 
 if __name__ == '__main__':
     unittest.main()

From 9c055c2b5ae7b1d6e6198cf3bcd67cb57977d85c Mon Sep 17 00:00:00 2001
From: DrLuke
Date: Fri, 25 Jan 2019 23:26:17 +0100
Subject: [PATCH 16/62] Add resource

---
 troposphere_gen/specification.py | 22 ++++++++++++++++++++++
 1 file changed, 22 insertions(+)

diff --git a/troposphere_gen/specification.py b/troposphere_gen/specification.py
index fbe53c092..8ea0e9e47 100644
--- a/troposphere_gen/specification.py
+++ b/troposphere_gen/specification.py
@@ -74,3 +74,25 @@ def update_type(self, update_type: str) -> None:
             self._update_type = update_type
         else:
             raise ValueError("Invalid update type: %s" % update_type)
+
+
+class Resource():
+    """Parsed resource"""
+
+    def __init__(self, name: str, resourcedict: Dict) -> None:
+        self.name: str = name
+        self.documentation: str = None
+        self.attributes: Dict[str, Attribute] = {}
+        self.properties: Dict[str, Property] = {}
+
+        self.parse(resourcedict)
+
+    def parse(self, resourcedict: Dict) -> None:
+        """Parse JSON resource definition"""
+        self.documentation = resourcedict["Documentation"]
+
+        for name, attributedict in resourcedict["Attributes"].items():
+            self.attributes[name] = Attribute(name, attributedict)
+
+        for name, propertydict in resourcedict["Properties"].items():
+            self.properties[name] = Property(name, propertydict)

From 4a15b7511cf459a93064f496e3fb1670588d2a38 Mon Sep 17 00:00:00 2001
From: DrLuke
Date: Fri, 25 Jan 2019 23:26:37 +0100
Subject: [PATCH 17/62] Add unit test for resource

---
 .../generator/test_specifications_resource.py | 45 +++++++++++++++++++
 1 file changed, 45 insertions(+)
 create mode 100644 tests/generator/test_specifications_resource.py

diff --git a/tests/generator/test_specifications_resource.py b/tests/generator/test_specifications_resource.py
new file mode 100644
index 000000000..3ab8847c2
--- /dev/null
+++ b/tests/generator/test_specifications_resource.py
@@ -0,0 +1,45 @@
+import unittest
+from troposphere_gen.specification import Resource
+
+
+class TestProperty(unittest.TestCase):
+    def test_resource(self):
+        resourcedict = {
+            "Attributes": {
+                "SomeAttrib": {
+                    "PrimitiveType": "String"
+                },
+                "AnotherAttrib": {
+                    "PrimitiveType": "String"
+                }
+            },
+            "Documentation": "http://example.com/foo",
+            "Properties": {
+                "SomeProp": {
+                    "Documentation": "http://example.com/foo",
+                    "Required": True,
+                    "PrimitiveType": "String",
+                    "UpdateType": "Mutable"
+                },
+                "AnotherProp": {
+                    "Documentation": "http://example.com/foo",
+                    "Required": True,
+                    "PrimitiveType": "String",
+                    "UpdateType": "Mutable"
+                }
+            }
+        }
+
+        res = Resource("SomeRes", resourcedict)
+
+        self.assertEqual(resourcedict["Documentation"], res.documentation)
+        self.assertEqual(2, len(res.attributes.values()))
+        self.assertEqual(2, len(res.properties.values()))
+        self.assertIn("SomeAttrib", res.attributes)
+        self.assertIn("AnotherAttrib", res.attributes)
+        self.assertIn("SomeProp", res.properties)
+        self.assertIn("AnotherProp", res.properties)
+
+
+if __name__ == '__main__':
+    unittest.main()

From 182c6c5fd0f43ed1b8b979c7a9fc91a80cd34804 Mon Sep 17 00:00:00 2001
From: DrLuke
Date: Sat, 26 Jan 2019 12:09:38 +0100
Subject: [PATCH 18/62] Accommodate for subproperties

---
 troposphere_gen/specification.py | 16 +++++++++++-----
 1 file changed, 11 insertions(+), 5 deletions(-)

diff --git a/troposphere_gen/specification.py b/troposphere_gen/specification.py
index 8ea0e9e47..5ff443dff 100644
--- a/troposphere_gen/specification.py
+++ b/troposphere_gen/specification.py
@@
-44,23 +44,29 @@ class Property(Attribute): """Parsed property""" def __init__(self, name: str, propertydict: Dict) -> None: - super(Property, self).__init__(name, propertydict) self.documentation: str = None self.duplicate_allowed: bool = None self.required: bool = None self._update_type: str = None + self.properties: Dict[str, Property] = {} - self.parse(propertydict) + super(Property, self).__init__(name, propertydict) def parse(self, propertydict: Dict) -> None: """Parse JSON property definition""" - super(Property, self).parse(propertydict) - - # Required fields for every property self.documentation = propertydict["Documentation"] + + # If property contains subproperties, only parse those + if "Properties" in propertydict: + for name, subpropertydict in propertydict["Properties"].items(): + self.properties[name] = Property(name, subpropertydict) + return + self.update_type = propertydict["UpdateType"] self.required = propertydict["Required"] + super(Property, self).parse(propertydict) + if "DuplicatesAllowed" in propertydict: self.duplicate_allowed = propertydict["DuplicatesAllowed"] From 0e9ee288aee100f597ec08db260bb94bdcf2127d Mon Sep 17 00:00:00 2001 From: DrLuke Date: Sat, 26 Jan 2019 12:09:56 +0100 Subject: [PATCH 19/62] Add specification --- troposphere_gen/specification.py | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/troposphere_gen/specification.py b/troposphere_gen/specification.py index 5ff443dff..9d8ab53b4 100644 --- a/troposphere_gen/specification.py +++ b/troposphere_gen/specification.py @@ -6,6 +6,7 @@ from typing import Dict, Union from troposphere_gen.types import PrimitiveType, Subproperty, MapType, ListType +from distutils.version import StrictVersion class Attribute(): @@ -102,3 +103,24 @@ def parse(self, resourcedict: Dict) -> None: for name, propertydict in resourcedict["Properties"].items(): self.properties[name] = Property(name, propertydict) + + +class Specification(): + def __init__(self, name: str, specificationdict: Dict) -> None: + self.name: str = name + self.resource_specification_version: StrictVersion = None + self.property_types: Dict[str, Property] = {} + self.resource_types: Dict[str, Resource] = {} + + self.parse(specificationdict) + + def parse(self, specificationsdict: Dict) -> None: + self.resource_specification_version = StrictVersion(specificationsdict["ResourceSpecificationVersion"]) + + if "PropertyTypes" in specificationsdict: + for name, attributedict in specificationsdict["PropertyTypes"].items(): + self.property_types[name] = Property(name, attributedict) + + if "ResourceType" in specificationsdict: + for name, propertydict in specificationsdict["ResourceType"].items(): + self.resource_types[name] = Resource(name, propertydict) From 40a0bf718121786b4515abd4c797581443fc8419 Mon Sep 17 00:00:00 2001 From: DrLuke Date: Sat, 26 Jan 2019 12:10:20 +0100 Subject: [PATCH 20/62] Add unit test for specification --- tests/generator/specification_testdata.json | 170 ++++++++++++++++++ .../test_specification_specification.py | 23 +++ 2 files changed, 193 insertions(+) create mode 100644 tests/generator/specification_testdata.json create mode 100644 tests/generator/test_specification_specification.py diff --git a/tests/generator/specification_testdata.json b/tests/generator/specification_testdata.json new file mode 100644 index 000000000..d9b3e054b --- /dev/null +++ b/tests/generator/specification_testdata.json @@ -0,0 +1,170 @@ +{ + "PropertyTypes": { + "AWS::Elasticsearch::Domain.EBSOptions": { + "Documentation": 
"http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticsearch-domain-ebsoptions.html", + "Properties": { + "EBSEnabled": { + "Documentation": "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticsearch-domain-ebsoptions.html#cfn-elasticsearch-domain-ebsoptions-ebsenabled", + "PrimitiveType": "Boolean", + "Required": false, + "UpdateType": "Mutable" + }, + "Iops": { + "Documentation": "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticsearch-domain-ebsoptions.html#cfn-elasticsearch-domain-ebsoptions-iops", + "PrimitiveType": "Integer", + "Required": false, + "UpdateType": "Mutable" + }, + "VolumeSize": { + "Documentation": "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticsearch-domain-ebsoptions.html#cfn-elasticsearch-domain-ebsoptions-volumesize", + "PrimitiveType": "Integer", + "Required": false, + "UpdateType": "Mutable" + }, + "VolumeType": { + "Documentation": "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticsearch-domain-ebsoptions.html#cfn-elasticsearch-domain-ebsoptions-volumetype", + "PrimitiveType": "String", + "Required": false, + "UpdateType": "Mutable" + } + } + }, + "AWS::Elasticsearch::Domain.ElasticsearchClusterConfig": { + "Documentation": "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticsearch-domain-elasticsearchclusterconfig.html", + "Properties": { + "DedicatedMasterCount": { + "Documentation": "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticsearch-domain-elasticsearchclusterconfig.html#cfn-elasticsearch-domain-elasticseachclusterconfig-dedicatedmastercount", + "PrimitiveType": "Integer", + "Required": false, + "UpdateType": "Mutable" + }, + "DedicatedMasterEnabled": { + "Documentation": "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticsearch-domain-elasticsearchclusterconfig.html#cfn-elasticsearch-domain-elasticseachclusterconfig-dedicatedmasterenabled", + "PrimitiveType": "Boolean", + "Required": false, + "UpdateType": "Mutable" + }, + "DedicatedMasterType": { + "Documentation": "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticsearch-domain-elasticsearchclusterconfig.html#cfn-elasticsearch-domain-elasticseachclusterconfig-dedicatedmastertype", + "PrimitiveType": "String", + "Required": false, + "UpdateType": "Mutable" + }, + "InstanceCount": { + "Documentation": "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticsearch-domain-elasticsearchclusterconfig.html#cfn-elasticsearch-domain-elasticseachclusterconfig-instancecount", + "PrimitiveType": "Integer", + "Required": false, + "UpdateType": "Mutable" + }, + "InstanceType": { + "Documentation": "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticsearch-domain-elasticsearchclusterconfig.html#cfn-elasticsearch-domain-elasticseachclusterconfig-instnacetype", + "PrimitiveType": "String", + "Required": false, + "UpdateType": "Mutable" + }, + "ZoneAwarenessEnabled": { + "Documentation": "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticsearch-domain-elasticsearchclusterconfig.html#cfn-elasticsearch-domain-elasticseachclusterconfig-zoneawarenessenabled", + "PrimitiveType": "Boolean", + "Required": false, + "UpdateType": "Mutable" + } + } + }, + "AWS::Elasticsearch::Domain.SnapshotOptions": { + "Documentation": 
"http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticsearch-domain-snapshotoptions.html", + "Properties": { + "AutomatedSnapshotStartHour": { + "Documentation": "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticsearch-domain-snapshotoptions.html#cfn-elasticsearch-domain-snapshotoptions-automatedsnapshotstarthour", + "PrimitiveType": "Integer", + "Required": false, + "UpdateType": "Mutable" + } + } + }, + "Tag": { + "Documentation": "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-resource-tags.html", + "Properties": { + "Key": { + "Documentation": "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-resource-tags.html#cfn-resource-tags-key", + "PrimitiveType": "String", + "Required": true, + "UpdateType": "Immutable" + }, + "Value": { + "Documentation": "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-resource-tags.html#cfn-resource-tags-value", + "PrimitiveType": "String", + "Required": true, + "UpdateType": "Immutable" + } + } + } + }, + "ResourceType": { + "AWS::Elasticsearch::Domain": { + "Attributes": { + "DomainArn": { + "PrimitiveType": "String" + }, + "DomainEndpoint": { + "PrimitiveType": "String" + } + }, + "Documentation": "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-elasticsearch-domain.html", + "Properties": { + "AccessPolicies": { + "Documentation": "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-elasticsearch-domain.html#cfn-elasticsearch-domain-accesspolicies", + "PrimitiveType": "Json", + "Required": false, + "UpdateType": "Mutable" + }, + "AdvancedOptions": { + "Documentation": "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-elasticsearch-domain.html#cfn-elasticsearch-domain-advancedoptions", + "DuplicatesAllowed": false, + "PrimitiveItemType": "String", + "Required": false, + "Type": "Map", + "UpdateType": "Mutable" + }, + "DomainName": { + "Documentation": "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-elasticsearch-domain.html#cfn-elasticsearch-domain-domainname", + "PrimitiveType": "String", + "Required": false, + "UpdateType": "Immutable" + }, + "EBSOptions": { + "Documentation": "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-elasticsearch-domain.html#cfn-elasticsearch-domain-ebsoptions", + "Required": false, + "Type": "EBSOptions", + "UpdateType": "Mutable" + }, + "ElasticsearchClusterConfig": { + "Documentation": "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-elasticsearch-domain.html#cfn-elasticsearch-domain-elasticsearchclusterconfig", + "Required": false, + "Type": "ElasticsearchClusterConfig", + "UpdateType": "Mutable" + }, + "ElasticsearchVersion": { + "Documentation": "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-elasticsearch-domain.html#cfn-elasticsearch-domain-elasticsearchversion", + "PrimitiveType": "String", + "Required": false, + "UpdateType": "Immutable" + }, + "SnapshotOptions": { + "Documentation": "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-elasticsearch-domain.html#cfn-elasticsearch-domain-snapshotoptions", + "Required": false, + "Type": "SnapshotOptions", + "UpdateType": "Mutable" + }, + "Tags": { + "Documentation": "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-elasticsearch-domain.html#cfn-elasticsearch-domain-tags", + 
"DuplicatesAllowed": true, + "ItemType": "Tag", + "Required": false, + "Type": "List", + "UpdateType": "Mutable" + } + } + } + }, + "ResourceSpecificationVersion": "1.4.1" +} diff --git a/tests/generator/test_specification_specification.py b/tests/generator/test_specification_specification.py new file mode 100644 index 000000000..10a5ab35e --- /dev/null +++ b/tests/generator/test_specification_specification.py @@ -0,0 +1,23 @@ +import unittest +from troposphere_gen.specification import Specification +import json + +from distutils.version import StrictVersion + + +class TestProperty(unittest.TestCase): + def test_resource(self): + with open("specification_testdata.json", "r") as f: + specificationdict = json.load(f) + + spec = Specification("SomeSpecification", specificationdict) + + # Check version and whether correct amount of resources and properties exist + self.assertEqual(StrictVersion(specificationdict["ResourceSpecificationVersion"]), + spec.resource_specification_version) + self.assertEqual(4, len(spec.property_types.values())) + self.assertEqual(1, len(spec.resource_types.values())) + + +if __name__ == '__main__': + unittest.main() From 6ffc5d262a3cc5b6d3c1a438c9bde29dafe5ce2a Mon Sep 17 00:00:00 2001 From: DrLuke Date: Sat, 26 Jan 2019 12:14:25 +0100 Subject: [PATCH 21/62] Remove code generation code from types --- .../generator/test_specification_property.py | 1 - troposphere_gen/types.py | 35 ++++--------------- 2 files changed, 6 insertions(+), 30 deletions(-) diff --git a/tests/generator/test_specification_property.py b/tests/generator/test_specification_property.py index 27a7000f0..8cee7c376 100644 --- a/tests/generator/test_specification_property.py +++ b/tests/generator/test_specification_property.py @@ -118,7 +118,6 @@ def test_subproperty(self): self.assertEqual(None, prop.primitive_type) self.assertEqual(Subproperty, type(prop.type)) - self.assertEqual(None, prop.type.print_class) if __name__ == '__main__': diff --git a/troposphere_gen/types.py b/troposphere_gen/types.py index d87c31c1d..7efe17ddf 100644 --- a/troposphere_gen/types.py +++ b/troposphere_gen/types.py @@ -9,16 +9,6 @@ from typing import Union -type_map = { - "String": str, - "Long": int, - "Integer": int, - "Double": float, - "Boolean": bool, - "Timestamp": str, - "Json": dict -} - class BaseType(): """Base Type all types inherit from""" @@ -28,28 +18,21 @@ class BaseType(): class PrimitiveType(BaseType): """Primitive type - Primitive types are String, Long, Integer, Double, Boolean, or Timestamp. 
+ Primitive types are String, Long, Integer, Double, Boolean, Timestamp, JSON """ def __init__(self, type: str): - if type in type_map: - self.type = type_map[type] # type: type + if type in ["String", "Long", "Integer", "Double", "Boolean", "Timestamp", "Json"]: + self.type: str = type else: raise ValueError("Invalid primitive type: %s" % type) - def __str__(self) -> str: - return self.type.__name__ - class Subproperty(BaseType): """Subproperty type defined in other part of specification""" def __init__(self, type: str) -> None: - self.type = type # type: str - self.print_class = None # type: type - - def __str__(self) -> str: - return self.print_class.__name__ + self.type: str = type class MapType(BaseType): @@ -59,10 +42,7 @@ class MapType(BaseType): """ def __init__(self, itemtype: Union[Subproperty, PrimitiveType]) -> None: - self.itemtype = itemtype # type: Union[Subproperty, PrimitiveType] - - def __str__(self) -> str: - return "Dict[str, %s]" % self.itemtype + self.itemtype: Union[Subproperty, PrimitiveType] = itemtype class ListType(BaseType): @@ -72,7 +52,4 @@ class ListType(BaseType): """ def __init__(self, itemtype: Union[Subproperty, PrimitiveType]) -> None: - self.itemtype = itemtype # type: Union[Subproperty, PrimitiveType] - - def __str__(self) -> str: - return "List[%s]" % self.itemtype + self.itemtype: Union[Subproperty, PrimitiveType] = itemtype From 36b9b9f987b761b139fc9f598c211990a3a5a5fc Mon Sep 17 00:00:00 2001 From: DrLuke Date: Sat, 26 Jan 2019 21:53:50 +0100 Subject: [PATCH 22/62] Remove name from Specification --- tests/generator/test_specification_specification.py | 2 +- troposphere_gen/specification.py | 3 +-- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/tests/generator/test_specification_specification.py b/tests/generator/test_specification_specification.py index 10a5ab35e..d98733221 100644 --- a/tests/generator/test_specification_specification.py +++ b/tests/generator/test_specification_specification.py @@ -10,7 +10,7 @@ def test_resource(self): with open("specification_testdata.json", "r") as f: specificationdict = json.load(f) - spec = Specification("SomeSpecification", specificationdict) + spec = Specification(specificationdict) # Check version and whether correct amount of resources and properties exist self.assertEqual(StrictVersion(specificationdict["ResourceSpecificationVersion"]), diff --git a/troposphere_gen/specification.py b/troposphere_gen/specification.py index 9d8ab53b4..438abb3bd 100644 --- a/troposphere_gen/specification.py +++ b/troposphere_gen/specification.py @@ -106,8 +106,7 @@ def parse(self, resourcedict: Dict) -> None: class Specification(): - def __init__(self, name: str, specificationdict: Dict) -> None: - self.name: str = name + def __init__(self, specificationdict: Dict) -> None: self.resource_specification_version: StrictVersion = None self.property_types: Dict[str, Property] = {} self.resource_types: Dict[str, Resource] = {} From c6e8e9dd61918219881e370daf00faf51f9e0711 Mon Sep 17 00:00:00 2001 From: DrLuke Date: Sat, 26 Jan 2019 21:57:52 +0100 Subject: [PATCH 23/62] Make Documentation, UpdateType and Required optional --- troposphere_gen/specification.py | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/troposphere_gen/specification.py b/troposphere_gen/specification.py index 438abb3bd..f81ecb58d 100644 --- a/troposphere_gen/specification.py +++ b/troposphere_gen/specification.py @@ -55,7 +55,10 @@ def __init__(self, name: str, propertydict: Dict) -> None: def parse(self, propertydict: 
Dict) -> None: """Parse JSON property definition""" - self.documentation = propertydict["Documentation"] + if "Documentation" in propertydict: + # Not all properties have documentation, for example + # AWS::EC2::LaunchTemplate.CapacityReservationPreference + self.documentation = propertydict["Documentation"] # If property contains subproperties, only parse those if "Properties" in propertydict: @@ -63,8 +66,10 @@ def parse(self, propertydict: Dict) -> None: self.properties[name] = Property(name, subpropertydict) return - self.update_type = propertydict["UpdateType"] - self.required = propertydict["Required"] + if "UpdateType" in propertydict: + self.update_type = propertydict["UpdateType"] + if "Required" in propertydict: + self.required = propertydict["Required"] super(Property, self).parse(propertydict) From 2bf70917c8bb6b6de47397740326183283b27e91 Mon Sep 17 00:00:00 2001 From: DrLuke Date: Sun, 27 Jan 2019 01:35:48 +0100 Subject: [PATCH 24/62] Determine whether property is common or namespaced --- troposphere_gen/specification.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/troposphere_gen/specification.py b/troposphere_gen/specification.py index f81ecb58d..131935dc8 100644 --- a/troposphere_gen/specification.py +++ b/troposphere_gen/specification.py @@ -51,6 +51,9 @@ def __init__(self, name: str, propertydict: Dict) -> None: self._update_type: str = None self.properties: Dict[str, Property] = {} + # If name isn't namespaced, property is common (example: Tag) + self.common: bool = "::" not in name + super(Property, self).__init__(name, propertydict) def parse(self, propertydict: Dict) -> None: From 972bc9a9c93d648d3d7b5765a67b93a7221b44e9 Mon Sep 17 00:00:00 2001 From: DrLuke Date: Sun, 27 Jan 2019 02:31:36 +0100 Subject: [PATCH 25/62] Add special case for error in specification --- troposphere_gen/types.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/troposphere_gen/types.py b/troposphere_gen/types.py index 7efe17ddf..a404eb27b 100644 --- a/troposphere_gen/types.py +++ b/troposphere_gen/types.py @@ -22,7 +22,9 @@ class PrimitiveType(BaseType): """ def __init__(self, type: str): - if type in ["String", "Long", "Integer", "Double", "Boolean", "Timestamp", "Json"]: + # Map is added here because AWS::ServiceDiscovery::Instance.InstanceAttributes has 'Map' as PrimitiveType + # Remove it once AWS fixed this + if type in ["String", "Long", "Integer", "Double", "Boolean", "Timestamp", "Json", "Map"]: self.type: str = type else: raise ValueError("Invalid primitive type: %s" % type) From f87013637ff204a1fe974d4d5d157c43666c97ac Mon Sep 17 00:00:00 2001 From: DrLuke Date: Sun, 27 Jan 2019 02:38:13 +0100 Subject: [PATCH 26/62] Make Attributes and Properties optional --- troposphere_gen/specification.py | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/troposphere_gen/specification.py b/troposphere_gen/specification.py index 131935dc8..e6e1e363c 100644 --- a/troposphere_gen/specification.py +++ b/troposphere_gen/specification.py @@ -106,11 +106,13 @@ def parse(self, resourcedict: Dict) -> None: """Parse JSON resource definition""" self.documentation = resourcedict["Documentation"] - for name, attributedict in resourcedict["Attributes"].items(): - self.attributes[name] = Attribute(name, attributedict) + if "Attributes" in resourcedict: + for name, attributedict in resourcedict["Attributes"].items(): + self.attributes[name] = Attribute(name, attributedict) - for name, propertydict in resourcedict["Properties"].items(): - 
self.properties[name] = Property(name, propertydict) + if "Properties" in resourcedict: + for name, propertydict in resourcedict["Properties"].items(): + self.properties[name] = Property(name, propertydict) class Specification(): @@ -128,6 +130,6 @@ def parse(self, specificationsdict: Dict) -> None: for name, attributedict in specificationsdict["PropertyTypes"].items(): self.property_types[name] = Property(name, attributedict) - if "ResourceType" in specificationsdict: - for name, propertydict in specificationsdict["ResourceType"].items(): + if "ResourceTypes" in specificationsdict: + for name, propertydict in specificationsdict["ResourceTypes"].items(): self.resource_types[name] = Resource(name, propertydict) From 7b0c520270354e6fd15ab9bf45d20357064046fc Mon Sep 17 00:00:00 2001 From: DrLuke Date: Sun, 27 Jan 2019 03:13:07 +0100 Subject: [PATCH 27/62] Add codedata module --- troposphere_gen/codedata.py | 73 +++++++++++++++++++++++++++++++++++++ 1 file changed, 73 insertions(+) create mode 100644 troposphere_gen/codedata.py diff --git a/troposphere_gen/codedata.py b/troposphere_gen/codedata.py new file mode 100644 index 000000000..6c0eff824 --- /dev/null +++ b/troposphere_gen/codedata.py @@ -0,0 +1,73 @@ +from typing import Dict, Union + +import re + +from troposphere_gen.specification import Property, Resource + + +def module_name_from_namespace(namespace: str) -> str: + """Parse module name from AWS namespace + + Examples: + AWS::EC2::InternetGateway -> EC2 + AWS::EC2::LaunchTemplate.PrivateIpAdd -> EC2 + """ + match = re.match(r"(?:AWS|Alexa)::(.*)::.*", namespace) + return match.group(1) + + +def class_name_from_property_name(propertyname: str) -> str: + """Parse AWS namespaced property names to class names + + Example: + AWS::WAF::SizeConstraintSet.FieldToMatch -> FieldToMatch + """ + match = re.match(r"(?:AWS|Alexa)::.*::.*\.(.*)", propertyname) + return match.group(1) + + +def class_name_from_resource_name(resourcename: str) -> str: + """Parse AWS namespaced resource name to class name + + Example: + AWS::WAF::SizeConstraintSet -> SizeConstraintSet + """ + match = re.match(r"(?:AWS|Alexa)::.*::(.*)", resourcename) + return match.group(1) + + +class ModuleData(): + """Convert multiple Specifications belonging to AWS Resource to module""" + + def __init__(self, name: str): + self.modulename: str = name + + self.properties: Dict[str, ClassData] = {} + self.resources: Dict[str, ClassData] = {} + + def add_property(self, name: str, property: Property) -> None: + self.properties[name] = ClassData(name, property) + + def add_resource(self, name: str, resource: Resource) -> None: + self.resources[name] = ClassData(name, resource) + + +class ClassData(): + """Convert Property or Resource to required classdata""" + + def __init__(self, name: str, data: Union[Property, Resource]) -> None: + if type(data) is Property and data.common: + self.classname: str = name + elif type(data) is Property: + self.classname: str = class_name_from_property_name(name) + elif type(data) is Resource: + self.classname: str = class_name_from_resource_name(name) + self.data: Property = data + + self.subproperties: Dict[str, Property] = {} + self.get_subproperties() + + def get_subproperties(self) -> None: + """Gets all subproperties of property""" + for name, prop in self.data.properties.items(): + self.subproperties[name] = prop From e9b48775af3a9b4b2769649c8633a411a9174900 Mon Sep 17 00:00:00 2001 From: DrLuke Date: Sun, 27 Jan 2019 03:13:19 +0100 Subject: [PATCH 28/62] Add generator module --- 
troposphere_gen/generator.py | 56 ++++++++++++++++++++++++++++++++++++ 1 file changed, 56 insertions(+) create mode 100644 troposphere_gen/generator.py diff --git a/troposphere_gen/generator.py b/troposphere_gen/generator.py new file mode 100644 index 000000000..06e715dff --- /dev/null +++ b/troposphere_gen/generator.py @@ -0,0 +1,56 @@ +"""Generator that takes data parsed from specification and generates code + +The code generator takes data parsed from specification, and converts it into +modules with classes. The code style is dictated by the policy used. +""" + +from troposphere_gen.specification import Specification +from troposphere_gen.codedata import ModuleData +from troposphere_gen.codedata import module_name_from_namespace + +from typing import Dict + +# Some services are named after reserved keywords or require other exceptions +modname_exceptions = { + "Lambda": "AwsLambda" +} + + +class Generator(): + def __init__(self, specification: Specification): + self.specification: Specification = specification + + self.modules: Dict[str, ModuleData] = {} + + self.gen_property_classdata() + self.gen_resource_classdata() + + def gen_property_classdata(self): + """Generates class data for each property and adds it to module""" + for name, property in self.specification.property_types.items(): + moddata = self.get_module(name) + + moddata.add_property(name, property) + + def gen_resource_classdata(self): + """Generates class data for each property and adds it to module""" + for name, resource in self.specification.resource_types.items(): + moddata = self.get_module(name) + + moddata.add_resource(name, resource) + + def get_module(self, name: str) -> ModuleData: + """Find or create module from namespaced name""" + if not "::" in name: + # Some properties aren't namespaced, for example 'Tag' + modname = "common" + else: + modname = module_name_from_namespace(name) + + if modname in modname_exceptions: + modname = modname_exceptions[modname] + + if modname not in self.modules: + self.modules[modname] = ModuleData(modname) + + return self.modules[modname] From 080bf460f0f50e20a217792b47b179873ab05410 Mon Sep 17 00:00:00 2001 From: DrLuke Date: Sun, 27 Jan 2019 03:13:40 +0100 Subject: [PATCH 29/62] Add policy module --- troposphere_gen/policy.py | 90 +++++++++++++++++++++++++++++++++++++++ 1 file changed, 90 insertions(+) create mode 100644 troposphere_gen/policy.py diff --git a/troposphere_gen/policy.py b/troposphere_gen/policy.py new file mode 100644 index 000000000..0fea98f63 --- /dev/null +++ b/troposphere_gen/policy.py @@ -0,0 +1,90 @@ +"""A policy contains information on how code shall be generated from data + +The policy determines how the specification data is converted to code. For +example it tells to use the 'print(foo)' syntax for python3 code, and use +'print foo' for python2 code. 
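The cc_to_sc helper defined just below converts the specification's CamelCase property names into the snake_case attribute names used in the generated classes. A small self-contained check of the two-pass regex (the property names here are only examples):

import re

def cc_to_sc(name: str) -> str:
    # CamelCase -> snake_case, same two-pass substitution as in troposphere_gen/policy.py
    s1 = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', name)
    return re.sub('([a-z0-9])([A-Z])', r'\1_\2', s1).lower()

assert cc_to_sc("CacheSecurityGroupName") == "cache_security_group_name"
assert cc_to_sc("TargetOriginId") == "target_origin_id"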
+""" + +from troposphere_gen.codedata import ModuleData, ClassData +from troposphere_gen.specification import Property, Resource + +import re + +import datetime + + +def cc_to_sc(name: str) -> str: + """Convert CamelCase to snake_case""" + s1 = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', name) + return re.sub('([a-z0-9])([A-Z])', r'\1_\2', s1).lower() + + +class Policy(): + def __init__(self): + pass + + def get_type(self, prop: Property) -> str: + type_map = { + "String": "str", + "Long": "int", + "Integer": "int", + "Double": "float", + "Boolean": "bool", + "Timestamp": "str", # TODO: Add Timestamp class to troposphere + "Json": "Dict", + "Map": "Dict" # Workaround for AWS::ServiceDiscovery::Instance.InstanceAttributes, see types.py TODO: remove + } + + if prop.primitive_type is not None: + return type_map[prop.primitive_type.type] + + def module_head_format(self, moduledata: ModuleData): + """Construct module code + + """ + modulename: str = moduledata.modulename + + # Copyright (c) 2012-2018, Mark Peek + # All rights reserved. + # + # See LICENSE file for full license. + + license_head = ( + f"\"\"\"Module for AWS {modulename} service\n" + f"Copyright (c) 2012-{datetime.datetime.now().year}, Mark Peek \n" + f"All rights reserved.\n" + f"\n" + f"See LICENSE file for full license." + f"\"\"\"\n" + ) + + imports = "\nfrom troposphere import AWSProperty, AWSResource\n" + if modulename is not "common": + imports += "from troposphere.common import Tag\n" + + modulecode = license_head + imports + "\n" + + return modulecode + + def class_format(self, classdata: ClassData) -> str: + """Construct class code + + """ + + if type(classdata.data) is Property: + parentclass: str = "AWSProperty" + elif type(classdata.data) is Resource: + parentclass: str = "AWSResource" + + properties: str = "" + for name, prop in classdata.subproperties.items(): + properties += f"\t\t'{cc_to_sc(name)}': ({self.get_type(prop)}, {prop.required})\n" + + classcode = ( + f"class {classdata.classname}(AWSProperty):\n" + f" props = {{\n" + f"{properties}" + f" }}\n" + ) + + return classcode From f798b69646816c13dd465f30e3ae2baf1d5cb56b Mon Sep 17 00:00:00 2001 From: DrLuke Date: Sun, 27 Jan 2019 03:15:08 +0100 Subject: [PATCH 30/62] Add TODO to workaround --- troposphere_gen/types.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/troposphere_gen/types.py b/troposphere_gen/types.py index a404eb27b..d9853730c 100644 --- a/troposphere_gen/types.py +++ b/troposphere_gen/types.py @@ -23,7 +23,7 @@ class PrimitiveType(BaseType): def __init__(self, type: str): # Map is added here because AWS::ServiceDiscovery::Instance.InstanceAttributes has 'Map' as PrimitiveType - # Remove it once AWS fixed this + # Remove it once AWS fixed this TODO: remove if type in ["String", "Long", "Integer", "Double", "Boolean", "Timestamp", "Json", "Map"]: self.type: str = type else: From e11983bba499f83e4fdc5c3a2fc0b978f52bf84c Mon Sep 17 00:00:00 2001 From: DrLuke Date: Sun, 27 Jan 2019 03:24:04 +0100 Subject: [PATCH 31/62] Use parentclass --- troposphere_gen/policy.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/troposphere_gen/policy.py b/troposphere_gen/policy.py index 0fea98f63..6d6a3a599 100644 --- a/troposphere_gen/policy.py +++ b/troposphere_gen/policy.py @@ -74,14 +74,14 @@ def class_format(self, classdata: ClassData) -> str: if type(classdata.data) is Property: parentclass: str = "AWSProperty" elif type(classdata.data) is Resource: - parentclass: str = "AWSResource" + parentclass: str = "AWSObject" 
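To make the class_format template concrete: at this stage of the series it emits a plain props dictionary per class, so a hypothetical property with one required string field and one optional boolean field would come out roughly as follows (class and field names are invented for illustration; a Resource would inherit AWSObject instead of AWSProperty):

from troposphere import AWSProperty

class ExampleSettings(AWSProperty):
    props = {
        'bucket_name': (str, True),
        'enabled': (bool, False),
    }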
properties: str = "" for name, prop in classdata.subproperties.items(): - properties += f"\t\t'{cc_to_sc(name)}': ({self.get_type(prop)}, {prop.required})\n" + properties += f"\t\t'{cc_to_sc(name)}': ({self.get_type(prop)}, {prop.required}),\n" classcode = ( - f"class {classdata.classname}(AWSProperty):\n" + f"class {classdata.classname}({parentclass}):\n" f" props = {{\n" f"{properties}" f" }}\n" From 2c29940767d9cea4eadd25f38a6ffafda2f38654 Mon Sep 17 00:00:00 2001 From: DrLuke Date: Sun, 27 Jan 2019 03:27:20 +0100 Subject: [PATCH 32/62] Use correct parent class --- troposphere_gen/policy.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/troposphere_gen/policy.py b/troposphere_gen/policy.py index 6d6a3a599..f7aa1937f 100644 --- a/troposphere_gen/policy.py +++ b/troposphere_gen/policy.py @@ -58,7 +58,8 @@ def module_head_format(self, moduledata: ModuleData): f"\"\"\"\n" ) - imports = "\nfrom troposphere import AWSProperty, AWSResource\n" + imports = "\nfrom troposphere import AWSProperty, AWSObject\n" + imports += "from typing import Dict, List\n" if modulename is not "common": imports += "from troposphere.common import Tag\n" @@ -78,7 +79,7 @@ def class_format(self, classdata: ClassData) -> str: properties: str = "" for name, prop in classdata.subproperties.items(): - properties += f"\t\t'{cc_to_sc(name)}': ({self.get_type(prop)}, {prop.required}),\n" + properties += f" '{cc_to_sc(name)}': ({self.get_type(prop)}, {prop.required}),\n" classcode = ( f"class {classdata.classname}({parentclass}):\n" From fe1ee2ec9633a1f707256a5d9dbecf8b596131bf Mon Sep 17 00:00:00 2001 From: DrLuke Date: Sun, 27 Jan 2019 03:29:05 +0100 Subject: [PATCH 33/62] Add spacing methods --- troposphere_gen/policy.py | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/troposphere_gen/policy.py b/troposphere_gen/policy.py index f7aa1937f..a47eb3ef5 100644 --- a/troposphere_gen/policy.py +++ b/troposphere_gen/policy.py @@ -49,12 +49,12 @@ def module_head_format(self, moduledata: ModuleData): # # See LICENSE file for full license. - license_head = ( + docstring = ( f"\"\"\"Module for AWS {modulename} service\n" f"Copyright (c) 2012-{datetime.datetime.now().year}, Mark Peek \n" f"All rights reserved.\n" f"\n" - f"See LICENSE file for full license." 
+ f"See LICENSE file for full license.\n" f"\"\"\"\n" ) @@ -63,7 +63,7 @@ def module_head_format(self, moduledata: ModuleData): if modulename is not "common": imports += "from troposphere.common import Tag\n" - modulecode = license_head + imports + "\n" + modulecode = docstring + imports return modulecode @@ -89,3 +89,9 @@ def class_format(self, classdata: ClassData) -> str: ) return classcode + + def between_class(self) -> str: + return "\n\n" + + def after_import(self) -> str: + return "\n\n" From 9a77e38f5bed4a824653382789525dcd4df08e34 Mon Sep 17 00:00:00 2001 From: DrLuke Date: Sun, 27 Jan 2019 03:39:47 +0100 Subject: [PATCH 34/62] Only fetch subproperties for properties --- troposphere_gen/codedata.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/troposphere_gen/codedata.py b/troposphere_gen/codedata.py index 6c0eff824..1088d12c3 100644 --- a/troposphere_gen/codedata.py +++ b/troposphere_gen/codedata.py @@ -65,7 +65,8 @@ def __init__(self, name: str, data: Union[Property, Resource]) -> None: self.data: Property = data self.subproperties: Dict[str, Property] = {} - self.get_subproperties() + if type(data) is Property: + self.get_subproperties() def get_subproperties(self) -> None: """Gets all subproperties of property""" From 191758e9f638ce9fb565bf55ccfeed3b6e110a1b Mon Sep 17 00:00:00 2001 From: DrLuke Date: Sun, 27 Jan 2019 03:42:30 +0100 Subject: [PATCH 35/62] Prevent duplicate property definitions --- troposphere_gen/codedata.py | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/troposphere_gen/codedata.py b/troposphere_gen/codedata.py index 1088d12c3..c738ae495 100644 --- a/troposphere_gen/codedata.py +++ b/troposphere_gen/codedata.py @@ -46,9 +46,17 @@ def __init__(self, name: str): self.resources: Dict[str, ClassData] = {} def add_property(self, name: str, property: Property) -> None: + # Some properties are redefined for different Resources, but produce the exact same code. + for existingname in self.properties: + if class_name_from_property_name(existingname) == class_name_from_property_name(name): + return self.properties[name] = ClassData(name, property) def add_resource(self, name: str, resource: Resource) -> None: + # Some properties are redefined for different Resources, but produce the exact same code. 
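Both add_property and add_resource skip an entry whose derived class name has already been registered, since several namespaced names can collapse onto the same generated class. A tiny illustration with invented names (class_name_from_property_name is the helper from this module):

from troposphere_gen.codedata import class_name_from_property_name

# Hypothetical names: both resolve to class "Settings" inside module "Foo",
# so only whichever is added first produces a generated class.
assert class_name_from_property_name("AWS::Foo::Alpha.Settings") == "Settings"
assert class_name_from_property_name("AWS::Foo::Beta.Settings") == "Settings"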
+ for existingname in self.resources: + if class_name_from_resource_name(existingname) == class_name_from_resource_name(name): + return self.resources[name] = ClassData(name, resource) @@ -65,8 +73,7 @@ def __init__(self, name: str, data: Union[Property, Resource]) -> None: self.data: Property = data self.subproperties: Dict[str, Property] = {} - if type(data) is Property: - self.get_subproperties() + self.get_subproperties() def get_subproperties(self) -> None: """Gets all subproperties of property""" From 01caa0ec23fa4ec519cd3877c628934cddfee4d0 Mon Sep 17 00:00:00 2001 From: DrLuke Date: Sun, 27 Jan 2019 04:20:21 +0100 Subject: [PATCH 36/62] Resolve name conflicts --- troposphere_gen/codedata.py | 5 +++++ troposphere_gen/generator.py | 3 +++ 2 files changed, 8 insertions(+) diff --git a/troposphere_gen/codedata.py b/troposphere_gen/codedata.py index c738ae495..b43bb0211 100644 --- a/troposphere_gen/codedata.py +++ b/troposphere_gen/codedata.py @@ -59,6 +59,11 @@ def add_resource(self, name: str, resource: Resource) -> None: return self.resources[name] = ClassData(name, resource) + def resolve_name_conflicts(self): + for prop in self.properties.values(): + for resource in self.resources.values(): + if prop.classname == resource.classname: + prop.classname += "Property" class ClassData(): """Convert Property or Resource to required classdata""" diff --git a/troposphere_gen/generator.py b/troposphere_gen/generator.py index 06e715dff..309f7d608 100644 --- a/troposphere_gen/generator.py +++ b/troposphere_gen/generator.py @@ -25,6 +25,9 @@ def __init__(self, specification: Specification): self.gen_property_classdata() self.gen_resource_classdata() + for moddata in self.modules.values(): + moddata.resolve_name_conflicts() + def gen_property_classdata(self): """Generates class data for each property and adds it to module""" for name, property in self.specification.property_types.items(): From 1ef2f95787ef7e590539f2945c96537f8a44066f Mon Sep 17 00:00:00 2001 From: DrLuke Date: Sun, 27 Jan 2019 04:20:53 +0100 Subject: [PATCH 37/62] Complete get_type --- troposphere_gen/policy.py | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/troposphere_gen/policy.py b/troposphere_gen/policy.py index a47eb3ef5..0568b60ea 100644 --- a/troposphere_gen/policy.py +++ b/troposphere_gen/policy.py @@ -7,6 +7,7 @@ from troposphere_gen.codedata import ModuleData, ClassData from troposphere_gen.specification import Property, Resource +from troposphere_gen.types import ListType, MapType import re @@ -37,6 +38,19 @@ def get_type(self, prop: Property) -> str: if prop.primitive_type is not None: return type_map[prop.primitive_type.type] + else: + if type(prop.type) == ListType: + if prop.item_type is not None: + return f"List[{prop.item_type.type}]" + elif prop.primitive_item_type is not None: + return f"List[{prop.primitive_item_type.type}]" + elif type(prop.type) == MapType: + if prop.item_type is not None: + return f"Dict[str, {prop.item_type.type}]" + elif prop.primitive_item_type is not None: + return f"Dict[str, {prop.primitive_item_type.type}]" + else: + return prop.type.type def module_head_format(self, moduledata: ModuleData): """Construct module code From 1d20be789e1072f411cd796ee8c3cee46ebd8f84 Mon Sep 17 00:00:00 2001 From: DrLuke Date: Sun, 27 Jan 2019 05:16:17 +0100 Subject: [PATCH 38/62] Resolve class dependencies of properties --- troposphere_gen/codedata.py | 34 ++++++++++++++++++++++++++++++++-- troposphere_gen/generator.py | 1 + 2 files changed, 33 insertions(+), 2 deletions(-) diff --git 
a/troposphere_gen/codedata.py b/troposphere_gen/codedata.py index b43bb0211..0c0d6e1cf 100644 --- a/troposphere_gen/codedata.py +++ b/troposphere_gen/codedata.py @@ -3,7 +3,9 @@ import re from troposphere_gen.specification import Property, Resource +from troposphere_gen.types import ListType, MapType +from collections import OrderedDict def module_name_from_namespace(namespace: str) -> str: """Parse module name from AWS namespace @@ -42,8 +44,8 @@ class ModuleData(): def __init__(self, name: str): self.modulename: str = name - self.properties: Dict[str, ClassData] = {} - self.resources: Dict[str, ClassData] = {} + self.properties: OrderedDict[str, ClassData] = OrderedDict() + self.resources: OrderedDict[str, ClassData] = OrderedDict() def add_property(self, name: str, property: Property) -> None: # Some properties are redefined for different Resources, but produce the exact same code. @@ -65,6 +67,34 @@ def resolve_name_conflicts(self): if prop.classname == resource.classname: prop.classname += "Property" + def resolve_dependencies(self): + """Make sure classes are defined before they are referenced by other classes""" + for i in range(100): # Make 100 attemps to fix dependencies + # Check if property references subproperty of following properties + # * Iterate over all properties + # * Check if any of the properties coming AFTER current property is + # referenced by checkprop + # * If yes, move checkprop to end of properties + # * Do 100 attempts at bubble-sort-ish resolving before failing + done = True # Done if clean runthrough with no deps is achieved + propertiescopy = self.properties.copy() + for checkidx, checkitem in enumerate(propertiescopy.items()): + checkname, checkprop = checkitem + for idx, prop in enumerate(propertiescopy.values()): + if idx <= checkidx: # Gone past check prop + continue + for subidx, subprop in enumerate(checkprop.subproperties.values()): + if subprop.type is not None: + if type(subprop.type) is ListType or type(subprop.type) is MapType: + continue # List and Maps are unaffected + if subprop.type.type == prop.classname: + self.properties.move_to_end(checkname, last=True) + done = False + if done: + break + else: # Only triggered if for-loop finishes without break! 
+ raise Exception(f"Couldn't resolve possible dependency cycle in {self.modulename}") + class ClassData(): """Convert Property or Resource to required classdata""" diff --git a/troposphere_gen/generator.py b/troposphere_gen/generator.py index 309f7d608..f039a27e5 100644 --- a/troposphere_gen/generator.py +++ b/troposphere_gen/generator.py @@ -27,6 +27,7 @@ def __init__(self, specification: Specification): for moddata in self.modules.values(): moddata.resolve_name_conflicts() + moddata.resolve_dependencies() def gen_property_classdata(self): """Generates class data for each property and adds it to module""" From 7f927ef8f3e2ee098eb2690070fd9b26de3fa700 Mon Sep 17 00:00:00 2001 From: DrLuke Date: Sun, 27 Jan 2019 14:01:20 +0100 Subject: [PATCH 39/62] Fix List and Map type generation --- troposphere_gen/policy.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/troposphere_gen/policy.py b/troposphere_gen/policy.py index 0568b60ea..2182fd8c4 100644 --- a/troposphere_gen/policy.py +++ b/troposphere_gen/policy.py @@ -40,15 +40,15 @@ def get_type(self, prop: Property) -> str: return type_map[prop.primitive_type.type] else: if type(prop.type) == ListType: - if prop.item_type is not None: - return f"List[{prop.item_type.type}]" - elif prop.primitive_item_type is not None: - return f"List[{prop.primitive_item_type.type}]" + if prop.type.itemtype.type in type_map: + return f"List[{type_map[prop.type.itemtype.type]}]" + else: + return f"List[{prop.type.itemtype.type}]" elif type(prop.type) == MapType: - if prop.item_type is not None: - return f"Dict[str, {prop.item_type.type}]" - elif prop.primitive_item_type is not None: - return f"Dict[str, {prop.primitive_item_type.type}]" + if prop.type.itemtype.type in type_map: + return f"Dict[str, {type_map[prop.type.itemtype.type]}]" + else: + return f"Dict[str, {prop.type.itemtype.type}]" else: return prop.type.type From 688a417190decc6df438ea69ab7d83ea5316b295 Mon Sep 17 00:00:00 2001 From: DrLuke Date: Sun, 27 Jan 2019 14:03:31 +0100 Subject: [PATCH 40/62] Remove unused attributes --- troposphere_gen/specification.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/troposphere_gen/specification.py b/troposphere_gen/specification.py index e6e1e363c..1788f8820 100644 --- a/troposphere_gen/specification.py +++ b/troposphere_gen/specification.py @@ -14,8 +14,6 @@ class Attribute(): def __init__(self, name: str, attributedict: Dict) -> None: self.name: str = name - self.item_type: Subproperty = None - self.primitive_item_type: PrimitiveType = None self.primitive_type: PrimitiveType = None self.type: Union[Subproperty, ListType, MapType] = None From a37e17f56aecee2d4e3801a378aee2d4e149e885 Mon Sep 17 00:00:00 2001 From: DrLuke Date: Sun, 27 Jan 2019 14:23:34 +0100 Subject: [PATCH 41/62] Take Lists and Maps into account --- troposphere_gen/codedata.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/troposphere_gen/codedata.py b/troposphere_gen/codedata.py index 0c0d6e1cf..3a27cdec5 100644 --- a/troposphere_gen/codedata.py +++ b/troposphere_gen/codedata.py @@ -86,10 +86,15 @@ def resolve_dependencies(self): for subidx, subprop in enumerate(checkprop.subproperties.values()): if subprop.type is not None: if type(subprop.type) is ListType or type(subprop.type) is MapType: - continue # List and Maps are unaffected - if subprop.type.type == prop.classname: + if subprop.type.itemtype.type == prop.classname: + self.properties.move_to_end(checkname, last=True) + done = False + elif subprop.type.type == 
prop.classname: self.properties.move_to_end(checkname, last=True) done = False + if done is False: + # Something was moved, continue with next item + break if done: break else: # Only triggered if for-loop finishes without break! From beb6b7176b374e84dbbeefd275b5f4c5d775a3c8 Mon Sep 17 00:00:00 2001 From: DrLuke Date: Sun, 27 Jan 2019 15:01:40 +0100 Subject: [PATCH 42/62] Resolve Conflicts of property names --- troposphere_gen/codedata.py | 23 +++++++++++++++++++---- troposphere_gen/policy.py | 19 ++++++++++++------- 2 files changed, 31 insertions(+), 11 deletions(-) diff --git a/troposphere_gen/codedata.py b/troposphere_gen/codedata.py index 3a27cdec5..3594d3c24 100644 --- a/troposphere_gen/codedata.py +++ b/troposphere_gen/codedata.py @@ -1,4 +1,4 @@ -from typing import Dict, Union +from typing import Dict, Union, List import re @@ -44,6 +44,8 @@ class ModuleData(): def __init__(self, name: str): self.modulename: str = name + self.conflictednames: List[str] = [] # List of conflicted properties + self.properties: OrderedDict[str, ClassData] = OrderedDict() self.resources: OrderedDict[str, ClassData] = OrderedDict() @@ -62,10 +64,21 @@ def add_resource(self, name: str, resource: Resource) -> None: self.resources[name] = ClassData(name, resource) def resolve_name_conflicts(self): - for prop in self.properties.values(): + # Detect if any property has the same name as resource + # If yes, append 'Property' to the class name + for cd in self.properties.values(): for resource in self.resources.values(): - if prop.classname == resource.classname: - prop.classname += "Property" + if cd.classname == resource.classname: + self.conflictednames.append(cd.classname) + cd.classname += "Property" + + # Now we have to replace all occurences of this Property with the + # adjusted name + for cd in self.properties.values(): + for name, subprop in cd.subproperties.items(): + if type(subprop.type) is ListType or type(subprop.type) is MapType: + if subprop.type.itemtype.type in self.conflictednames: + cd.conflictedproperties.append(name) def resolve_dependencies(self): """Make sure classes are defined before they are referenced by other classes""" @@ -112,6 +125,8 @@ def __init__(self, name: str, data: Union[Property, Resource]) -> None: self.classname: str = class_name_from_resource_name(name) self.data: Property = data + self.conflictedproperties: List[str] = [] + self.subproperties: Dict[str, Property] = {} self.get_subproperties() diff --git a/troposphere_gen/policy.py b/troposphere_gen/policy.py index 2182fd8c4..8868dae3f 100644 --- a/troposphere_gen/policy.py +++ b/troposphere_gen/policy.py @@ -24,7 +24,7 @@ class Policy(): def __init__(self): pass - def get_type(self, prop: Property) -> str: + def get_type(self, prop: Property, deconflict: bool=False) -> str: type_map = { "String": "str", "Long": "int", @@ -36,21 +36,25 @@ def get_type(self, prop: Property) -> str: "Map": "Dict" # Workaround for AWS::ServiceDiscovery::Instance.InstanceAttributes, see types.py TODO: remove } + deconflicter: str = "" + if deconflict: + deconflicter = "Property" + if prop.primitive_type is not None: return type_map[prop.primitive_type.type] else: if type(prop.type) == ListType: if prop.type.itemtype.type in type_map: - return f"List[{type_map[prop.type.itemtype.type]}]" + return f"List[{type_map[prop.type.itemtype.type]}{deconflicter}]" else: - return f"List[{prop.type.itemtype.type}]" + return f"List[{prop.type.itemtype.type}{deconflicter}]" elif type(prop.type) == MapType: if prop.type.itemtype.type in type_map: - return 
f"Dict[str, {type_map[prop.type.itemtype.type]}]" + return f"Dict[str, {type_map[prop.type.itemtype.type]}{deconflicter}]" else: - return f"Dict[str, {prop.type.itemtype.type}]" + return f"Dict[str, {prop.type.itemtype.type}]{deconflicter}" else: - return prop.type.type + return f"{prop.type.type}{deconflicter}" def module_head_format(self, moduledata: ModuleData): """Construct module code @@ -93,7 +97,8 @@ def class_format(self, classdata: ClassData) -> str: properties: str = "" for name, prop in classdata.subproperties.items(): - properties += f" '{cc_to_sc(name)}': ({self.get_type(prop)}, {prop.required}),\n" + conflicted = name in classdata.conflictedproperties + properties += f" '{cc_to_sc(name)}': ({self.get_type(prop, conflicted)}, {prop.required}),\n" classcode = ( f"class {classdata.classname}({parentclass}):\n" From be9fe50f4839e9132932f503c0a5936aae56d074 Mon Sep 17 00:00:00 2001 From: DrLuke Date: Sun, 27 Jan 2019 15:05:22 +0100 Subject: [PATCH 43/62] Add auto-generation notice --- troposphere_gen/policy.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/troposphere_gen/policy.py b/troposphere_gen/policy.py index 8868dae3f..01e9b1fdf 100644 --- a/troposphere_gen/policy.py +++ b/troposphere_gen/policy.py @@ -6,7 +6,7 @@ """ from troposphere_gen.codedata import ModuleData, ClassData -from troposphere_gen.specification import Property, Resource +from troposphere_gen.specification import Property, Resource, Specification from troposphere_gen.types import ListType, MapType import re @@ -56,7 +56,7 @@ def get_type(self, prop: Property, deconflict: bool=False) -> str: else: return f"{prop.type.type}{deconflicter}" - def module_head_format(self, moduledata: ModuleData): + def module_head_format(self, moduledata: ModuleData, specification: Specification): """Construct module code """ @@ -73,6 +73,9 @@ def module_head_format(self, moduledata: ModuleData): f"All rights reserved.\n" f"\n" f"See LICENSE file for full license.\n" + f"\n" + f"AUOTGENERATED CODE, DO NOT EDIT!\n" + f"Generated from Specification Version {specification.resource_specification_version}\n" f"\"\"\"\n" ) From 3bd6715d85ba32f3290ff70a9641a9c68aaee635 Mon Sep 17 00:00:00 2001 From: DrLuke Date: Sun, 27 Jan 2019 15:34:37 +0100 Subject: [PATCH 44/62] Make policy abstract --- troposphere_gen/policy.py | 28 ++++++++++++++++++++++++---- 1 file changed, 24 insertions(+), 4 deletions(-) diff --git a/troposphere_gen/policy.py b/troposphere_gen/policy.py index 01e9b1fdf..e48a385fe 100644 --- a/troposphere_gen/policy.py +++ b/troposphere_gen/policy.py @@ -21,10 +21,24 @@ def cc_to_sc(name: str) -> str: class Policy(): - def __init__(self): + def get_type(self, prop: Property, deconflict: bool = False) -> str: pass - def get_type(self, prop: Property, deconflict: bool=False) -> str: + def module_head_format(self, moduledata: ModuleData, specification: Specification): + pass + + def class_format(self, classdata: ClassData) -> str: + pass + + def between_class(self) -> str: + pass + + def after_import(self) -> str: + pass + + +class Policy_3_7(Policy): + def get_type(self, prop: Property, deconflict: bool = False) -> str: type_map = { "String": "str", "Long": "int", @@ -33,7 +47,8 @@ def get_type(self, prop: Property, deconflict: bool=False) -> str: "Boolean": "bool", "Timestamp": "str", # TODO: Add Timestamp class to troposphere "Json": "Dict", - "Map": "Dict" # Workaround for AWS::ServiceDiscovery::Instance.InstanceAttributes, see types.py TODO: remove + "Map": "Dict" + # Workaround for 
AWS::ServiceDiscovery::Instance.InstanceAttributes, see types.py TODO: remove } deconflicter: str = "" @@ -67,6 +82,11 @@ def module_head_format(self, moduledata: ModuleData, specification: Specificatio # # See LICENSE file for full license. + versionstring = str(specification.resource_specification_version) + if specification.resource_specification_version.version[2] == 0: + # StrictVersion doesn't print patch if it's 0 + versionstring += ".0" + docstring = ( f"\"\"\"Module for AWS {modulename} service\n" f"Copyright (c) 2012-{datetime.datetime.now().year}, Mark Peek \n" @@ -75,7 +95,7 @@ def module_head_format(self, moduledata: ModuleData, specification: Specificatio f"See LICENSE file for full license.\n" f"\n" f"AUOTGENERATED CODE, DO NOT EDIT!\n" - f"Generated from Specification Version {specification.resource_specification_version}\n" + f"Generated from Specification Version {versionstring}\n" f"\"\"\"\n" ) From 97aad233ccb35abe241e9aa58f22b131f041d7f4 Mon Sep 17 00:00:00 2001 From: DrLuke Date: Sun, 27 Jan 2019 15:40:22 +0100 Subject: [PATCH 45/62] Add python 2.7 policy --- troposphere_gen/policy.py | 95 +++++++++++++++++++++++++++++++++++++++ 1 file changed, 95 insertions(+) diff --git a/troposphere_gen/policy.py b/troposphere_gen/policy.py index e48a385fe..2ba65f6de 100644 --- a/troposphere_gen/policy.py +++ b/troposphere_gen/policy.py @@ -137,3 +137,98 @@ def between_class(self) -> str: def after_import(self) -> str: return "\n\n" + + +class Policy_2_7(Policy): + def get_type(self, prop: Property, deconflict: bool = False) -> str: + type_map = { + "String": "basestring", + "Long": "int", + "Integer": "int", + "Double": "float", + "Boolean": "boolean", + "Timestamp": "basestring", # TODO: Add Timestamp class to troposphere + "Json": "dict", + "Map": "dict" + # Workaround for AWS::ServiceDiscovery::Instance.InstanceAttributes, see types.py TODO: remove + } + + deconflicter: str = "" + if deconflict: + deconflicter = "Property" + + if prop.primitive_type is not None: + return type_map[prop.primitive_type.type] + else: + if type(prop.type) == ListType: + return "list" + elif type(prop.type) == MapType: + return "dict" + else: + return f"{prop.type.type}{deconflicter}" + + def module_head_format(self, moduledata: ModuleData, specification: Specification): + """Construct module code + + """ + modulename: str = moduledata.modulename + + # Copyright (c) 2012-2018, Mark Peek + # All rights reserved. + # + # See LICENSE file for full license. 
+ + versionstring = str(specification.resource_specification_version) + if specification.resource_specification_version.version[2] == 0: + # StrictVersion doesn't print patch if it's 0 + versionstring += ".0" + + docstring = ( + f"\"\"\"Module for AWS {modulename} service\n" + f"Copyright (c) 2012-{datetime.datetime.now().year}, Mark Peek \n" + f"All rights reserved.\n" + f"\n" + f"See LICENSE file for full license.\n" + f"\n" + f"AUOTGENERATED CODE, DO NOT EDIT!\n" + f"Generated from Specification Version {versionstring}\n" + f"\"\"\"\n" + ) + + imports = "\nfrom troposphere import AWSProperty, AWSObject\n" + if modulename is not "common": + imports += "from troposphere.common import Tag\n" + + modulecode = docstring + imports + + return modulecode + + def class_format(self, classdata: ClassData) -> str: + """Construct class code + + """ + + if type(classdata.data) is Property: + parentclass: str = "AWSProperty" + elif type(classdata.data) is Resource: + parentclass: str = "AWSObject" + + properties: str = "" + for name, prop in classdata.subproperties.items(): + conflicted = name in classdata.conflictedproperties + properties += f" '{cc_to_sc(name)}': ({self.get_type(prop, conflicted)}, {prop.required}),\n" + + classcode = ( + f"class {classdata.classname}({parentclass}):\n" + f" props = {{\n" + f"{properties}" + f" }}\n" + ) + + return classcode + + def between_class(self) -> str: + return "\n\n" + + def after_import(self) -> str: + return "\n\n" From e07bc561cde989d09cccf56aeb9740f2b487bced Mon Sep 17 00:00:00 2001 From: DrLuke Date: Sun, 27 Jan 2019 15:41:12 +0100 Subject: [PATCH 46/62] Add generator script --- scripts/gen.py | 213 ++++++------------------------------------------- 1 file changed, 24 insertions(+), 189 deletions(-) diff --git a/scripts/gen.py b/scripts/gen.py index e6354208f..231fc022d 100644 --- a/scripts/gen.py +++ b/scripts/gen.py @@ -1,199 +1,34 @@ -import argparse -import json - -import sys - - -# Python code generator to create new troposphere classes from the -# AWS resource specification. -# -# Todo: -# - Currently only handles the single files (not the all-in-one) -# (Note: but will deal with things like spec/GuardDuty*) -# - Handle adding in validators -# - Verify propery dependency/ordering in the file -# - Needs better error checking -# - Need to figure out the correct Timestamp type - -copyright_header = """\ -# Copyright (c) 2012-2018, Mark Peek -# All rights reserved. -# -# See LICENSE file for full license. - -from . 
import AWSObject, AWSProperty -from .validators import boolean, integer -""" - - -def get_required(value): - return value['Required'] - - -map_type = { - 'Boolean': 'boolean', - 'Double': 'float', - 'Integer': 'integer', - 'Json': 'dict', - 'Long': 'integer', - 'String': 'basestring', - 'Timestamp': 'basestring', -} - - -map_type3 = { - 'Boolean': 'bool', - 'Double': 'float', - 'Integer': 'int', - 'Json': 'dict', - 'Long': 'int', - 'String': 'str', - 'Timestamp': 'str', -} - - -def get_type(value): - if 'PrimitiveType' in value: - return map_type.get(value['PrimitiveType'], value['PrimitiveType']) - if value['Type'] == 'List': - if 'ItemType' in value: - return "[%s]" % value['ItemType'] - else: - return "[%s]" % map_type.get(value['PrimitiveItemType']) - elif value['Type'] == 'Map': - return 'dict' - else: - # Non-primitive (Property) name - return value['Type'] +from troposphere_gen.specification import Specification +from troposphere_gen.generator import Generator +from troposphere_gen.policy import * +from troposphere_gen.policy import cc_to_sc - import pprint - pprint.pprint(value) - raise ValueError("get_type") - - -def get_type3(value): - if 'PrimitiveType' in value: - return map_type3.get(value['PrimitiveType'], value['PrimitiveType']) - if value['Type'] == 'List': - if 'ItemType' in value: - return "[%s]" % value['ItemType'] - else: - return "[%s]" % map_type3.get(value['PrimitiveItemType']) - elif value['Type'] == 'Map': - return 'dict' - else: - # Non-primitive (Property) name - return value['Type'] - - import pprint - pprint.pprint(value) - raise ValueError("get_type") - - -def output_class(class_name, properties, resource_name=None): - print - print - if resource_name: - print 'class %s(AWSObject):' % class_name - print ' resource_type = "%s"' % resource_name - print - else: - print 'class %s(AWSProperty):' % class_name - - # Output the props dict - print ' props = {' - for key, value in sorted(properties.iteritems()): - if key == 'Tags': - value_type = "Tags" - else: - value_type = get_type(value) - - # Wrap long names for pycodestyle - if len(key) + len(value_type) < 55: - print " '%s': (%s, %s)," % ( - key, value_type, get_required(value)) - else: - print " '%s':\n (%s, %s)," % ( - key, value_type, get_required(value)) - print ' }' - - -def output_class_stub(class_name, properties, resource_name=None): - print - print - if resource_name: - print 'class %s(AWSObject):' % class_name - print ' resource_type: str' - print - sys.stdout.write(' def __init__(self, title') - else: - print 'class %s(AWSProperty):' % class_name - print - sys.stdout.write(' def __init__(self') - - for key, value in sorted(properties.iteritems()): - if key == 'Tags': - value_type = "Tags" - else: - value_type = get_type3(value) - - if value_type.startswith("["): # Means that args are a list - sys.stdout.write(', %s:List%s=...' % (key, value_type)) - else: - sys.stdout.write(', %s:%s=...' % (key, value_type)) - - print ') -> None: ...' 
- print +import json - for key, value in sorted(properties.iteritems()): - if key == 'Tags': - value_type = "Tags" - else: - value_type = get_type3(value) +from collections import OrderedDict - if value_type.startswith("["): # Means that args are a list - print ' %s: List%s' % (key, value_type) - else: - print ' %s: %s' % (key, value_type) +def generate(specificationfile: str, outdir: str, policy: Policy): + with open(specificationfile, "r") as f: + specdata = json.load(f, object_pairs_hook=OrderedDict) -def process_file(filename, stub=False): - f = open(filename) - j = json.load(f) + spec = Specification(specdata) + gen = Generator(spec) - if 'PropertyTypes' in j: - for property_name, property_dict in j['PropertyTypes'].items(): - if property_name == "Tag": - print "from troposphere import Tags" - print - continue - class_name = property_name.split('.')[1] - properties = property_dict['Properties'] - if stub: - output_class_stub(class_name, properties) - else: - output_class(class_name, properties) + for name, module in gen.modules.items(): + with open(outdir + cc_to_sc(name) + ".py", "w") as f: + f.write(policy.module_head_format(module, spec)) + f.write(policy.after_import()) - for resource_name, resource_dict in j['ResourceType'].items(): - class_name = resource_name.split(':')[4] - properties = resource_dict['Properties'] - if stub: - output_class_stub(class_name, properties, resource_name) - else: - output_class(class_name, properties, resource_name) + for name, cd in module.properties.items(): + f.write(policy.class_format(cd)) + f.write(policy.between_class()) + for name, cd in module.resources.items(): + f.write(policy.class_format(cd)) + f.write(policy.between_class()) -if __name__ == '__main__': - parser = argparse.ArgumentParser() - parser.add_argument('--stub', action='store_true', default=False) - parser.add_argument('filename', nargs='+') - args = parser.parse_args() - if args.stub: - print copyright_header, - for f in args.filename: - process_file(f, stub=True) - else: - print copyright_header, - for f in args.filename: - process_file(f) +if __name__ == "__main__": + generate("CloudFormationResourceSpecification.json", "build/2.7/", Policy_2_7()) + generate("CloudFormationResourceSpecification.json", "build/3.7/", Policy_3_7()) From bf5b76926566c86b25eb7885335303d07884a23d Mon Sep 17 00:00:00 2001 From: DrLuke Date: Sun, 27 Jan 2019 18:22:26 +0100 Subject: [PATCH 47/62] Add init --- troposphere_gen/__init__.py | 0 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 troposphere_gen/__init__.py diff --git a/troposphere_gen/__init__.py b/troposphere_gen/__init__.py new file mode 100644 index 000000000..e69de29bb From 516b1d20d6da6f43f74e260bbae4cd09998600f1 Mon Sep 17 00:00:00 2001 From: DrLuke Date: Sat, 2 Feb 2019 17:23:00 +0100 Subject: [PATCH 48/62] Add validatordata --- troposphere_gen/codedata.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/troposphere_gen/codedata.py b/troposphere_gen/codedata.py index 3594d3c24..91254ab58 100644 --- a/troposphere_gen/codedata.py +++ b/troposphere_gen/codedata.py @@ -4,6 +4,7 @@ from troposphere_gen.specification import Property, Resource from troposphere_gen.types import ListType, MapType +from troposphere_gen.validatordata import ValidatorData from collections import OrderedDict @@ -116,14 +117,15 @@ def resolve_dependencies(self): class ClassData(): """Convert Property or Resource to required classdata""" - def __init__(self, name: str, data: Union[Property, Resource]) -> None: - if 
type(data) is Property and data.common: + def __init__(self, name: str, specdata: Union[Property, Resource], validatordata: ValidatorData) -> None: + if type(specdata) is Property and specdata.common: self.classname: str = name - elif type(data) is Property: + elif type(specdata) is Property: self.classname: str = class_name_from_property_name(name) - elif type(data) is Resource: + elif type(specdata) is Resource: self.classname: str = class_name_from_resource_name(name) - self.data: Property = data + self.data: Property = specdata + self.validatordata: ValidatorData = validatordata self.conflictedproperties: List[str] = [] From 4af8cd1bab991ad37fbd5e02ff0bd02fe783bbe2 Mon Sep 17 00:00:00 2001 From: DrLuke Date: Sat, 2 Feb 2019 17:25:11 +0100 Subject: [PATCH 49/62] Add validatordata --- troposphere_gen/validatordata.py | 15 +++++++++++++++ 1 file changed, 15 insertions(+) create mode 100644 troposphere_gen/validatordata.py diff --git a/troposphere_gen/validatordata.py b/troposphere_gen/validatordata.py new file mode 100644 index 000000000..e980f8e3a --- /dev/null +++ b/troposphere_gen/validatordata.py @@ -0,0 +1,15 @@ +"""Data container for field validation in properties and resources + + + +""" + +from typing import Dict + +class ValidatorData(): + def __init__(self, validatordata: Dict): + self.parse(validatordata) + + + def parse(self, validatordata: Dict): + pass From 4bc4ffb6042376ff84fb48f9b7824d55b257c998 Mon Sep 17 00:00:00 2001 From: DrLuke Date: Sat, 2 Feb 2019 17:27:06 +0100 Subject: [PATCH 50/62] Add validatordata --- troposphere_gen/codedata.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/troposphere_gen/codedata.py b/troposphere_gen/codedata.py index 91254ab58..3ad48aa28 100644 --- a/troposphere_gen/codedata.py +++ b/troposphere_gen/codedata.py @@ -50,19 +50,19 @@ def __init__(self, name: str): self.properties: OrderedDict[str, ClassData] = OrderedDict() self.resources: OrderedDict[str, ClassData] = OrderedDict() - def add_property(self, name: str, property: Property) -> None: + def add_property(self, name: str, property: Property, validatordata: ValidatorData=None) -> None: # Some properties are redefined for different Resources, but produce the exact same code. for existingname in self.properties: if class_name_from_property_name(existingname) == class_name_from_property_name(name): return - self.properties[name] = ClassData(name, property) + self.properties[name] = ClassData(name, property, validatordata) - def add_resource(self, name: str, resource: Resource) -> None: + def add_resource(self, name: str, resource: Resource, validatordata: ValidatorData=None) -> None: # Some properties are redefined for different Resources, but produce the exact same code. 
for existingname in self.resources: if class_name_from_resource_name(existingname) == class_name_from_resource_name(name): return - self.resources[name] = ClassData(name, resource) + self.resources[name] = ClassData(name, resource, validatordata) def resolve_name_conflicts(self): # Detect if any property has the same name as resource From ae61c8f1efb76d764ede2ed37ce1b831d94b2158 Mon Sep 17 00:00:00 2001 From: DrLuke Date: Thu, 14 Feb 2019 10:56:21 +0100 Subject: [PATCH 51/62] Add validators --- troposphere_gen/generator.py | 19 ++++- troposphere_gen/policy.py | 132 +++++++++++++++++++++++++------ troposphere_gen/validatordata.py | 16 ++++ 3 files changed, 142 insertions(+), 25 deletions(-) diff --git a/troposphere_gen/generator.py b/troposphere_gen/generator.py index f039a27e5..36d08cc82 100644 --- a/troposphere_gen/generator.py +++ b/troposphere_gen/generator.py @@ -7,6 +7,7 @@ from troposphere_gen.specification import Specification from troposphere_gen.codedata import ModuleData from troposphere_gen.codedata import module_name_from_namespace +from troposphere_gen.validatordata import ValidatorData from typing import Dict @@ -17,8 +18,14 @@ class Generator(): - def __init__(self, specification: Specification): + def __init__(self, specification: Specification, validationdict: Dict): self.specification: Specification = specification + self.validationdict: Dict = validationdict + if validationdict is None: + self.validationdict = { + "PropertyTypes": {}, + "ResourceTypes": {} + } self.modules: Dict[str, ModuleData] = {} @@ -34,14 +41,20 @@ def gen_property_classdata(self): for name, property in self.specification.property_types.items(): moddata = self.get_module(name) - moddata.add_property(name, property) + if name in self.validationdict["PropertyTypes"]: + moddata.add_property(name, property, ValidatorData(self.validationdict["PropertyTypes"][name])) + else: + moddata.add_property(name, property) def gen_resource_classdata(self): """Generates class data for each property and adds it to module""" for name, resource in self.specification.resource_types.items(): moddata = self.get_module(name) - moddata.add_resource(name, resource) + if name in self.validationdict["ResourceTypes"]: + moddata.add_resource(name, resource, ValidatorData(self.validationdict["ResourceTypes"][name])) + else: + moddata.add_resource(name, resource) def get_module(self, name: str) -> ModuleData: """Find or create module from namespaced name""" diff --git a/troposphere_gen/policy.py b/troposphere_gen/policy.py index 2ba65f6de..6273e2fc3 100644 --- a/troposphere_gen/policy.py +++ b/troposphere_gen/policy.py @@ -7,12 +7,15 @@ from troposphere_gen.codedata import ModuleData, ClassData from troposphere_gen.specification import Property, Resource, Specification -from troposphere_gen.types import ListType, MapType +from troposphere_gen.types import ListType, MapType, PrimitiveType, Subproperty +from troposphere_gen.validatordata import ValidatorData import re import datetime +from typing import Union + def cc_to_sc(name: str) -> str: """Convert CamelCase to snake_case""" @@ -38,39 +41,50 @@ def after_import(self) -> str: class Policy_3_7(Policy): - def get_type(self, prop: Property, deconflict: bool = False) -> str: - type_map = { - "String": "str", - "Long": "int", - "Integer": "int", - "Double": "float", - "Boolean": "bool", - "Timestamp": "str", # TODO: Add Timestamp class to troposphere - "Json": "Dict", - "Map": "Dict" + type_map = { + "String": "str", + "Long": "int", + "Integer": "int", + "Double": "float", + 
"Boolean": "bool", + "Timestamp": "str", # TODO: Add Timestamp class to troposphere + "Json": "Dict", + "Map": "Dict" # Workaround for AWS::ServiceDiscovery::Instance.InstanceAttributes, see types.py TODO: remove - } + } + def get_type(self, prop: Property, deconflict: bool = False) -> str: deconflicter: str = "" if deconflict: deconflicter = "Property" if prop.primitive_type is not None: - return type_map[prop.primitive_type.type] + return Policy_3_7.type_map[prop.primitive_type.type] else: if type(prop.type) == ListType: - if prop.type.itemtype.type in type_map: - return f"List[{type_map[prop.type.itemtype.type]}{deconflicter}]" + if prop.type.itemtype.type in Policy_3_7.type_map: + return f"List[{Policy_3_7.type_map[prop.type.itemtype.type]}{deconflicter}]" else: return f"List[{prop.type.itemtype.type}{deconflicter}]" elif type(prop.type) == MapType: - if prop.type.itemtype.type in type_map: - return f"Dict[str, {type_map[prop.type.itemtype.type]}{deconflicter}]" + if prop.type.itemtype.type in Policy_3_7.type_map: + return f"Dict[str, {Policy_3_7.type_map[prop.type.itemtype.type]}{deconflicter}]" else: return f"Dict[str, {prop.type.itemtype.type}]{deconflicter}" else: return f"{prop.type.type}{deconflicter}" + def get_itemtype(self, listmap: Union[ListType, MapType], deconflict: bool = False) -> str: + deconflicter: str = "" + if deconflict: + deconflicter = "Property" + + itemtype: Union[Subproperty, PrimitiveType] = listmap.itemtype + if type(itemtype) is Subproperty: + return itemtype.type + deconflicter + elif type(itemtype) is PrimitiveType: + return Policy_3_7.type_map[itemtype.type] + def module_head_format(self, moduledata: ModuleData, specification: Specification): """Construct module code @@ -99,8 +113,9 @@ def module_head_format(self, moduledata: ModuleData, specification: Specificatio f"\"\"\"\n" ) - imports = "\nfrom troposphere import AWSProperty, AWSObject\n" + imports = "\nfrom troposphere.aws_objects import AWSProperty, AWSObject\n" imports += "from typing import Dict, List\n" + imports += "from troposphere.validators import *\n" if modulename is not "common": imports += "from troposphere.common import Tag\n" @@ -113,28 +128,101 @@ def class_format(self, classdata: ClassData) -> str: """ + # Determine type of class if type(classdata.data) is Property: parentclass: str = "AWSProperty" elif type(classdata.data) is Resource: parentclass: str = "AWSObject" - properties: str = "" + # Determine list of properties relevant for Property/Resource + prop_dict: str = "" for name, prop in classdata.subproperties.items(): conflicted = name in classdata.conflictedproperties - properties += f" '{cc_to_sc(name)}': ({self.get_type(prop, conflicted)}, {prop.required}),\n" + prop_dict += f" '{cc_to_sc(name)}': {self.get_type(prop, conflicted)},\n" + + # Generate init signature + init_code = " def __init__(self,\n" + for name, prop in classdata.subproperties.items(): + conflicted = name in classdata.conflictedproperties + init_code += f" {cc_to_sc(name)}: {self.get_type(prop, conflicted)} = None,\n" + init_code += " ):\n" + + # Generate field declarations + for name, prop in classdata.subproperties.items(): + conflicted = name in classdata.conflictedproperties + init_code += f" self._{cc_to_sc(name)}: {self.get_type(prop, conflicted)} = None\n" + init_code += f" self.{cc_to_sc(name)} = {cc_to_sc(name)}\n" + + property_funcs = "" + for name, prop in classdata.subproperties.items(): + conflicted = name in classdata.conflictedproperties + + property_funcs += self.between_functions() + 
property_funcs += self.property_getter(prop, conflicted) + property_funcs += self.between_functions() + property_funcs += self.property_setter(prop, classdata, conflicted) classcode = ( f"class {classdata.classname}({parentclass}):\n" f" props = {{\n" - f"{properties}" + f"{prop_dict}" f" }}\n" + f"\n" + f"{init_code}" + f"{property_funcs}" ) return classcode + def property_getter(self, propertydata: Property, conflicted: bool) -> str: + return ( + f" @property\n" + f" def {cc_to_sc(propertydata.name)}(self) -> {self.get_type(propertydata, conflicted)}:\n" + f" return self._{cc_to_sc(propertydata.name)}\n" + ) + + def property_setter(self, propertydata: Property, classdata: ClassData, conflicted: bool) -> str: + # Generate type-checker + if type(propertydata.type) == ListType: + type_check = ( + f" if not isinstance(value, list):\n" + f" raise ValueError(\"{cc_to_sc(propertydata.name)} must be of type 'list' (is: '%s')\" % type(value))\n" + f" for listitem in value:\n" + f" if not isinstance(listitem, {self.get_itemtype(propertydata.type)}):\n" + f" raise ValueError(\"{cc_to_sc(propertydata.name)} list-items must be of type '{self.get_itemtype(propertydata.type)}' (is: '%s')\" % type(listitem))\n" + ) + elif type(propertydata.type) == MapType: + type_check = ( + f" if not isinstance(value, dict):\n" + f" raise ValueError(\"'{cc_to_sc(propertydata.name)}' must be of type 'dict' (is: '%s')\" % type(value))\n" + f" for k, v in value.items():\n" + f" if not isinstance(k, str):\n" + f" raise ValueError(\"{cc_to_sc(propertydata.name)} map-keys must be of type 'str' (is: '%s')\" % type(k))\n" + f" if not isinstance(v, {self.get_itemtype(propertydata.type)}):\n" + f" raise ValueError(\"{cc_to_sc(propertydata.name)} map-values must be of type '{self.get_itemtype(propertydata.type)}' (is: '%s')\" % type(v))\n" + ) + else: # Subproperty or primitive type + type_check = ( + f" if not isinstance(value, {self.get_type(propertydata, conflicted)}):\n" + f" raise ValueError(\"{cc_to_sc(propertydata.name)} must be of type '{self.get_type(propertydata, conflicted)}' (is: '%s')\" % type(value))\n" + ) + + return ( + f" @{cc_to_sc(propertydata.name)}.setter\n" + f" def {cc_to_sc(propertydata.name)}(self, value: {self.get_type(propertydata, conflicted)}) -> None:\n" + f" if value is None:\n" + f" self._{cc_to_sc(propertydata.name)} = None\n" + f" return\n" + f"{type_check}" + f" self._{cc_to_sc(propertydata.name)} = value\n" + ) + def between_class(self) -> str: return "\n\n" + def between_functions(self) -> str: + return " \n" + def after_import(self) -> str: return "\n\n" @@ -150,7 +238,7 @@ def get_type(self, prop: Property, deconflict: bool = False) -> str: "Timestamp": "basestring", # TODO: Add Timestamp class to troposphere "Json": "dict", "Map": "dict" - # Workaround for AWS::ServiceDiscovery::Instance.InstanceAttributes, see types.py TODO: remove + # Workaround for AWS::ServiceDiscovery::Instance.InstanceAttributes, see types.py TODO: remove } deconflicter: str = "" diff --git a/troposphere_gen/validatordata.py b/troposphere_gen/validatordata.py index e980f8e3a..5a6f2d667 100644 --- a/troposphere_gen/validatordata.py +++ b/troposphere_gen/validatordata.py @@ -10,6 +10,22 @@ class ValidatorData(): def __init__(self, validatordata: Dict): self.parse(validatordata) + self.validator: BaseValidator = None + def parse(self, validatordata: Dict): + validatormap = { + "regex": RegexValidator, + "Map": MapValidator, + } + + +class BaseValidator: + pass + +class RegexValidator(BaseValidator): + def __init__(self, 
validatordata: Dict): pass + +class MapValidator(BaseValidator): + pass From e370da6dddb74710378823042f8f6f9d7bf93601 Mon Sep 17 00:00:00 2001 From: DrLuke Date: Fri, 15 Feb 2019 11:06:53 +0100 Subject: [PATCH 52/62] Simplify validator data parsing --- troposphere_gen/validatordata.py | 51 +++++++++++++++++++++++--------- 1 file changed, 37 insertions(+), 14 deletions(-) diff --git a/troposphere_gen/validatordata.py b/troposphere_gen/validatordata.py index 5a6f2d667..cb4914948 100644 --- a/troposphere_gen/validatordata.py +++ b/troposphere_gen/validatordata.py @@ -8,24 +8,47 @@ class ValidatorData(): def __init__(self, validatordata: Dict): + self.validators: Dict[str, Validator] = {} self.parse(validatordata) - self.validator: BaseValidator = None - - def parse(self, validatordata: Dict): - validatormap = { - "regex": RegexValidator, - "Map": MapValidator, - } + for propname, validatordict in validatordata["Properties"].items(): + self.validators[propname] = Validator(validatordict) -class BaseValidator: - pass - -class RegexValidator(BaseValidator): +class Validator(): def __init__(self, validatordata: Dict): - pass + # Name of validator function and kwargs to be passed to it + self.function: str = None + self.kwargs: Dict[str, str] = {} + + # In case of a map we need a validator for both key and value + self.map_key_function: str = None + self.map_value_function: str = None + self.map_key_kwargs: Dict[str, str] = {} + self.map_value_kwargs: Dict[str, str] = {} -class MapValidator(BaseValidator): - pass + self.parse(validatordata) + + def parse(self, validatordata: Dict): + # 'Map' type validator is the only special case + if validatordata["Validator"] == "Map": + keydata = validatordata["ValidatorKey"] + for k, v in keydata.items(): + if k == "Validator": + self.map_key_function = v + else: + self.map_key_kwargs[k] = v + + valuedata = validatordata["ValidatorValue"] + for k, v in valuedata.items(): + if k == "Validator": + self.map_value_function = v + else: + self.map_value_kwargs[k] = v + else: + for k, v in validatordata.items(): + if k == "Validator": + self.function = v + else: + self.kwargs[k] = v From b92a91e4c42b817f396df97d9d70d543dd45c007 Mon Sep 17 00:00:00 2001 From: DrLuke Date: Fri, 15 Feb 2019 11:07:22 +0100 Subject: [PATCH 53/62] Add validators to generated code --- troposphere_gen/policy.py | 21 ++++++++++++++++++++- 1 file changed, 20 insertions(+), 1 deletion(-) diff --git a/troposphere_gen/policy.py b/troposphere_gen/policy.py index 6273e2fc3..9636c04ce 100644 --- a/troposphere_gen/policy.py +++ b/troposphere_gen/policy.py @@ -115,7 +115,7 @@ def module_head_format(self, moduledata: ModuleData, specification: Specificatio imports = "\nfrom troposphere.aws_objects import AWSProperty, AWSObject\n" imports += "from typing import Dict, List\n" - imports += "from troposphere.validators import *\n" + imports += "from troposphere import validators\n" if modulename is not "common": imports += "from troposphere.common import Tag\n" @@ -191,6 +191,11 @@ def property_setter(self, propertydata: Property, classdata: ClassData, conflict f" if not isinstance(listitem, {self.get_itemtype(propertydata.type)}):\n" f" raise ValueError(\"{cc_to_sc(propertydata.name)} list-items must be of type '{self.get_itemtype(propertydata.type)}' (is: '%s')\" % type(listitem))\n" ) + if classdata.validatordata is not None and propertydata.name in classdata.validatordata.validators: + type_check += f" validators.{classdata.validatordata.validators[propertydata.name].function}(value, self" + for k, v 
in classdata.validatordata.validators[propertydata.name].kwargs.items(): + type_check += f", {k}=\"{v}\"" + type_check += f")\n" elif type(propertydata.type) == MapType: type_check = ( f" if not isinstance(value, dict):\n" @@ -201,11 +206,25 @@ def property_setter(self, propertydata: Property, classdata: ClassData, conflict f" if not isinstance(v, {self.get_itemtype(propertydata.type)}):\n" f" raise ValueError(\"{cc_to_sc(propertydata.name)} map-values must be of type '{self.get_itemtype(propertydata.type)}' (is: '%s')\" % type(v))\n" ) + if classdata.validatordata is not None and propertydata.name in classdata.validatordata.validators: + type_check += f" validators.{classdata.validatordata.validators[propertydata.name].map_key_function}(k, self" + for k, v in classdata.validatordata.validators[propertydata.name].map_key_kwargs.items(): + type_check += f", {k}=\"{v}\"" + type_check += f")\n" + type_check += f" validators.{classdata.validatordata.validators[propertydata.name].map_value_function}(v, self" + for k, v in classdata.validatordata.validators[propertydata.name].map_value_kwargs.items(): + type_check += f", {k}=\"{v}\"" + type_check += f")\n" else: # Subproperty or primitive type type_check = ( f" if not isinstance(value, {self.get_type(propertydata, conflicted)}):\n" f" raise ValueError(\"{cc_to_sc(propertydata.name)} must be of type '{self.get_type(propertydata, conflicted)}' (is: '%s')\" % type(value))\n" ) + if classdata.validatordata is not None and propertydata.name in classdata.validatordata.validators: + type_check += f" validators.{classdata.validatordata.validators[propertydata.name].function}(value, self" + for k, v in classdata.validatordata.validators[propertydata.name].kwargs.items(): + type_check += f", {k}=\"{v}\"" + type_check += f")\n" return ( f" @{cc_to_sc(propertydata.name)}.setter\n" From a82cf38fb5b6f0234fb1b6580c803c698f7f6ea3 Mon Sep 17 00:00:00 2001 From: DrLuke Date: Fri, 15 Feb 2019 14:34:04 +0100 Subject: [PATCH 54/62] Make Key and Value validators optional --- troposphere_gen/validatordata.py | 28 +++++++++++++++------------- 1 file changed, 15 insertions(+), 13 deletions(-) diff --git a/troposphere_gen/validatordata.py b/troposphere_gen/validatordata.py index cb4914948..9603ac8e8 100644 --- a/troposphere_gen/validatordata.py +++ b/troposphere_gen/validatordata.py @@ -33,19 +33,21 @@ def __init__(self, validatordata: Dict): def parse(self, validatordata: Dict): # 'Map' type validator is the only special case if validatordata["Validator"] == "Map": - keydata = validatordata["ValidatorKey"] - for k, v in keydata.items(): - if k == "Validator": - self.map_key_function = v - else: - self.map_key_kwargs[k] = v - - valuedata = validatordata["ValidatorValue"] - for k, v in valuedata.items(): - if k == "Validator": - self.map_value_function = v - else: - self.map_value_kwargs[k] = v + if "ValidatorKey" in validatordata: + keydata = validatordata["ValidatorKey"] + for k, v in keydata.items(): + if k == "Validator": + self.map_key_function = v + else: + self.map_key_kwargs[k] = v + + if "ValidatorValue" in validatordata: + valuedata = validatordata["ValidatorValue"] + for k, v in valuedata.items(): + if k == "Validator": + self.map_value_function = v + else: + self.map_value_kwargs[k] = v else: for k, v in validatordata.items(): if k == "Validator": From 1f1d0886f6f7bc93a07625cfde47a102ff9b4215 Mon Sep 17 00:00:00 2001 From: DrLuke Date: Fri, 15 Feb 2019 15:12:15 +0100 Subject: [PATCH 55/62] Add basic validators --- troposphere/helpers/__init__.py | 0 
 troposphere/openstack/__init__.py      |  0
 troposphere/validators/__init__.py     |  2 ++
 troposphere/validators/alphanumeric.py | 14 ++++++++++++++
 troposphere/validators/exceptions.py   |  4 ++++
 troposphere/validators/regex.py        | 27 +++++++++++++++++++++++++++
 6 files changed, 47 insertions(+)
 delete mode 100644 troposphere/helpers/__init__.py
 delete mode 100644 troposphere/openstack/__init__.py
 create mode 100644 troposphere/validators/__init__.py
 create mode 100644 troposphere/validators/alphanumeric.py
 create mode 100644 troposphere/validators/exceptions.py
 create mode 100644 troposphere/validators/regex.py

diff --git a/troposphere/helpers/__init__.py b/troposphere/helpers/__init__.py
deleted file mode 100644
index e69de29bb..000000000
diff --git a/troposphere/openstack/__init__.py b/troposphere/openstack/__init__.py
deleted file mode 100644
index e69de29bb..000000000
diff --git a/troposphere/validators/__init__.py b/troposphere/validators/__init__.py
new file mode 100644
index 000000000..bb7d35155
--- /dev/null
+++ b/troposphere/validators/__init__.py
@@ -0,0 +1,2 @@
+from troposphere.validators.alphanumeric import alphanumeric
+from troposphere.validators.regex import regex
diff --git a/troposphere/validators/alphanumeric.py b/troposphere/validators/alphanumeric.py
new file mode 100644
index 000000000..0813f386e
--- /dev/null
+++ b/troposphere/validators/alphanumeric.py
@@ -0,0 +1,14 @@
+"""Alphanumeric validator"""
+
+from troposphere.validators.exceptions import TroposphereValidationError
+import re
+
+alphanumeric_expr = re.compile("[^a-zA-Z0-9]")
+
+def alphanumeric(input: str, *args, **kwargs):
+    match = alphanumeric_expr.search(input)
+    # If any non-alphanumeric characters are found, throw an exception
+    if match:
+        raise TroposphereValidationError(
+            "Only alphanumeric characters (a-z A-Z 0-9) are allowed."
+        )
diff --git a/troposphere/validators/exceptions.py b/troposphere/validators/exceptions.py
new file mode 100644
index 000000000..984c61cb8
--- /dev/null
+++ b/troposphere/validators/exceptions.py
@@ -0,0 +1,4 @@
+"""Validation exceptions"""
+
+class TroposphereValidationError(Exception):
+    pass
diff --git a/troposphere/validators/regex.py b/troposphere/validators/regex.py
new file mode 100644
index 000000000..0b7dd05af
--- /dev/null
+++ b/troposphere/validators/regex.py
@@ -0,0 +1,27 @@
+"""Regex validator"""
+
+from troposphere.validators.exceptions import TroposphereValidationError
+import re
+
+from typing import Union, List
+
+
+def regex(value: str, *args, must_match: Union[List[str], str] = None, **kwargs):
+    """Regex validator
+
+    This validator uses regex patterns to check if the input string is valid.
+
+    Arguments:
+        must_match: Either a single regex pattern or a list of regex patterns, which must all match.
+
+    Raises:
+        TroposphereValidationError: If the rules passed via kwargs are violated, this exception is raised.
+ """ + + if must_match is not None and type(must_match) is not list: + must_match = [must_match] + if must_match is not None: + for pattern in must_match: + match = re.search(pattern, value) + if not match: + raise TroposphereValidationError(f"Input value '{value}' does not match pattern '{must_match}'") From 6784c6ac198a09667cb827fe42d42af9725688a4 Mon Sep 17 00:00:00 2001 From: DrLuke Date: Fri, 15 Feb 2019 15:12:33 +0100 Subject: [PATCH 56/62] Make Key and Value validators optional --- troposphere_gen/policy.py | 18 ++++++++++-------- 1 file changed, 10 insertions(+), 8 deletions(-) diff --git a/troposphere_gen/policy.py b/troposphere_gen/policy.py index 9636c04ce..4758bbf32 100644 --- a/troposphere_gen/policy.py +++ b/troposphere_gen/policy.py @@ -207,14 +207,16 @@ def property_setter(self, propertydata: Property, classdata: ClassData, conflict f" raise ValueError(\"{cc_to_sc(propertydata.name)} map-values must be of type '{self.get_itemtype(propertydata.type)}' (is: '%s')\" % type(v))\n" ) if classdata.validatordata is not None and propertydata.name in classdata.validatordata.validators: - type_check += f" validators.{classdata.validatordata.validators[propertydata.name].map_key_function}(k, self" - for k, v in classdata.validatordata.validators[propertydata.name].map_key_kwargs.items(): - type_check += f", {k}=\"{v}\"" - type_check += f")\n" - type_check += f" validators.{classdata.validatordata.validators[propertydata.name].map_value_function}(v, self" - for k, v in classdata.validatordata.validators[propertydata.name].map_value_kwargs.items(): - type_check += f", {k}=\"{v}\"" - type_check += f")\n" + if classdata.validatordata.validators[propertydata.name].map_key_function is not None: + type_check += f" validators.{classdata.validatordata.validators[propertydata.name].map_key_function}(k, self" + for k, v in classdata.validatordata.validators[propertydata.name].map_key_kwargs.items(): + type_check += f", {k}=\"{v}\"" + type_check += f")\n" + if classdata.validatordata.validators[propertydata.name].map_value_function is not None: + type_check += f" validators.{classdata.validatordata.validators[propertydata.name].map_value_function}(v, self" + for k, v in classdata.validatordata.validators[propertydata.name].map_value_kwargs.items(): + type_check += f", {k}=\"{v}\"" + type_check += f")\n" else: # Subproperty or primitive type type_check = ( f" if not isinstance(value, {self.get_type(propertydata, conflicted)}):\n" From f1fc76716333faadaf65a6ac7f7e052a7b220c1f Mon Sep 17 00:00:00 2001 From: DrLuke Date: Sat, 16 Feb 2019 20:05:22 +0100 Subject: [PATCH 57/62] Refactor Validator name --- troposphere_gen/validatordata.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/troposphere_gen/validatordata.py b/troposphere_gen/validatordata.py index 9603ac8e8..cdf83a1bc 100644 --- a/troposphere_gen/validatordata.py +++ b/troposphere_gen/validatordata.py @@ -8,15 +8,15 @@ class ValidatorData(): def __init__(self, validatordata: Dict): - self.validators: Dict[str, Validator] = {} + self.validators: Dict[str, PropertyValidator] = {} self.parse(validatordata) def parse(self, validatordata: Dict): for propname, validatordict in validatordata["Properties"].items(): - self.validators[propname] = Validator(validatordict) + self.validators[propname] = PropertyValidator(validatordict) -class Validator(): +class PropertyValidator(): def __init__(self, validatordata: Dict): # Name of validator function and kwargs to be passed to it self.function: str = None From 
a5cc2fe86f9e4a32f9a42678cf35423838ba4042 Mon Sep 17 00:00:00 2001 From: DrLuke Date: Sat, 16 Feb 2019 20:22:17 +0100 Subject: [PATCH 58/62] Add test for validatordata.py --- tests/generator/test_validatordata.py | 58 +++++++++++++++++++++++++++ 1 file changed, 58 insertions(+) create mode 100644 tests/generator/test_validatordata.py diff --git a/tests/generator/test_validatordata.py b/tests/generator/test_validatordata.py new file mode 100644 index 000000000..c3a40db69 --- /dev/null +++ b/tests/generator/test_validatordata.py @@ -0,0 +1,58 @@ +import unittest +from troposphere_gen.validatordata import * + +validatordata = { + "PropertyTypes": { + "AWS::SomeService::SomeResource.SomeProperty": { + "Properties": { + "SubProperty1": { + "Validator": "Map", + "ValidatorKey": { + "Validator": "somefunc1", + "kwarg1": "foo", + "kwarg2": "bar", + }, + "ValidatorValue": { + "Validator": "somefunc2", + "kwarg3": "baz" + } + }, + "SubProperty2": { + "Validator": "somefunc3", + "kwarg1": "foo", + "kwarg2": "bar", + "kwarg3": "baz" + } + } + } + }, + "ResourceTypes": {} +} + + +class TestValidatorData(unittest.TestCase): + def test_validatordata(self): + valdata = ValidatorData(validatordata["PropertyTypes"]["AWS::SomeService::SomeResource.SomeProperty"]) + + # Check that both Properties were found + self.assertEqual(2, len(valdata.validators)) + self.assertIn("SubProperty1", valdata.validators) + self.assertIn("SubProperty2", valdata.validators) + + # Check Map-type validator + mapval = valdata.validators["SubProperty1"] + self.assertIsNone(mapval.function) + self.assertDictEqual({}, mapval.kwargs) + self.assertEqual("somefunc1", mapval.map_key_function) + self.assertDictEqual({"kwarg1": "foo", "kwarg2": "bar"}, mapval.map_key_kwargs) + self.assertEqual("somefunc2", mapval.map_value_function) + self.assertDictEqual({"kwarg3": "baz"}, mapval.map_value_kwargs) + + # Check regular validator (non-Map) + val = valdata.validators["SubProperty2"] + self.assertEqual("somefunc3", val.function) + self.assertDictEqual({"kwarg1": "foo", "kwarg2": "bar", "kwarg3": "baz"}, val.kwargs) + + +if __name__ == '__main__': + unittest.main() From 8d6e2a68d5f80c75259ab3df7aa69cd178f9e384 Mon Sep 17 00:00:00 2001 From: DrLuke Date: Sun, 17 Feb 2019 16:53:48 +0100 Subject: [PATCH 59/62] Remove old troposphere --- troposphere/amazonmq.py | 80 -- troposphere/analytics.py | 184 ----- troposphere/apigateway.py | 475 ----------- troposphere/applicationautoscaling.py | 109 --- troposphere/appstream.py | 152 ---- troposphere/appsync.py | 186 ----- troposphere/ask.py | 32 - troposphere/athena.py | 17 - troposphere/autoscaling.py | 392 --------- troposphere/autoscalingplans.py | 150 ---- troposphere/awslambda.py | 273 ------ troposphere/batch.py | 186 ----- troposphere/budgets.py | 82 -- troposphere/certificatemanager.py | 20 - troposphere/cloud9.py | 28 - troposphere/cloudformation.py | 222 ----- troposphere/cloudfront.py | 228 ----- troposphere/cloudtrail.py | 37 - troposphere/cloudwatch.py | 110 --- troposphere/codebuild.py | 242 ------ troposphere/codecommit.py | 42 - troposphere/codedeploy.py | 240 ------ troposphere/codepipeline.py | 171 ---- troposphere/cognito.py | 253 ------ troposphere/config.py | 139 ---- troposphere/constants.py | 416 ---------- troposphere/datapipeline.py | 60 -- troposphere/dax.py | 53 -- troposphere/directoryservice.py | 43 - troposphere/dlm.py | 82 -- troposphere/dms.py | 140 ---- troposphere/docdb.py | 67 -- troposphere/dynamodb.py | 173 ---- troposphere/dynamodb2.py | 35 - troposphere/ec2.py | 1097 
------------------------- troposphere/ecr.py | 23 - troposphere/ecs.py | 290 ------- troposphere/efs.py | 46 -- troposphere/eks.py | 24 - troposphere/elasticache.py | 157 ---- troposphere/elasticbeanstalk.py | 139 ---- troposphere/elasticloadbalancing.py | 98 --- troposphere/elasticloadbalancingv2.py | 224 ----- troposphere/elasticsearch.py | 90 -- troposphere/emr.py | 426 ---------- troposphere/events.py | 67 -- troposphere/firehose.py | 221 ----- troposphere/glue.py | 370 --------- troposphere/guardduty.py | 92 --- troposphere/helpers/userdata.py | 38 - troposphere/iam.py | 139 ---- troposphere/inspector.py | 36 - troposphere/iot.py | 198 ----- troposphere/iot1click.py | 39 - troposphere/iotanalytics.py | 253 ------ troposphere/kinesis.py | 35 - troposphere/kms.py | 35 - troposphere/logs.py | 62 -- troposphere/neptune.py | 82 -- troposphere/openstack/heat.py | 30 - troposphere/openstack/neutron.py | 309 ------- troposphere/openstack/nova.py | 162 ---- troposphere/opsworks.py | 333 -------- troposphere/policies.py | 64 -- troposphere/rds.py | 429 ---------- troposphere/redshift.py | 98 --- troposphere/route53.py | 207 ----- troposphere/s3.py | 423 ---------- troposphere/sagemaker.py | 99 --- troposphere/sdb.py | 12 - troposphere/secretsmanager.py | 88 -- troposphere/serverless.py | 342 -------- troposphere/servicecatalog.py | 165 ---- troposphere/servicediscovery.py | 86 -- troposphere/ses.py | 190 ----- troposphere/sns.py | 53 -- troposphere/sqs.py | 56 -- troposphere/ssm.py | 234 ------ troposphere/stepfunctions.py | 23 - troposphere/template_generator.py | 417 ---------- troposphere/utils.py | 41 - troposphere/validators.py | 472 ----------- troposphere/waf.py | 142 ---- troposphere/wafregional.py | 151 ---- troposphere/workspaces.py | 32 - 85 files changed, 14058 deletions(-) delete mode 100644 troposphere/amazonmq.py delete mode 100644 troposphere/analytics.py delete mode 100644 troposphere/apigateway.py delete mode 100644 troposphere/applicationautoscaling.py delete mode 100644 troposphere/appstream.py delete mode 100644 troposphere/appsync.py delete mode 100644 troposphere/ask.py delete mode 100644 troposphere/athena.py delete mode 100644 troposphere/autoscaling.py delete mode 100644 troposphere/autoscalingplans.py delete mode 100644 troposphere/awslambda.py delete mode 100644 troposphere/batch.py delete mode 100644 troposphere/budgets.py delete mode 100644 troposphere/certificatemanager.py delete mode 100644 troposphere/cloud9.py delete mode 100644 troposphere/cloudformation.py delete mode 100644 troposphere/cloudfront.py delete mode 100644 troposphere/cloudtrail.py delete mode 100644 troposphere/cloudwatch.py delete mode 100644 troposphere/codebuild.py delete mode 100644 troposphere/codecommit.py delete mode 100644 troposphere/codedeploy.py delete mode 100644 troposphere/codepipeline.py delete mode 100644 troposphere/cognito.py delete mode 100644 troposphere/config.py delete mode 100644 troposphere/constants.py delete mode 100644 troposphere/datapipeline.py delete mode 100644 troposphere/dax.py delete mode 100644 troposphere/directoryservice.py delete mode 100644 troposphere/dlm.py delete mode 100644 troposphere/dms.py delete mode 100644 troposphere/docdb.py delete mode 100644 troposphere/dynamodb.py delete mode 100644 troposphere/dynamodb2.py delete mode 100644 troposphere/ec2.py delete mode 100644 troposphere/ecr.py delete mode 100644 troposphere/ecs.py delete mode 100644 troposphere/efs.py delete mode 100644 troposphere/eks.py delete mode 100644 troposphere/elasticache.py 
delete mode 100644 troposphere/elasticbeanstalk.py delete mode 100644 troposphere/elasticloadbalancing.py delete mode 100644 troposphere/elasticloadbalancingv2.py delete mode 100644 troposphere/elasticsearch.py delete mode 100644 troposphere/emr.py delete mode 100644 troposphere/events.py delete mode 100644 troposphere/firehose.py delete mode 100644 troposphere/glue.py delete mode 100644 troposphere/guardduty.py delete mode 100644 troposphere/helpers/userdata.py delete mode 100644 troposphere/iam.py delete mode 100644 troposphere/inspector.py delete mode 100644 troposphere/iot.py delete mode 100644 troposphere/iot1click.py delete mode 100644 troposphere/iotanalytics.py delete mode 100644 troposphere/kinesis.py delete mode 100644 troposphere/kms.py delete mode 100644 troposphere/logs.py delete mode 100644 troposphere/neptune.py delete mode 100644 troposphere/openstack/heat.py delete mode 100644 troposphere/openstack/neutron.py delete mode 100644 troposphere/openstack/nova.py delete mode 100644 troposphere/opsworks.py delete mode 100644 troposphere/policies.py delete mode 100644 troposphere/rds.py delete mode 100644 troposphere/redshift.py delete mode 100644 troposphere/route53.py delete mode 100644 troposphere/s3.py delete mode 100644 troposphere/sagemaker.py delete mode 100644 troposphere/sdb.py delete mode 100644 troposphere/secretsmanager.py delete mode 100644 troposphere/serverless.py delete mode 100644 troposphere/servicecatalog.py delete mode 100644 troposphere/servicediscovery.py delete mode 100644 troposphere/ses.py delete mode 100644 troposphere/sns.py delete mode 100644 troposphere/sqs.py delete mode 100644 troposphere/ssm.py delete mode 100644 troposphere/stepfunctions.py delete mode 100644 troposphere/template_generator.py delete mode 100644 troposphere/utils.py delete mode 100644 troposphere/validators.py delete mode 100644 troposphere/waf.py delete mode 100644 troposphere/wafregional.py delete mode 100644 troposphere/workspaces.py diff --git a/troposphere/amazonmq.py b/troposphere/amazonmq.py deleted file mode 100644 index 81459d857..000000000 --- a/troposphere/amazonmq.py +++ /dev/null @@ -1,80 +0,0 @@ -# Copyright (c) 2012-2018, Mark Peek -# All rights reserved. -# -# See LICENSE file for full license. - -from . 
import AWSObject, AWSProperty, Tags -from .validators import boolean, integer - - -class ConfigurationId(AWSProperty): - props = { - 'Id': (basestring, True), - 'Revision': (integer, True), - } - - -class MaintenanceWindow(AWSProperty): - props = { - 'DayOfWeek': (basestring, True), - 'TimeOfDay': (basestring, True), - 'TimeZone': (basestring, True), - } - - -class User(AWSProperty): - props = { - 'ConsoleAccess': (boolean, False), - 'Groups': ([basestring], False), - 'Password': (basestring, True), - 'Username': (basestring, True), - } - - -class LogsConfiguration(AWSProperty): - props = { - 'Audit': (boolean, False), - 'General': (boolean, False), - } - - -class Broker(AWSObject): - resource_type = "AWS::AmazonMQ::Broker" - - props = { - 'AutoMinorVersionUpgrade': (boolean, True), - 'BrokerName': (basestring, True), - 'Users': ([User], True), - 'Configuration': (ConfigurationId, False), - 'DeploymentMode': (basestring, True), - 'EngineType': (basestring, True), - 'EngineVersion': (basestring, True), - 'HostInstanceType': (basestring, True), - 'Logs': (LogsConfiguration, False), - 'MaintenanceWindowStartTime': (MaintenanceWindow, False), - 'PubliclyAccessible': (boolean, True), - 'SecurityGroups': ([basestring], False), - 'SubnetIds': ([basestring], False), - 'Tags': ((Tags, list), False), - } - - -class Configuration(AWSObject): - resource_type = "AWS::AmazonMQ::Configuration" - - props = { - 'Data': (basestring, True), - 'Description': (basestring, False), - 'EngineType': (basestring, True), - 'EngineVersion': (basestring, True), - 'Name': (basestring, True), - } - - -class ConfigurationAssociation(AWSObject): - resource_type = "AWS::AmazonMQ::ConfigurationAssociation" - - props = { - 'Broker': (basestring, True), - 'Configuration': (ConfigurationId, True), - } diff --git a/troposphere/analytics.py b/troposphere/analytics.py deleted file mode 100644 index 65dc99c47..000000000 --- a/troposphere/analytics.py +++ /dev/null @@ -1,184 +0,0 @@ -# Copyright (c) 2012-2017, Mark Peek -# All rights reserved. -# -# See LICENSE file for full license. - -from . 
import AWSObject, AWSProperty -from .validators import integer - - -class InputParallelism(AWSProperty): - props = { - 'Count': (integer, True), - } - - -class RecordColumn(AWSProperty): - props = { - 'Mapping': (basestring, False), - 'Name': (basestring, True), - 'SqlType': (basestring, True), - } - - -class CSVMappingParameters(AWSProperty): - props = { - 'RecordColumnDelimiter': (basestring, True), - 'RecordRowDelimiter': (basestring, True), - } - - -class JSONMappingParameters(AWSProperty): - props = { - 'RecordRowPath': (basestring, True), - } - - -class MappingParameters(AWSProperty): - props = { - 'CSVMappingParameters': (CSVMappingParameters, False), - 'JSONMappingParameters': (JSONMappingParameters, False), - } - - -class RecordFormat(AWSProperty): - props = { - 'MappingParameters': (MappingParameters, False), - 'RecordFormatType': (basestring, True), - } - - -class InputSchema(AWSProperty): - props = { - 'RecordColumns': ([RecordColumn], True), - 'RecordEncoding': (basestring, False), - 'RecordFormat': (RecordFormat, True), - } - - -class KinesisFirehoseInput(AWSProperty): - props = { - 'ResourceARN': (basestring, True), - 'RoleARN': (basestring, True), - } - - -class KinesisStreamsInput(AWSProperty): - props = { - 'ResourceARN': (basestring, True), - 'RoleARN': (basestring, True), - } - - -class InputLambdaProcessor(AWSProperty): - props = { - 'ResourceARN': (basestring, True), - 'RoleARN': (basestring, True), - } - - -class InputProcessingConfiguration(AWSProperty): - props = { - 'InputLambdaProcessor': (InputLambdaProcessor, False), - } - - -class Input(AWSProperty): - props = { - 'NamePrefix': (basestring, True), - 'InputParallelism': (InputParallelism, False), - 'InputSchema': (InputSchema, True), - 'KinesisFirehoseInput': (KinesisFirehoseInput, False), - 'KinesisStreamsInput': (KinesisStreamsInput, False), - 'InputProcessingConfiguration': (InputProcessingConfiguration, False), - } - - -class Application(AWSObject): - resource_type = "AWS::KinesisAnalytics::Application" - - props = { - 'ApplicationName': (basestring, False), - 'ApplicationDescription': (basestring, False), - 'ApplicationCode': (basestring, False), - 'Inputs': ([Input], True), - } - - -class DestinationSchema(AWSProperty): - props = { - 'RecordFormatType': (basestring, False), - } - - -class KinesisFirehoseOutput(AWSProperty): - props = { - 'ResourceARN': (basestring, True), - 'RoleARN': (basestring, True), - } - - -class KinesisStreamsOutput(AWSProperty): - props = { - 'ResourceARN': (basestring, True), - 'RoleARN': (basestring, True), - } - - -class LambdaOutput(AWSProperty): - props = { - 'ResourceARN': (basestring, True), - 'RoleARN': (basestring, True), - } - - -class Output(AWSProperty): - props = { - 'DestinationSchema': (DestinationSchema, True), - 'KinesisFirehoseOutput': (KinesisFirehoseOutput, False), - 'KinesisStreamsOutput': (KinesisStreamsOutput, False), - 'LambdaOutput': (LambdaOutput, False), - 'Name': (basestring, True), - } - - -class ApplicationOutput(AWSObject): - resource_type = "AWS::KinesisAnalytics::ApplicationOutput" - - props = { - 'ApplicationName': (basestring, True), - 'Output': (Output, True), - } - - -class ReferenceSchema(AWSProperty): - props = { - 'RecordColumns': ([RecordColumn], True), - 'RecordEncoding': (basestring, False), - 'RecordFormat': (RecordFormat, True), - } - - -class S3ReferenceDataSource(AWSProperty): - props = { - 'BucketARN': (basestring, False), - 'FileKey': (basestring, False), - 'ReferenceRoleARN': (basestring, False), - } - - -class 
ReferenceDataSource(AWSProperty): - props = { - 'ReferenceSchema': (ReferenceSchema, True), - 'S3ReferenceDataSource': (S3ReferenceDataSource, False), - 'TableName': (basestring, False), - } - - -class ApplicationReferenceDataSource(AWSObject): - resource_type = "AWS::KinesisAnalytics::ApplicationReferenceDataSource" - - props = { - 'ApplicationName': (basestring, True), - 'ReferenceDataSource': (ReferenceDataSource, True), - } diff --git a/troposphere/apigateway.py b/troposphere/apigateway.py deleted file mode 100644 index 8b68123e9..000000000 --- a/troposphere/apigateway.py +++ /dev/null @@ -1,475 +0,0 @@ -from . import AWSObject, AWSProperty, Tags -from .validators import ( - boolean, double, integer_range, json_checker, positive_integer -) - - -def validate_authorizer_ttl(ttl_value): - """ Validate authorizer ttl timeout - :param ttl_value: The TTL timeout in seconds - :return: The provided TTL value if valid - """ - ttl_value = int(positive_integer(ttl_value)) - if ttl_value > 3600: - raise ValueError("The AuthorizerResultTtlInSeconds should be <= 3600") - return ttl_value - - -class AccessLogSetting(AWSProperty): - - props = { - "DestinationArn": (basestring, False), - "Format": (basestring, False) - } - - -class Account(AWSObject): - resource_type = "AWS::ApiGateway::Account" - - props = { - "CloudWatchRoleArn": (basestring, False) - } - - -class StageKey(AWSProperty): - - props = { - "RestApiId": (basestring, False), - "StageName": (basestring, False) - } - - -class ApiKey(AWSObject): - resource_type = "AWS::ApiGateway::ApiKey" - - props = { - "CustomerId": (basestring, False), - "Description": (basestring, False), - "Enabled": (boolean, False), - "GenerateDistinctId": (boolean, False), - "Name": (basestring, False), - "StageKeys": ([StageKey], False) - } - - -class Authorizer(AWSObject): - resource_type = "AWS::ApiGateway::Authorizer" - - props = { - "AuthType": (basestring, False), - "AuthorizerCredentials": (basestring, False), - "AuthorizerResultTtlInSeconds": (validate_authorizer_ttl, False), - "AuthorizerUri": (basestring, True), - "IdentitySource": (basestring, True), - "IdentityValidationExpression": (basestring, False), - "Name": (basestring, True), - "ProviderARNs": ([basestring], False), - "RestApiId": (basestring, False), - "Type": (basestring, True) - } - - -class BasePathMapping(AWSObject): - resource_type = "AWS::ApiGateway::BasePathMapping" - - props = { - "BasePath": (basestring, False), - "DomainName": (basestring, True), - "RestApiId": (basestring, True), - "Stage": (basestring, False) - } - - -# Represents: -# https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-apigateway-stage-canarysetting.html -class CanarySetting(AWSProperty): - - props = { - "DeploymentId": (basestring, False), - "PercentTraffic": ([double], False), - "StageVariableOverrides": (dict, False), - "UseStageCache": (boolean, False), - } - - -StageCanarySetting = CanarySetting - - -class ClientCertificate(AWSObject): - resource_type = "AWS::ApiGateway::ClientCertificate" - - props = { - "Description": (basestring, False) - } - - -# Represents: -# http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-apigateway-deployment-canarysetting.html -# and -# https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-apigateway-deployment-deploymentcanarysettings.html -class DeploymentCanarySettings(AWSProperty): - - props = { - "PercentTraffic": ([double], False), - "StageVariableOverrides": (dict, False), - "UseStageCache": (boolean, 
False), - } - - -DeploymentCanarySetting = DeploymentCanarySettings - - -class MethodSetting(AWSProperty): - - props = { - "CacheDataEncrypted": (bool, False), - "CacheTtlInSeconds": (positive_integer, False), - "CachingEnabled": (bool, False), - "DataTraceEnabled": (bool, False), - "HttpMethod": (basestring, True), - "LoggingLevel": (basestring, False), - "MetricsEnabled": (bool, False), - "ResourcePath": (basestring, True), - "ThrottlingBurstLimit": (positive_integer, False), - "ThrottlingRateLimit": (positive_integer, False) - } - - -class StageDescription(AWSProperty): - - props = { - "AccessLogSetting": (AccessLogSetting, False), - "CacheClusterEnabled": (bool, False), - "CacheClusterSize": (basestring, False), - "CacheDataEncrypted": (bool, False), - "CacheTtlInSeconds": (positive_integer, False), - "CachingEnabled": (bool, False), - "CanarySetting": (DeploymentCanarySettings, False), - "ClientCertificateId": (basestring, False), - "DataTraceEnabled": (bool, False), - "Description": (basestring, False), - "LoggingLevel": (basestring, False), - "MethodSettings": ([MethodSetting], False), - "MetricsEnabled": (bool, False), - "StageName": (basestring, False), - "Tags": ((Tags, list), False), - "ThrottlingBurstLimit": (positive_integer, False), - "ThrottlingRateLimit": (positive_integer, False), - "Variables": (dict, False), - } - - def validate(self): - if 'StageName' in self.properties: - raise DeprecationWarning( - "The StageName property has been deprecated " - "in StageDescription" - ) - - -class Deployment(AWSObject): - resource_type = "AWS::ApiGateway::Deployment" - - props = { - "DeploymentCanarySettings": (DeploymentCanarySettings, False), - "Description": (basestring, False), - "RestApiId": (basestring, True), - "StageDescription": (StageDescription, False), - "StageName": (basestring, False) - } - - -class Location(AWSProperty): - props = { - "Method": (basestring, False), - "Name": (basestring, False), - "Path": (basestring, False), - "StatusCode": (basestring, False), - "Type": (basestring, False), - } - - -class DocumentationPart(AWSObject): - resource_type = "AWS::ApiGateway::DocumentationPart" - - props = { - "Location": (Location, True), - "Properties": (basestring, True), - "RestApiId": (basestring, True), - } - - -class DocumentationVersion(AWSObject): - resource_type = "AWS::ApiGateway::DocumentationVersion" - - props = { - "Description": (basestring, False), - "DocumentationVersion": (basestring, True), - "RestApiId": (basestring, True), - } - - -class EndpointConfiguration(AWSProperty): - - props = { - "Types": ([basestring], False) - } - - -class DomainName(AWSObject): - resource_type = "AWS::ApiGateway::DomainName" - - props = { - "CertificateArn": (basestring, False), - "DomainName": (basestring, True), - "EndpointConfiguration": (EndpointConfiguration, False), - "RegionalCertificateArn": (basestring, False), - } - - -class IntegrationResponse(AWSProperty): - - props = { - "ContentHandling": (basestring, False), - "ResponseParameters": (dict, False), - "ResponseTemplates": (dict, False), - "SelectionPattern": (basestring, False), - "StatusCode": (basestring, False) - } - - -class Integration(AWSProperty): - - props = { - "CacheKeyParameters": ([basestring], False), - "CacheNamespace": (basestring, False), - "ConnectionId": (basestring, False), - "ConnectionType": (basestring, False), - "ContentHandling": (basestring, False), - "Credentials": (basestring, False), - "IntegrationHttpMethod": (basestring, False), - "IntegrationResponses": ([IntegrationResponse], 
False), - "PassthroughBehavior": (basestring, False), - "RequestParameters": (dict, False), - "RequestTemplates": (dict, False), - "TimeoutInMillis": (integer_range(50, 29000), False), - "Type": (basestring, True), - "Uri": (basestring, False) - } - - -class MethodResponse(AWSProperty): - - props = { - "ResponseModels": (dict, False), - "ResponseParameters": (dict, False), - "StatusCode": (basestring, True) - } - - -class Method(AWSObject): - resource_type = "AWS::ApiGateway::Method" - - props = { - "ApiKeyRequired": (bool, False), - "AuthorizationScopes": ([basestring], False), - "AuthorizationType": (basestring, True), - "AuthorizerId": (basestring, False), - "HttpMethod": (basestring, True), - "Integration": (Integration, False), - "MethodResponses": ([MethodResponse], False), - "OperationName": (basestring, False), - "RequestModels": (dict, False), - "RequestParameters": (dict, False), - "RequestValidatorId": (basestring, False), - "ResourceId": (basestring, True), - "RestApiId": (basestring, True) - } - - -class Model(AWSObject): - resource_type = "AWS::ApiGateway::Model" - - props = { - "ContentType": (basestring, False), - "Description": (basestring, False), - "Name": (basestring, False), - "RestApiId": (basestring, True), - "Schema": ((basestring, dict), False) - } - - def validate(self): - name = 'Schema' - if name in self.properties: - schema = self.properties.get(name) - self.properties[name] = json_checker(schema) - - -class RequestValidator(AWSObject): - resource_type = "AWS::ApiGateway::RequestValidator" - - props = { - "Name": (basestring, True), - "RestApiId": (basestring, True), - "ValidateRequestBody": (boolean, False), - "ValidateRequestParameters": (boolean, False), - } - - -class Resource(AWSObject): - resource_type = "AWS::ApiGateway::Resource" - - props = { - "ParentId": (basestring, True), - "PathPart": (basestring, True), - "RestApiId": (basestring, True) - } - - -class S3Location(AWSProperty): - - props = { - "Bucket": (basestring, False), - "ETag": (basestring, False), - "Key": (basestring, False), - "Version": (basestring, False) - } - - -class RestApi(AWSObject): - resource_type = "AWS::ApiGateway::RestApi" - - props = { - "ApiKeySourceType": (basestring, False), - "BinaryMediaTypes": ([basestring], False), - "Body": (dict, False), - "BodyS3Location": (S3Location, False), - "CloneFrom": (basestring, False), - "Description": (basestring, False), - "EndpointConfiguration": (EndpointConfiguration, False), - "FailOnWarnings": (basestring, False), - "MinimumCompressionSize": (positive_integer, False), - "Name": (basestring, False), - "Parameters": (dict, False), - "Policy": (dict, False), - } - - -class Stage(AWSObject): - resource_type = "AWS::ApiGateway::Stage" - - props = { - "AccesLogSetting": (AccessLogSetting, False), - "CacheClusterEnabled": (bool, False), - "CacheClusterSize": (basestring, False), - "CanarySetting": (StageCanarySetting, False), - "ClientCertificateId": (basestring, False), - "DeploymentId": (basestring, True), - "Description": (basestring, False), - "DocumentationVersion": (basestring, False), - "MethodSettings": ([MethodSetting], False), - "RestApiId": (basestring, True), - "StageName": (basestring, True), - "Tags": ((Tags, list), False), - "Variables": (dict, False), - } - - -class QuotaSettings(AWSProperty): - props = { - "Limit": (positive_integer, False), - "Offset": (positive_integer, False), - "Period": (basestring, False), - } - - -class ThrottleSettings(AWSProperty): - props = { - "BurstLimit": (positive_integer, False), - 
"RateLimit": (positive_integer, False), - } - - -class ApiStage(AWSProperty): - props = { - "ApiId": (basestring, False), - "Stage": (basestring, False), - "Throttle": (ThrottleSettings, False), - } - - -class UsagePlan(AWSObject): - resource_type = "AWS::ApiGateway::UsagePlan" - - props = { - "ApiStages": ([ApiStage], False), - "Description": (basestring, False), - "Quota": (QuotaSettings, False), - "Throttle": (ThrottleSettings, False), - "UsagePlanName": (basestring, False), - } - - -class UsagePlanKey(AWSObject): - resource_type = "AWS::ApiGateway::UsagePlanKey" - - props = { - "KeyId": (basestring, True), - "KeyType": (basestring, True), - "UsagePlanId": (basestring, True), - } - - -def validate_gateway_response_type(response_type): - """ Validate response type - :param response_type: The GatewayResponse response type - :return: The provided value if valid - """ - valid_response_types = [ - "ACCESS_DENIED", - "API_CONFIGURATION_ERROR", - "AUTHORIZER_FAILURE", - "AUTHORIZER_CONFIGURATION_ERROR", - "BAD_REQUEST_PARAMETERS", - "BAD_REQUEST_BODY", - "DEFAULT_4XX", - "DEFAULT_5XX", - "EXPIRED_TOKEN", - "INVALID_SIGNATURE", - "INTEGRATION_FAILURE", - "INTEGRATION_TIMEOUT", - "INVALID_API_KEY", - "MISSING_AUTHENTICATION_TOKEN", - "QUOTA_EXCEEDED", - "REQUEST_TOO_LARGE", - "RESOURCE_NOT_FOUND", - "THROTTLED", - "UNAUTHORIZED", - "UNSUPPORTED_MEDIA_TYPE" - ] - if response_type not in valid_response_types: - raise ValueError( - "{} is not a valid ResponseType".format(response_type) - ) - return response_type - - -class GatewayResponse(AWSObject): - resource_type = "AWS::ApiGateway::GatewayResponse" - - props = { - "ResponseParameters": (dict, False), - "ResponseTemplates": (dict, False), - "ResponseType": (validate_gateway_response_type, True), - "RestApiId": (basestring, True), - "StatusCode": (basestring, False) - } - - -class VpcLink(AWSObject): - resource_type = "AWS::ApiGateway::VpcLink" - - props = { - 'Description': (basestring, False), - 'Name': (basestring, True), - 'TargetArns': ([basestring], True), - } diff --git a/troposphere/applicationautoscaling.py b/troposphere/applicationautoscaling.py deleted file mode 100644 index 4e1d58046..000000000 --- a/troposphere/applicationautoscaling.py +++ /dev/null @@ -1,109 +0,0 @@ -from . 
import AWSObject, AWSProperty -from .validators import boolean, double, integer, positive_integer - - -class ScalableTargetAction(AWSProperty): - props = { - 'MaxCapacity': (integer, False), - 'MinCapacity': (integer, False), - } - - -class ScheduledAction(AWSProperty): - props = { - 'EndTime': (basestring, False), - 'ScalableTargetAction': (ScalableTargetAction, False), - 'Schedule': (basestring, True), - 'ScheduledActionName': (basestring, True), - 'StartTime': (basestring, False), - } - - -class ScalableTarget(AWSObject): - resource_type = "AWS::ApplicationAutoScaling::ScalableTarget" - - props = { - 'MaxCapacity': (integer, True), - 'MinCapacity': (integer, True), - 'ResourceId': (basestring, True), - 'RoleARN': (basestring, True), - 'ScalableDimension': (basestring, True), - 'ScheduledActions': ([ScheduledAction], False), - 'ServiceNamespace': (basestring, True), - } - - -class StepAdjustment(AWSProperty): - props = { - 'MetricIntervalLowerBound': (integer, False), - 'MetricIntervalUpperBound': (integer, False), - 'ScalingAdjustment': (integer, True), - } - - -class StepScalingPolicyConfiguration(AWSProperty): - props = { - 'AdjustmentType': (basestring, False), - 'Cooldown': (integer, False), - 'MetricAggregationType': (basestring, False), - 'MinAdjustmentMagnitude': (integer, False), - 'StepAdjustments': ([StepAdjustment], False), - } - - -class MetricDimension(AWSProperty): - props = { - 'Name': (basestring, True), - 'Value': (basestring, True), - } - - -class CustomizedMetricSpecification(AWSProperty): - props = { - 'Dimensions': ([MetricDimension], False), - 'MetricName': (basestring, False), - 'Namespace': (basestring, False), - 'Statistic': (basestring, False), - 'Unit': (basestring, True), - } - - -class PredefinedMetricSpecification(AWSProperty): - props = { - 'PredefinedMetricType': (basestring, True), - 'ResourceLabel': (basestring, False), - } - - -class TargetTrackingScalingPolicyConfiguration(AWSProperty): - props = { - 'CustomizedMetricSpecification': - (CustomizedMetricSpecification, False), - 'DisableScaleIn': (boolean, False), - 'PredefinedMetricSpecification': - (PredefinedMetricSpecification, False), - 'ScaleInCooldown': (positive_integer, False), - 'ScaleOutCooldown': (positive_integer, False), - 'TargetValue': (double, True), - } - - -class ScalingPolicy(AWSObject): - resource_type = "AWS::ApplicationAutoScaling::ScalingPolicy" - - props = { - 'PolicyName': (basestring, True), - 'PolicyType': (basestring, False), - 'ResourceId': (basestring, False), - 'ScalableDimension': (basestring, False), - 'ServiceNamespace': (basestring, False), - 'ScalingTargetId': (basestring, False), - 'StepScalingPolicyConfiguration': ( - StepScalingPolicyConfiguration, - False, - ), - 'TargetTrackingScalingPolicyConfiguration': ( - TargetTrackingScalingPolicyConfiguration, - False, - ), - } diff --git a/troposphere/appstream.py b/troposphere/appstream.py deleted file mode 100644 index 32383063e..000000000 --- a/troposphere/appstream.py +++ /dev/null @@ -1,152 +0,0 @@ -# Copyright (c) 2012-2018, Mark Peek -# All rights reserved. -# -# See LICENSE file for full license. - -from . 
import AWSObject, AWSProperty -from .validators import boolean, integer - - -class ServiceAccountCredentials(AWSProperty): - props = { - 'AccountName': (basestring, True), - 'AccountPassword': (basestring, True), - } - - -class DirectoryConfig(AWSObject): - resource_type = "AWS::AppStream::DirectoryConfig" - - props = { - 'DirectoryName': (basestring, True), - 'OrganizationalUnitDistinguishedNames': ([basestring], True), - 'ServiceAccountCredentials': (ServiceAccountCredentials, True), - } - - -class ComputeCapacity(AWSProperty): - props = { - 'DesiredInstances': (integer, True), - } - - -class VpcConfig(AWSProperty): - props = { - 'SecurityGroupIds': ([basestring], False), - 'SubnetIds': ([basestring], False), - } - - -class DomainJoinInfo(AWSProperty): - props = { - 'DirectoryName': (basestring, False), - 'OrganizationalUnitDistinguishedName': (basestring, False), - } - - -class Fleet(AWSObject): - resource_type = "AWS::AppStream::Fleet" - - props = { - 'ComputeCapacity': (ComputeCapacity, True), - 'Description': (basestring, False), - 'DisconnectTimeoutInSeconds': (integer, False), - 'DisplayName': (basestring, False), - 'DomainJoinInfo': (DomainJoinInfo, False), - 'EnableDefaultInternetAccess': (boolean, False), - 'FleetType': (basestring, False), - 'ImageArn': (basestring, False), - 'ImageName': (basestring, False), - 'InstanceType': (basestring, True), - 'MaxUserDurationInSeconds': (integer, False), - 'Name': (basestring, False), - 'VpcConfig': (VpcConfig, False), - } - - -class ImageBuilder(AWSObject): - resource_type = "AWS::AppStream::ImageBuilder" - - props = { - 'AppstreamAgentVersion': (basestring, False), - 'Description': (basestring, False), - 'DisplayName': (basestring, False), - 'DomainJoinInfo': (DomainJoinInfo, False), - 'EnableDefaultInternetAccess': (boolean, False), - 'ImageArn': (basestring, False), - 'ImageName': (basestring, False), - 'InstanceType': (basestring, True), - 'Name': (basestring, False), - 'VpcConfig': (VpcConfig, False), - } - - -class StackFleetAssociation(AWSObject): - resource_type = "AWS::AppStream::StackFleetAssociation" - - props = { - 'FleetName': (basestring, True), - 'StackName': (basestring, True), - } - - -class StorageConnector(AWSProperty): - props = { - 'ConnectorType': (basestring, True), - 'Domains': ([basestring], False), - 'ResourceIdentifier': (basestring, False), - } - - -class UserSetting(AWSProperty): - props = { - 'Action': (basestring, True), - 'Permission': (basestring, True), - } - - -class ApplicationSettings(AWSProperty): - props = { - 'Enabled': (boolean, True), - 'SettingsGroup': (basestring, False), - } - - -class Stack(AWSObject): - resource_type = "AWS::AppStream::Stack" - - props = { - 'ApplicationSettings': (ApplicationSettings, False), - 'AttributesToDelete': ([basestring], False), - 'DeleteStorageConnectors': (boolean, False), - 'Description': (basestring, False), - 'DisplayName': (basestring, False), - 'FeedbackURL': (basestring, False), - 'Name': (basestring, False), - 'RedirectURL': (basestring, False), - 'StorageConnectors': ([StorageConnector], False), - 'UserSettings': ([UserSetting], False), - } - - -class StackUserAssociation(AWSObject): - resource_type = "AWS::AppStream::StackUserAssociation" - - props = { - 'AuthenticationType': (basestring, True), - 'SendEmailNotification': (boolean, False), - 'StackName': (basestring, True), - 'UserName': (basestring, True), - } - - -class User(AWSObject): - resource_type = "AWS::AppStream::User" - - props = { - 'AuthenticationType': (basestring, True), - 'FirstName': 
(basestring, False), - 'LastName': (basestring, False), - 'MessageAction': (basestring, False), - 'UserName': (basestring, True), - } diff --git a/troposphere/appsync.py b/troposphere/appsync.py deleted file mode 100644 index 0603827ce..000000000 --- a/troposphere/appsync.py +++ /dev/null @@ -1,186 +0,0 @@ -# Copyright (c) 2012-2017, Mark Peek -# All rights reserved. -# -# See LICENSE file for full license. - -from . import AWSObject, AWSProperty -from .validators import boolean, integer - - -def resolver_kind_validator(x): - valid_types = ["UNIT", "PIPELINE"] - if x not in valid_types: - raise ValueError("Kind must be one of: %s" % ", ".join(valid_types)) - return x - - -class ApiKey(AWSObject): - resource_type = "AWS::AppSync::ApiKey" - - props = { - 'ApiId': (basestring, True), - 'Description': (basestring, False), - 'Expires': (integer, False), - } - - -class DynamoDBConfig(AWSProperty): - props = { - 'AwsRegion': (basestring, True), - 'TableName': (basestring, True), - 'UseCallerCredentials': (boolean, False), - } - - -class ElasticsearchConfig(AWSProperty): - props = { - 'AwsRegion': (basestring, True), - 'Endpoint': (basestring, True), - } - - -class AwsIamConfig(AWSProperty): - props = { - 'SigningRegion': (basestring, False), - 'SigningServiceName': (basestring, False), - } - - -class AuthorizationConfig(AWSProperty): - props = { - 'AuthorizationType': (basestring, True), - 'AwsIamConfig': (AwsIamConfig, False), - } - - -class HttpConfig(AWSProperty): - props = { - 'AuthorizationConfig': (AuthorizationConfig, False), - 'Endpoint': (basestring, True), - } - - -class LambdaConfig(AWSProperty): - props = { - 'LambdaFunctionArn': (basestring, True), - } - - -class RdsHttpEndpointConfig(AWSProperty): - props = { - 'AwsRegion': (basestring, False), - 'DbClusterIdentifier': (basestring, False), - 'DatabaseName': (basestring, False), - 'Schema': (basestring, False), - 'AwsSecretStoreArn': (basestring, False), - } - - -class RelationalDatabaseConfig(AWSProperty): - props = { - 'RelationalDatasourceType': (basestring, False), - 'RdsHttpEndpointConfig': (RdsHttpEndpointConfig, False), - } - - -class DataSource(AWSObject): - resource_type = "AWS::AppSync::DataSource" - - props = { - 'ApiId': (basestring, True), - 'Description': (basestring, False), - 'DynamoDBConfig': (DynamoDBConfig, False), - 'ElasticsearchConfig': (ElasticsearchConfig, False), - 'HttpConfig': (HttpConfig, False), - 'LambdaConfig': (LambdaConfig, False), - 'Name': (basestring, True), - 'ServiceRoleArn': (basestring, False), - 'Type': (basestring, True), - 'RelationalDatabaseConfig': (RelationalDatabaseConfig, False), - } - - -class LogConfig(AWSProperty): - props = { - 'CloudWatchLogsRoleArn': (basestring, False), - 'FieldLogLevel': (basestring, False), - } - - -class OpenIDConnectConfig(AWSProperty): - props = { - 'AuthTTL': (float, False), - 'ClientId': (basestring, False), - 'IatTTL': (float, False), - 'Issuer': (basestring, True), - } - - -class UserPoolConfig(AWSProperty): - props = { - 'AppIdClientRegex': (basestring, False), - 'AwsRegion': (basestring, False), - 'DefaultAction': (basestring, False), - 'UserPoolId': (basestring, False), - } - - -class GraphQLApi(AWSObject): - resource_type = "AWS::AppSync::GraphQLApi" - - props = { - 'AuthenticationType': (basestring, True), - 'LogConfig': (LogConfig, False), - 'Name': (basestring, True), - 'OpenIDConnectConfig': (OpenIDConnectConfig, False), - 'UserPoolConfig': (UserPoolConfig, False), - } - - -class GraphQLSchema(AWSObject): - resource_type = 
"AWS::AppSync::GraphQLSchema" - - props = { - 'ApiId': (basestring, True), - 'Definition': (basestring, False), - 'DefinitionS3Location': (basestring, False), - } - - -class PipelineConfig(AWSProperty): - props = { - 'Functions': ([basestring], False), - } - - -class Resolver(AWSObject): - resource_type = "AWS::AppSync::Resolver" - - props = { - 'ApiId': (basestring, True), - 'DataSourceName': (basestring, True), - 'FieldName': (basestring, True), - 'Kind': (resolver_kind_validator, False), - 'PipelineConfig': (PipelineConfig, False), - 'RequestMappingTemplate': (basestring, False), - 'RequestMappingTemplateS3Location': (basestring, False), - 'ResponseMappingTemplate': (basestring, False), - 'ResponseMappingTemplateS3Location': (basestring, False), - 'TypeName': (basestring, True), - } - - -class FunctionConfiguration(AWSObject): - resource_type = "AWS::AppSync::FunctionConfiguration" - - props = { - 'ApiId': (basestring, True), - 'Name': (basestring, False), - 'Description': (basestring, False), - 'DataSourceName': (basestring, False), - 'FunctionVersion': (basestring, False), - 'RequestMappingTemplate': (basestring, False), - 'RequestMappingTemplateS3Location': (basestring, False), - 'ResponseMappingTemplate': (basestring, False), - 'ResponseMappingTemplateS3Location': (basestring, False), - } diff --git a/troposphere/ask.py b/troposphere/ask.py deleted file mode 100644 index 63e512943..000000000 --- a/troposphere/ask.py +++ /dev/null @@ -1,32 +0,0 @@ -# Copyright (c) 2012-2018, Mark Peek -# All rights reserved. -# -# See LICENSE file for full license. - -from . import AWSObject, AWSProperty -from .validators import json_checker - - -class AuthenticationConfiguration(AWSProperty): - props = { - 'DefaultAttributes': (json_checker, False), - 'DeviceTemplates': (json_checker, False), - } - - -class SkillPackage(AWSProperty): - props = { - 'ClientId': (basestring, True), - 'ClientSecret': (basestring, True), - 'RefreshToken': (basestring, True), - } - - -class Skill(AWSObject): - resource_type = "Alexa::ASK::Skill" - - props = { - 'AuthenticationConfiguration': (AuthenticationConfiguration, True), - 'SkillPackage': (SkillPackage, True), - 'VendorId': (basestring, True), - } diff --git a/troposphere/athena.py b/troposphere/athena.py deleted file mode 100644 index 3bdb3b9c1..000000000 --- a/troposphere/athena.py +++ /dev/null @@ -1,17 +0,0 @@ -# Copyright (c) 2012-2017, Mark Peek -# All rights reserved. -# -# See LICENSE file for full license. - -from . import AWSObject - - -class NamedQuery(AWSObject): - resource_type = "AWS::Athena::NamedQuery" - - props = { - 'Database': (basestring, True), - 'Description': (basestring, False), - 'Name': (basestring, False), - 'QueryString': (basestring, True), - } diff --git a/troposphere/autoscaling.py b/troposphere/autoscaling.py deleted file mode 100644 index 2f09f1a15..000000000 --- a/troposphere/autoscaling.py +++ /dev/null @@ -1,392 +0,0 @@ -# Copyright (c) 2012-2013, Mark Peek -# All rights reserved. -# -# See LICENSE file for full license. - -from . import AWSHelperFn, AWSObject, AWSProperty, If, FindInMap, Ref -from .validators import boolean, integer, exactly_one, mutually_exclusive -from . 
import cloudformation - - -EC2_INSTANCE_LAUNCH = "autoscaling:EC2_INSTANCE_LAUNCH" -EC2_INSTANCE_LAUNCH_ERROR = "autoscaling:EC2_INSTANCE_LAUNCH_ERROR" -EC2_INSTANCE_TERMINATE = "autoscaling:EC2_INSTANCE_TERMINATE" -EC2_INSTANCE_TERMINATE_ERROR = "autoscaling:EC2_INSTANCE_TERMINATE_ERROR" -TEST_NOTIFICATION = "autoscaling:TEST_NOTIFICATION" - -# Termination Policy constants -Default = 'Default' -OldestInstance = 'OldestInstance' -NewestInstance = 'NewestInstance' -OldestLaunchConfiguration = 'OldestLaunchConfiguration' -ClosestToNextInstanceHour = 'ClosestToNextInstanceHour' - - -class Tag(AWSHelperFn): - def __init__(self, key, value, propogate): - self.data = { - 'Key': key, - 'Value': value, - 'PropagateAtLaunch': propogate, - } - - -class Tags(AWSHelperFn): - defaultPropagateAtLaunch = True - manyType = [type([]), type(())] - - def __init__(self, **kwargs): - self.tags = [] - for k, v in sorted(kwargs.iteritems()): - if type(v) in self.manyType: - propagate = str(v[1]).lower() - v = v[0] - else: - propagate = str(self.defaultPropagateAtLaunch).lower() - self.tags.append({ - 'Key': k, - 'Value': v, - 'PropagateAtLaunch': propagate, - }) - - # append tags to list - def __add__(self, newtags): - newtags.tags = self.tags + newtags.tags - return newtags - - def to_dict(self): - return self.tags - - -class LifecycleHookSpecification(AWSProperty): - props = { - 'DefaultResult': (basestring, False), - 'HeartbeatTimeout': (basestring, False), - 'LifecycleHookName': (basestring, True), - 'LifecycleTransition': (basestring, True), - 'NotificationMetadata': (basestring, False), - 'NotificationTargetARN': (basestring, False), - 'RoleARN': (basestring, False), - } - - -class NotificationConfigurations(AWSProperty): - props = { - 'TopicARN': (basestring, True), - 'NotificationTypes': (list, True), - } - - -class MetricsCollection(AWSProperty): - props = { - 'Granularity': (basestring, True), - 'Metrics': (list, False), - } - - -class Metadata(AWSHelperFn): - def __init__(self, init, authentication=None): - self.validate(init, authentication) - # get keys and values from init and authentication - - # if there's only one data point, then we know its the default - # cfn-init; where the key is 'config' - if len(init.data) == 1: - initKey, initValue = init.data.popitem() - self.data = {initKey: initValue} - else: - self.data = init.data - - if authentication: - authKey, authValue = authentication.data.popitem() - self.data[authKey] = authValue - - def validate(self, init, authentication): - if not isinstance(init, cloudformation.Init): - raise ValueError( - 'init must be of type cloudformation.Init' - ) - - is_instance = isinstance(authentication, cloudformation.Authentication) - if authentication and not is_instance: - raise ValueError( - 'authentication must be of type cloudformation.Authentication' - ) - - -class LaunchTemplateSpecification(AWSProperty): - props = { - 'LaunchTemplateId': (basestring, False), - 'LaunchTemplateName': (basestring, False), - 'Version': (basestring, True) - } - - def validate(self): - template_ids = [ - 'LaunchTemplateId', - 'LaunchTemplateName' - ] - exactly_one(self.__class__.__name__, self.properties, template_ids) - - -class InstancesDistribution(AWSProperty): - props = { - 'OnDemandAllocationStrategy': (basestring, False), - 'OnDemandBaseCapacity': (integer, False), - 'OnDemandPercentageAboveBaseCapacity': (integer, False), - 'SpotAllocationStrategy': (basestring, False), - 'SpotInstancePools': (integer, False), - 'SpotMaxPrice': (basestring, False), - } - - -class 
LaunchTemplateOverrides(AWSProperty): - props = { - 'InstanceType': (basestring, False), - } - - -class LaunchTemplate(AWSProperty): - props = { - 'LaunchTemplateSpecification': (LaunchTemplateSpecification, True), - 'Overrides': ([LaunchTemplateOverrides], True), - } - - -class MixedInstancesPolicy(AWSProperty): - props = { - 'InstancesDistribution': (InstancesDistribution, False), - 'LaunchTemplate': (LaunchTemplate, True), - } - - -class AutoScalingGroup(AWSObject): - resource_type = "AWS::AutoScaling::AutoScalingGroup" - - props = { - 'AutoScalingGroupName': (basestring, False), - 'AvailabilityZones': (list, False), - 'Cooldown': (integer, False), - 'DesiredCapacity': (integer, False), - 'HealthCheckGracePeriod': (integer, False), - 'HealthCheckType': (basestring, False), - 'InstanceId': (basestring, False), - 'LaunchConfigurationName': (basestring, False), - 'LaunchTemplate': (LaunchTemplateSpecification, False), - 'LifecycleHookSpecificationList': - ([LifecycleHookSpecification], False), - 'LoadBalancerNames': (list, False), - 'MaxSize': (integer, True), - 'MetricsCollection': ([MetricsCollection], False), - 'MinSize': (integer, True), - 'MixedInstancesPolicy': (MixedInstancesPolicy, False), - 'NotificationConfigurations': ([NotificationConfigurations], False), - 'PlacementGroup': (basestring, False), - 'ServiceLinkedRoleARN': (basestring, False), - 'Tags': ((Tags, list), False), - 'TargetGroupARNs': ([basestring], False), - 'TerminationPolicies': ([basestring], False), - 'VPCZoneIdentifier': (list, False), - } - - def validate(self): - if 'UpdatePolicy' in self.resource: - update_policy = self.resource['UpdatePolicy'] - - if (not isinstance(update_policy, AWSHelperFn) and - 'AutoScalingRollingUpdate' in update_policy.properties): - if not isinstance( - update_policy.AutoScalingRollingUpdate, AWSHelperFn): - rolling_update = update_policy.AutoScalingRollingUpdate - - min_instances = rolling_update.properties.get( - "MinInstancesInService", "0") - is_min_no_check = isinstance( - min_instances, (FindInMap, Ref) - ) - is_max_no_check = isinstance(self.MaxSize, - (If, FindInMap, Ref)) - - if not (is_min_no_check or is_max_no_check): - max_count = int(self.MaxSize) - min_count = int(min_instances) - - if min_count >= max_count: - raise ValueError( - "The UpdatePolicy attribute " - "MinInstancesInService must be less than the " - "autoscaling group's MaxSize") - - instance_config_types = [ - 'LaunchConfigurationName', - 'LaunchTemplate', - 'InstanceId' - ] - - mutually_exclusive(self.__class__.__name__, self.properties, - instance_config_types) - - availability_zones = self.properties.get('AvailabilityZones') - vpc_zone_identifier = self.properties.get('VPCZoneIdentifier') - if not availability_zones and not vpc_zone_identifier: - raise ValueError("Must specify AvailabilityZones and/or " - "VPCZoneIdentifier: http://docs.aws.amazon.com/A" - "WSCloudFormation/latest/UserGuide/aws-propertie" - "s-as-group.html#cfn-as-group-vpczoneidentifier") - return True - - -class LaunchConfiguration(AWSObject): - resource_type = "AWS::AutoScaling::LaunchConfiguration" - - props = { - 'AssociatePublicIpAddress': (boolean, False), - 'BlockDeviceMappings': (list, False), - 'ClassicLinkVPCId': (basestring, False), - 'ClassicLinkVPCSecurityGroups': ([basestring], False), - 'EbsOptimized': (boolean, False), - 'IamInstanceProfile': (basestring, False), - 'ImageId': (basestring, True), - 'InstanceId': (basestring, False), - 'InstanceMonitoring': (boolean, False), - 'InstanceType': (basestring, True), - 
'KernelId': (basestring, False), - 'KeyName': (basestring, False), - 'LaunchConfigurationName': (basestring, False), - 'Metadata': (Metadata, False), - 'PlacementTenancy': (basestring, False), - 'RamDiskId': (basestring, False), - 'SecurityGroups': (list, False), - 'SpotPrice': (basestring, False), - 'UserData': (basestring, False), - } - - -class StepAdjustments(AWSProperty): - props = { - 'MetricIntervalLowerBound': (integer, False), - 'MetricIntervalUpperBound': (integer, False), - 'ScalingAdjustment': (integer, True), - } - - -class MetricDimension(AWSProperty): - props = { - 'Name': (basestring, True), - 'Value': (basestring, True), - } - - -class CustomizedMetricSpecification(AWSProperty): - props = { - 'Dimensions': ([MetricDimension], False), - 'MetricName': (basestring, True), - 'Namespace': (basestring, True), - 'Statistic': (basestring, True), - 'Unit': (basestring, False), - } - - -class PredefinedMetricSpecification(AWSProperty): - props = { - 'PredefinedMetricType': (basestring, True), - 'ResourceLabel': (basestring, False), - } - - -class TargetTrackingConfiguration(AWSProperty): - props = { - 'CustomizedMetricSpecification': - (CustomizedMetricSpecification, False), - 'DisableScaleIn': (boolean, False), - 'PredefinedMetricSpecification': - (PredefinedMetricSpecification, False), - 'TargetValue': (float, True), - } - - -class ScalingPolicy(AWSObject): - resource_type = "AWS::AutoScaling::ScalingPolicy" - - props = { - 'AdjustmentType': (basestring, False), - 'AutoScalingGroupName': (basestring, True), - 'Cooldown': (integer, False), - 'EstimatedInstanceWarmup': (integer, False), - 'MetricAggregationType': (basestring, False), - 'MinAdjustmentMagnitude': (integer, False), - 'PolicyType': (basestring, False), - 'ScalingAdjustment': (integer, False), - 'StepAdjustments': ([StepAdjustments], False), - 'TargetTrackingConfiguration': (TargetTrackingConfiguration, False), - } - - -class ScheduledAction(AWSObject): - resource_type = "AWS::AutoScaling::ScheduledAction" - - props = { - 'AutoScalingGroupName': (basestring, True), - 'DesiredCapacity': (integer, False), - 'EndTime': (basestring, False), - 'MaxSize': (integer, False), - 'MinSize': (integer, False), - 'Recurrence': (basestring, False), - 'StartTime': (basestring, False), - } - - -class LifecycleHook(AWSObject): - resource_type = "AWS::AutoScaling::LifecycleHook" - - props = { - 'AutoScalingGroupName': (basestring, True), - 'DefaultResult': (basestring, False), - 'HeartbeatTimeout': (integer, False), - 'LifecycleHookName': (basestring, False), - 'LifecycleTransition': (basestring, True), - 'NotificationMetadata': (basestring, False), - 'NotificationTargetARN': (basestring, False), - 'RoleARN': (basestring, False), - } - - -class Trigger(AWSObject): - resource_type = "AWS::AutoScaling::Trigger" - - props = { - 'AutoScalingGroupName': (basestring, True), - 'BreachDuration': (integer, True), - 'Dimensions': (list, True), - 'LowerBreachScaleIncrement': (integer, False), - 'LowerThreshold': (integer, True), - 'MetricName': (basestring, True), - 'Namespace': (basestring, True), - 'Period': (integer, True), - 'Statistic': (basestring, True), - 'Unit': (basestring, False), - 'UpperBreachScaleIncrement': (integer, False), - 'UpperThreshold': (integer, True), - } - - -class EBSBlockDevice(AWSProperty): - # http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-as-launchconfig-blockdev-template.html - props = { - 'DeleteOnTermination': (boolean, False), - 'Encrypted': (boolean, False), - 'Iops': (integer, False), 
- 'SnapshotId': (basestring, False), - 'VolumeSize': (integer, False), - 'VolumeType': (basestring, False), - } - - -class BlockDeviceMapping(AWSProperty): - # http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-as-launchconfig-blockdev-mapping.html - props = { - 'DeviceName': (basestring, True), - 'Ebs': (EBSBlockDevice, False), - 'NoDevice': (boolean, False), - 'VirtualName': (basestring, False), - } diff --git a/troposphere/autoscalingplans.py b/troposphere/autoscalingplans.py deleted file mode 100644 index c830cfda5..000000000 --- a/troposphere/autoscalingplans.py +++ /dev/null @@ -1,150 +0,0 @@ -# Copyright (c) 2012-2018, Mark Peek -# All rights reserved. -# -# See LICENSE file for full license. - -from . import AWSObject, AWSProperty -from .validators import (boolean, double, integer, scalable_dimension_type, - service_namespace_type, statistic_type) - - -VALID_PREDICTIVESCALINGMAXCAPACITYBEHAVIOR = ( - 'SetForecastCapacityToMaxCapacity', - 'SetMaxCapacityToForecastCapacity', - 'SetMaxCapacityAboveForecastCapacity', -) -VALID_PREDICTIVESCALINGMODE = ('ForecastAndScale', 'ForecastOnly') -VALID_SCALINGPOLICYUPDATEBEHAVIOR = ('KeepExternalPolicies', - 'ReplaceExternalPolicies') - - -def validate_predictivescalingmaxcapacitybehavior( - predictivescalingmaxcapacitybehavior): - """Validate PredictiveScalingMaxCapacityBehavior for ScalingInstruction""" # noqa - - if predictivescalingmaxcapacitybehavior not in VALID_PREDICTIVESCALINGMAXCAPACITYBEHAVIOR: # noqa - raise ValueError("ScalingInstruction PredictiveScalingMaxCapacityBehavior must be one of: %s" % # noqa - ", ".join(VALID_PREDICTIVESCALINGMAXCAPACITYBEHAVIOR)) - return predictivescalingmaxcapacitybehavior - - -def validate_predictivescalingmode(predictivescalingmode): - """Validate PredictiveScalingMode for ScalingInstruction""" - - if predictivescalingmode not in VALID_PREDICTIVESCALINGMODE: - raise ValueError("ScalingInstruction PredictiveScalingMode must be one of: %s" % # noqa - ", ".join(VALID_PREDICTIVESCALINGMODE)) - return predictivescalingmode - - -def validate_scalingpolicyupdatebehavior(scalingpolicyupdatebehavior): - """Validate ScalingPolicyUpdateBehavior for ScalingInstruction""" - - if scalingpolicyupdatebehavior not in VALID_SCALINGPOLICYUPDATEBEHAVIOR: - raise ValueError("ScalingInstruction ScalingPolicyUpdateBehavior must be one of: %s" % # noqa - ", ".join(VALID_SCALINGPOLICYUPDATEBEHAVIOR)) - return scalingpolicyupdatebehavior - - -class TagFilter(AWSProperty): - props = { - 'Values': ([basestring], False), - 'Key': (basestring, True) - } - - -class ApplicationSource(AWSProperty): - props = { - 'CloudFormationStackARN': (basestring, False), - 'TagFilters': ([TagFilter], False) - } - - -class PredefinedScalingMetricSpecification(AWSProperty): - props = { - 'ResourceLabel': (basestring, False), - 'PredefinedScalingMetricType': (basestring, True) - } - - -class MetricDimension(AWSProperty): - props = { - 'Value': (basestring, True), - 'Name': (basestring, True) - } - - -class CustomizedScalingMetricSpecification(AWSProperty): - props = { - 'MetricName': (basestring, True), - 'Statistic': (statistic_type, True), - 'Dimensions': ([MetricDimension], False), - 'Unit': (basestring, False), - 'Namespace': (basestring, True) - } - - -class TargetTrackingConfiguration(AWSProperty): - props = { - 'ScaleOutCooldown': (integer, False), - 'TargetValue': (double, True), - 'PredefinedScalingMetricSpecification': ( - PredefinedScalingMetricSpecification, - False - ), - 'DisableScaleIn': (boolean, False), - 
'ScaleInCooldown': (integer, False), - 'EstimatedInstanceWarmup': (integer, False), - 'CustomizedScalingMetricSpecification': ( - CustomizedScalingMetricSpecification, - False - ) - } - - -class CustomizedLoadMetricSpecification(AWSObject): - props = { - 'Dimensions': ([MetricDimension], False), - 'MetricName': (basestring, True), - 'Namespace': (basestring, True), - 'Statistic': (basestring, True), - 'Unit': (basestring, False), - } - - -class PredefinedLoadMetricSpecification(AWSProperty): - props = { - 'PredefinedLoadMetricType': (basestring, True), - 'ResourceLabel': (basestring, False), - } - - -class ScalingInstruction(AWSProperty): - props = { - 'CustomizedLoadMetricSpecification': (CustomizedLoadMetricSpecification, False), # NOQA - 'DisableDynamicScaling': (boolean, False), - 'MaxCapacity': (integer, True), - 'MinCapacity': (integer, True), - 'PredefinedLoadMetricSpecification': (PredefinedLoadMetricSpecification, False), # NOQA - 'PredictiveScalingMaxCapacityBehavior': (validate_predictivescalingmaxcapacitybehavior, False), # NOQA - 'PredictiveScalingMaxCapacityBuffer': (integer, False), - 'PredictiveScalingMode': (validate_predictivescalingmode, False), - 'ResourceId': (basestring, True), - 'ScalableDimension': (scalable_dimension_type, True), - 'ScalingPolicyUpdateBehavior': (validate_scalingpolicyupdatebehavior, False), # NOQA - 'ScheduledActionBufferTime': (integer, False), - 'ServiceNamespace': (service_namespace_type, True), - 'TargetTrackingConfigurations': ( - [TargetTrackingConfiguration], - True - ), - } - - -class ScalingPlan(AWSObject): - resource_type = "AWS::AutoScalingPlans::ScalingPlan" - - props = { - 'ApplicationSource': (ApplicationSource, True), - 'ScalingInstructions': ([ScalingInstruction], True) - } diff --git a/troposphere/awslambda.py b/troposphere/awslambda.py deleted file mode 100644 index c20b539e4..000000000 --- a/troposphere/awslambda.py +++ /dev/null @@ -1,273 +0,0 @@ -import re -from . 
import AWSObject, AWSProperty, Join, Tags -from .validators import positive_integer - -MEMORY_VALUES = [x for x in range(128, 3009, 64)] -RESERVED_ENVIRONMENT_VARIABLES = [ - 'AWS_ACCESS_KEY', - 'AWS_ACCESS_KEY_ID', - 'AWS_DEFAULT_REGION', - 'AWS_EXECUTION_ENV', - 'AWS_LAMBDA_FUNCTION_MEMORY_SIZE', - 'AWS_LAMBDA_FUNCTION_NAME', - 'AWS_LAMBDA_FUNCTION_VERSION', - 'AWS_LAMBDA_LOG_GROUP_NAME', - 'AWS_LAMBDA_LOG_STREAM_NAME', - 'AWS_REGION', - 'AWS_SECRET_ACCESS_KEY', - 'AWS_SECRET_KEY', - 'AWS_SECURITY_TOKEN', - 'AWS_SESSION_TOKEN', - 'LAMBDA_RUNTIME_DIR', - 'LAMBDA_TASK_ROOT', - 'LANG', - 'LD_LIBRARY_PATH', - 'NODE_PATH', - 'PATH', - 'PYTHONPATH', - 'TZ' -] -ENVIRONMENT_VARIABLES_NAME_PATTERN = r'[a-zA-Z][a-zA-Z0-9_]+' - - -def validate_memory_size(memory_value): - """ Validate memory size for Lambda Function - :param memory_value: The memory size specified in the Function - :return: The provided memory size if it is valid - """ - memory_value = int(positive_integer(memory_value)) - if memory_value not in MEMORY_VALUES: - raise ValueError("Lambda Function memory size must be one of:\n %s" % - ", ".join(str(mb) for mb in MEMORY_VALUES)) - return memory_value - - -def validate_variables_name(variables): - for name in variables: - if name in RESERVED_ENVIRONMENT_VARIABLES: - raise ValueError("Lambda Function environment variables names" - " can't be none of:\n %s" % - ", ".join(RESERVED_ENVIRONMENT_VARIABLES)) - elif not re.match(ENVIRONMENT_VARIABLES_NAME_PATTERN, name): - raise ValueError("Invalid environment variable name: %s" % name) - - return variables - - -class Code(AWSProperty): - props = { - 'S3Bucket': (basestring, False), - 'S3Key': (basestring, False), - 'S3ObjectVersion': (basestring, False), - 'ZipFile': (basestring, False) - } - - @staticmethod - def check_zip_file(zip_file): - maxlength = 4096 - toolong = ( - "ZipFile length cannot exceed %d characters. For larger " - "source use S3Bucket/S3Key properties instead. " - "Current length: %d" - ) - - if zip_file is None: - return - - if isinstance(zip_file, basestring): - z_length = len(zip_file) - if z_length > maxlength: - raise ValueError(toolong % (maxlength, z_length)) - return - - if isinstance(zip_file, Join): - # This code tries to combine the length of all the strings in a - # join. If a part is not a string, we do not count it (length 0). 
- delimiter, values = zip_file.data['Fn::Join'] - - # Return if there are no values to join - if not values or len(values) <= 0: - return - - # Get the length of the delimiter - if isinstance(delimiter, basestring): - d_length = len(delimiter) - else: - d_length = 0 - - # Get the length of each value that will be joined - v_lengths = [len(v) for v in values if isinstance(v, basestring)] - - # Add all the lengths together - z_length = sum(v_lengths) - z_length += (len(values)-1) * d_length - - if z_length > maxlength: - raise ValueError(toolong % (maxlength, z_length)) - return - - def validate(self): - zip_file = self.properties.get('ZipFile') - s3_bucket = self.properties.get('S3Bucket') - s3_key = self.properties.get('S3Key') - s3_object_version = self.properties.get('S3ObjectVersion') - - if zip_file and s3_bucket: - raise ValueError("You can't specify both 'S3Bucket' and 'ZipFile'") - if zip_file and s3_key: - raise ValueError("You can't specify both 'S3Key' and 'ZipFile'") - if zip_file and s3_object_version: - raise ValueError( - "You can't specify both 'S3ObjectVersion' and 'ZipFile'" - ) - Code.check_zip_file(zip_file) - if not zip_file and not (s3_bucket and s3_key): - raise ValueError( - "You must specify a bucket location (both the 'S3Bucket' and " - "'S3Key' properties) or the 'ZipFile' property" - ) - - -class VPCConfig(AWSProperty): - - props = { - 'SecurityGroupIds': (list, True), - 'SubnetIds': (list, True), - } - - -class EventSourceMapping(AWSObject): - resource_type = "AWS::Lambda::EventSourceMapping" - - props = { - 'BatchSize': (positive_integer, False), - 'Enabled': (bool, False), - 'EventSourceArn': (basestring, True), - 'FunctionName': (basestring, True), - 'StartingPosition': (basestring, False), - } - - -class DeadLetterConfig(AWSProperty): - - props = { - 'TargetArn': (basestring, False), - } - - -class Environment(AWSProperty): - - props = { - 'Variables': (validate_variables_name, True), - } - - -class TracingConfig(AWSProperty): - - props = { - 'Mode': (basestring, False), - } - - -class Function(AWSObject): - resource_type = "AWS::Lambda::Function" - - props = { - 'Code': (Code, True), - 'Description': (basestring, False), - 'DeadLetterConfig': (DeadLetterConfig, False), - 'Environment': (Environment, False), - 'FunctionName': (basestring, False), - 'Handler': (basestring, True), - 'KmsKeyArn': (basestring, False), - 'MemorySize': (validate_memory_size, False), - 'Layers': ([basestring], False), - 'ReservedConcurrentExecutions': (positive_integer, False), - 'Role': (basestring, True), - 'Runtime': (basestring, True), - 'Tags': (Tags, False), - 'Timeout': (positive_integer, False), - 'TracingConfig': (TracingConfig, False), - 'VpcConfig': (VPCConfig, False), - } - - -class Permission(AWSObject): - resource_type = "AWS::Lambda::Permission" - - props = { - 'Action': (basestring, True), - 'EventSourceToken': (basestring, False), - 'FunctionName': (basestring, True), - 'Principal': (basestring, True), - 'SourceAccount': (basestring, False), - 'SourceArn': (basestring, False), - } - - -class VersionWeight(AWSProperty): - - props = { - 'FunctionVersion': (basestring, True), - 'FunctionWeight': (float, True), - } - - -class AliasRoutingConfiguration(AWSProperty): - - props = { - 'AdditionalVersionWeights': ([VersionWeight], True), - } - - -class Alias(AWSObject): - resource_type = "AWS::Lambda::Alias" - - props = { - 'Description': (basestring, False), - 'FunctionName': (basestring, True), - 'FunctionVersion': (basestring, True), - 'Name': (basestring, True), - 
'RoutingConfig': (AliasRoutingConfiguration, False), - } - - -class Version(AWSObject): - resource_type = "AWS::Lambda::Version" - - props = { - 'CodeSha256': (basestring, False), - 'Description': (basestring, False), - 'FunctionName': (basestring, True), - } - - -class Content(AWSProperty): - props = { - 'S3Bucket': (basestring, True), - 'S3Key': (basestring, True), - 'S3ObjectVersion': (basestring, False), - } - - -class LayerVersion(AWSObject): - resource_type = "AWS::Lambda::LayerVersion" - - props = { - 'CompatibleRuntimes': ([basestring], False), - 'Content': (Content, True), - 'Description': (basestring, False), - 'LayerName': (basestring, False), - 'LicenseInfo': (basestring, False), - } - - -class LayerVersionPermission(AWSObject): - resource_type = "AWS::Lambda::LayerVersionPermission" - - props = { - 'Action': (basestring, True), - 'LayerVersionArn': (basestring, True), - 'OrganizationId': (basestring, False), - 'Principal': (basestring, True), - } diff --git a/troposphere/batch.py b/troposphere/batch.py deleted file mode 100644 index 70fb0da4b..000000000 --- a/troposphere/batch.py +++ /dev/null @@ -1,186 +0,0 @@ -from . import AWSObject, AWSProperty -from .validators import exactly_one, integer, positive_integer - - -class LaunchTemplateSpecification(AWSProperty): - props = { - "LaunchTemplateId": (basestring, False), - "LaunchTemplateName": (basestring, False), - "Version": (basestring, False), - } - - def validate(self): - template_ids = [ - 'LaunchTemplateId', - 'LaunchTemplateName' - ] - exactly_one(self.__class__.__name__, self.properties, template_ids) - - -class ComputeResources(AWSProperty): - - props = { - "SpotIamFleetRole": (basestring, False), - "MaxvCpus": (positive_integer, True), - "SecurityGroupIds": ([basestring], True), - "BidPercentage": (positive_integer, False), - "Type": (basestring, True), - "Subnets": ([basestring], True), - "MinvCpus": (positive_integer, True), - "LaunchTemplate": (LaunchTemplateSpecification, False), - "ImageId": (basestring, False), - "InstanceRole": (basestring, True), - "InstanceTypes": ([basestring], True), - "Ec2KeyPair": (basestring, False), - "PlacementGroup": (basestring, False), - "Tags": (dict, False), - "DesiredvCpus": (positive_integer, False) - } - - -class MountPoints(AWSProperty): - - props = { - "ReadOnly": (bool, False), - "SourceVolume": (basestring, False), - "ContainerPath": (basestring, False) - } - - -class VolumesHost(AWSProperty): - - props = { - "SourcePath": (basestring, False) - } - - -class Volumes(AWSProperty): - - props = { - "Host": (VolumesHost, False), - "Name": (basestring, False) - } - - -class Environment(AWSProperty): - - props = { - "Value": (basestring, False), - "Name": (basestring, False) - } - - -class Ulimit(AWSProperty): - - props = { - "SoftLimit": (positive_integer, True), - "HardLimit": (positive_integer, True), - "Name": (basestring, True) - } - - -class ContainerProperties(AWSProperty): - - props = { - "MountPoints": ([MountPoints], False), - "User": (basestring, False), - "Volumes": ([Volumes], False), - "Command": ([basestring], False), - "Memory": (positive_integer, True), - "Privileged": (bool, False), - "Environment": ([Environment], False), - "JobRoleArn": (basestring, False), - "ReadonlyRootFilesystem": (bool, False), - "Ulimits": ([Ulimit], False), - "Vcpus": (positive_integer, True), - "Image": (basestring, True) - } - - -class RetryStrategy(AWSProperty): - - props = { - "Attempts": (positive_integer, False) - } - - -class Timeout(AWSProperty): - props = { - 
'AttemptDurationSeconds': (integer, False), - } - - -class JobDefinition(AWSObject): - resource_type = "AWS::Batch::JobDefinition" - - props = { - 'ContainerProperties': (ContainerProperties, True), - 'JobDefinitionName': (basestring, False), - 'Parameters': (dict, True), - 'RetryStrategy': (RetryStrategy, False), - 'Timeout': (Timeout, False), - 'Type': (basestring, True), - } - - -def validate_environment_state(environment_state): - """ Validate response type - :param environment_state: State of the environment - :return: The provided value if valid - """ - valid_states = [ - "ENABLED", - "DISABLED" - ] - if environment_state not in valid_states: - raise ValueError( - "{} is not a valid environment state".format(environment_state) - ) - return environment_state - - -class ComputeEnvironment(AWSObject): - resource_type = "AWS::Batch::ComputeEnvironment" - - props = { - "Type": (basestring, True), - "ServiceRole": (basestring, True), - "ComputeEnvironmentName": (basestring, False), - "ComputeResources": (ComputeResources, True), - "State": (validate_environment_state, False) - } - - -class ComputeEnvironmentOrder(AWSProperty): - - props = { - "ComputeEnvironment": (basestring, True), - "Order": (positive_integer, True) - } - - -def validate_queue_state(queue_state): - """ Validate response type - :param queue_state: State of the queue - :return: The provided value if valid - """ - valid_states = [ - "ENABLED", - "DISABLED" - ] - if queue_state not in valid_states: - raise ValueError( - "{} is not a valid queue state".format(queue_state) - ) - return queue_state - - -class JobQueue(AWSObject): - resource_type = "AWS::Batch::JobQueue" - - props = { - "ComputeEnvironmentOrder": ([ComputeEnvironmentOrder], True), - "Priority": (positive_integer, True), - "State": (validate_queue_state, False), - "JobQueueName": (basestring, False) - } diff --git a/troposphere/budgets.py b/troposphere/budgets.py deleted file mode 100644 index b351f774c..000000000 --- a/troposphere/budgets.py +++ /dev/null @@ -1,82 +0,0 @@ -# Copyright (c) 2012-2018, Mark Peek -# All rights reserved. -# -# See LICENSE file for full license. - -from . 
import AWSObject, AWSProperty -from .validators import boolean - - -class Spend(AWSProperty): - props = { - 'Amount': (float, True), - 'Unit': (basestring, True), - } - - -class CostTypes(AWSProperty): - props = { - 'IncludeCredit': (boolean, False), - 'IncludeDiscount': (boolean, False), - 'IncludeOtherSubscription': (boolean, False), - 'IncludeRecurring': (boolean, False), - 'IncludeRefund': (boolean, False), - 'IncludeSubscription': (boolean, False), - 'IncludeSupport': (boolean, False), - 'IncludeTax': (boolean, False), - 'IncludeUpfront': (boolean, False), - 'UseAmortized': (boolean, False), - 'UseBlended': (boolean, False), - } - - -class TimePeriod(AWSProperty): - props = { - 'End': (basestring, False), - 'Start': (basestring, False), - } - - -class BudgetData(AWSProperty): - props = { - 'BudgetLimit': (Spend, False), - 'BudgetName': (basestring, False), - 'BudgetType': (basestring, True), - 'CostFilters': (dict, False), - 'CostTypes': (CostTypes, False), - 'TimePeriod': (TimePeriod, False), - 'TimeUnit': (basestring, True), - } - - -class Notification(AWSProperty): - props = { - 'ComparisonOperator': (basestring, True), - 'NotificationType': (basestring, True), - 'Threshold': (float, True), - 'ThresholdType': (basestring, False), - } - - -class Subscriber(AWSProperty): - props = { - 'Address': (basestring, True), - 'SubscriptionType': (basestring, True), - } - - -class NotificationWithSubscribers(AWSProperty): - props = { - 'Notification': (Notification, True), - 'Subscribers': ([Subscriber], True), - } - - -class Budget(AWSObject): - resource_type = "AWS::Budgets::Budget" - - props = { - 'Budget': (BudgetData, True), - 'NotificationsWithSubscribers': - ([NotificationWithSubscribers], False), - } diff --git a/troposphere/certificatemanager.py b/troposphere/certificatemanager.py deleted file mode 100644 index 0cacd9e37..000000000 --- a/troposphere/certificatemanager.py +++ /dev/null @@ -1,20 +0,0 @@ -from . import AWSObject, AWSProperty, Tags - - -class DomainValidationOption(AWSProperty): - props = { - 'DomainName': (basestring, True), - 'ValidationDomain': (basestring, True), - } - - -class Certificate(AWSObject): - resource_type = "AWS::CertificateManager::Certificate" - - props = { - 'DomainName': (basestring, True), - 'DomainValidationOptions': ([DomainValidationOption], False), - 'SubjectAlternativeNames': ([basestring], False), - 'Tags': ((Tags, list), False), - 'ValidationMethod': (basestring, False), - } diff --git a/troposphere/cloud9.py b/troposphere/cloud9.py deleted file mode 100644 index fff8c6f0a..000000000 --- a/troposphere/cloud9.py +++ /dev/null @@ -1,28 +0,0 @@ -# Copyright (c) 2012-2017, Mark Peek -# All rights reserved. -# -# See LICENSE file for full license. - -from . 
import AWSObject, AWSProperty -from .validators import integer - - -class Repository(AWSProperty): - props = { - 'PathComponent': (basestring, True), - 'RepositoryUrl': (basestring, True), - } - - -class EnvironmentEC2(AWSObject): - resource_type = "AWS::Cloud9::EnvironmentEC2" - - props = { - 'AutomaticStopTimeMinutes': (integer, False), - 'Description': (basestring, False), - 'InstanceType': (basestring, True), - 'Name': (basestring, False), - 'OwnerArn': (basestring, False), - 'Repositories': ([Repository], False), - 'SubnetId': (basestring, False), - } diff --git a/troposphere/cloudformation.py b/troposphere/cloudformation.py deleted file mode 100644 index 2088a3dd7..000000000 --- a/troposphere/cloudformation.py +++ /dev/null @@ -1,222 +0,0 @@ -# Copyright (c) 2013, Mark Peek -# All rights reserved. -# -# See LICENSE file for full license. - -from . import AWSHelperFn, AWSObject, AWSProperty, BaseAWSObject, Tags -from . import encode_to_dict -from .validators import boolean, check_required, encoding, integer - - -class Stack(AWSObject): - resource_type = "AWS::CloudFormation::Stack" - - props = { - 'NotificationARNs': ([basestring], False), - 'Parameters': (dict, False), - 'Tags': ((Tags, list), False), - 'TemplateURL': (basestring, True), - 'TimeoutInMinutes': (integer, False), - } - - -class AWSCustomObject(BaseAWSObject): - dictname = 'Properties' - - -class CustomResource(AWSCustomObject): - resource_type = "AWS::CloudFormation::CustomResource" - - props = { - 'ServiceToken': (basestring, True) - } - - -class WaitCondition(AWSObject): - resource_type = "AWS::CloudFormation::WaitCondition" - - props = { - 'Count': (integer, False), - 'Handle': (basestring, False), - 'Timeout': (integer, False), - } - - def validate(self): - if 'CreationPolicy' in self.resource: - for k in self.props.keys(): - if k in self.properties: - raise ValueError( - "Property %s cannot be specified with CreationPolicy" % - k - ) - else: - required = ['Handle', 'Timeout'] - check_required(self.__class__.__name__, self.properties, required) - - -class WaitConditionHandle(AWSObject): - resource_type = "AWS::CloudFormation::WaitConditionHandle" - - props = {} - - -class Metadata(AWSHelperFn): - def __init__(self, *args): - self.data = args - - def to_dict(self): - t = [] - for i in self.data: - t += encode_to_dict(i).items() - return dict(t) - - -class InitFileContext(AWSHelperFn): - def __init__(self, data): - self.data = data - - -class InitFile(AWSProperty): - props = { - 'content': (basestring, False), - 'mode': (basestring, False), - 'owner': (basestring, False), - 'encoding': (encoding, False), - 'group': (basestring, False), - 'source': (basestring, False), - 'authentication': (basestring, False), - 'context': (InitFileContext, False) - } - - -class InitFiles(AWSHelperFn): - def __init__(self, data): - self.validate(data) - self.data = data - - def validate(self, data): - for k in data: - if not isinstance(data[k], InitFile): - raise ValueError("File '" + k + "' must be of type InitFile") - - -class InitService(AWSProperty): - props = { - 'ensureRunning': (boolean, False), - 'enabled': (boolean, False), - 'files': (list, False), - 'packages': (dict, False), - 'sources': (list, False), - 'commands': (list, False) - } - - -class InitServices(AWSHelperFn): - def __init__(self, data): - self.validate(data) - self.data = data - - def validate(self, data): - for k in data: - if not isinstance(data[k], InitService): - raise ValueError( - "Service '" + k + "' must be of type InitService" - ) - - -class 
InitConfigSets(AWSHelperFn): - def __init__(self, **kwargs): - self.validate(dict(kwargs)) - self.data = kwargs - - def validate(self, config_sets): - for k, v in config_sets.iteritems(): - if not isinstance(v, list): - raise ValueError('configSets values must be of type list') - - -class InitConfig(AWSProperty): - props = { - 'groups': (dict, False), - 'users': (dict, False), - 'sources': (dict, False), - 'packages': (dict, False), - 'files': (dict, False), - 'commands': (dict, False), - 'services': (dict, False) - } - - -def validate_authentication_type(auth_type): - valid_types = ['S3', 'basic'] - if auth_type not in valid_types: - raise ValueError('Type needs to be one of %r' % valid_types) - return auth_type - - -class AuthenticationBlock(AWSProperty): - props = { - "accessKeyId": (basestring, False), - "buckets": ([basestring], False), - "password": (basestring, False), - "secretKey": (basestring, False), - "type": (validate_authentication_type, False), - "uris": ([basestring], False), - "username": (basestring, False), - "roleName": (basestring, False) - } - - -class Authentication(AWSHelperFn): - def __init__(self, data): - self.validate(data) - self.data = {"AWS::CloudFormation::Authentication": data} - - def validate(self, data): - for k, v in data.iteritems(): - if not isinstance(v, AuthenticationBlock): - raise ValueError( - 'authentication block must be of type' - ' cloudformation.AuthenticationBlock' - ) - - -class Init(AWSHelperFn): - def __init__(self, data, **kwargs): - self.validate(data, dict(kwargs)) - - if isinstance(data, InitConfigSets): - self.data = { - 'AWS::CloudFormation::Init': dict({'configSets': data}, - **kwargs) - } - else: - self.data = {'AWS::CloudFormation::Init': data} - - def validate(self, data, config_sets): - if isinstance(data, InitConfigSets): - for k, v in sorted(config_sets.iteritems()): - if not isinstance(v, InitConfig): - raise ValueError( - 'init configs must of type ', - 'cloudformation.InitConfigSet' - ) - else: - if 'config' not in data: - raise ValueError('config property is required') - if not isinstance(data['config'], InitConfig): - raise ValueError( - 'config property must be of type cloudformation.InitConfig' - ) - - -class Macro(AWSCustomObject): - resource_type = "AWS::CloudFormation::Macro" - - props = { - 'Description': (basestring, False), - 'FunctionName': (basestring, True), - 'LogGroupName': (basestring, False), - 'LogRoleARN': (basestring, False), - 'Name': (basestring, True), - } diff --git a/troposphere/cloudfront.py b/troposphere/cloudfront.py deleted file mode 100644 index 6445dad47..000000000 --- a/troposphere/cloudfront.py +++ /dev/null @@ -1,228 +0,0 @@ -# Copyright (c) 2012-2013, Mark Peek -# All rights reserved. -# -# See LICENSE file for full license. - -from . 
import AWSObject, AWSProperty, Tags -from .validators import (boolean, cloudfront_restriction_type, - cloudfront_event_type, - cloudfront_forward_type, - cloudfront_viewer_protocol_policy, integer, - positive_integer, priceclass_type, network_port) - - -class Cookies(AWSProperty): - props = { - 'Forward': (cloudfront_forward_type, True), - 'WhitelistedNames': ([basestring], False), - } - - -class ForwardedValues(AWSProperty): - props = { - 'Cookies': (Cookies, False), - 'Headers': ([basestring], False), - 'QueryString': (boolean, True), - 'QueryStringCacheKeys': ([basestring], False), - } - - -class LambdaFunctionAssociation(AWSProperty): - props = { - 'EventType': (cloudfront_event_type, False), - 'LambdaFunctionARN': (basestring, False), - } - - -class CacheBehavior(AWSProperty): - props = { - 'AllowedMethods': ([basestring], False), - 'CachedMethods': ([basestring], False), - 'Compress': (boolean, False), - 'DefaultTTL': (integer, False), - 'FieldLevelEncryptionId': (basestring, False), - 'ForwardedValues': (ForwardedValues, True), - 'LambdaFunctionAssociations': ([LambdaFunctionAssociation], False), - 'MaxTTL': (integer, False), - 'MinTTL': (integer, False), - 'PathPattern': (basestring, True), - 'SmoothStreaming': (boolean, False), - 'TargetOriginId': (basestring, True), - 'TrustedSigners': ([basestring], False), - 'ViewerProtocolPolicy': (cloudfront_viewer_protocol_policy, True), - } - - -class DefaultCacheBehavior(AWSProperty): - props = { - 'AllowedMethods': ([basestring], False), - 'CachedMethods': ([basestring], False), - 'Compress': (boolean, False), - 'DefaultTTL': (integer, False), - 'FieldLevelEncryptionId': (basestring, False), - 'ForwardedValues': (ForwardedValues, True), - 'LambdaFunctionAssociations': ([LambdaFunctionAssociation], False), - 'MaxTTL': (integer, False), - 'MinTTL': (integer, False), - 'SmoothStreaming': (boolean, False), - 'TargetOriginId': (basestring, True), - 'TrustedSigners': (list, False), - 'ViewerProtocolPolicy': (cloudfront_viewer_protocol_policy, True), - } - - -class S3Origin(AWSProperty): - props = { - 'DomainName': (basestring, True), - 'OriginAccessIdentity': (basestring, False), - } - - -class CustomOriginConfig(AWSProperty): - props = { - 'HTTPPort': (network_port, False), - 'HTTPSPort': (network_port, False), - 'OriginKeepaliveTimeout': (integer, False), - 'OriginProtocolPolicy': (basestring, True), - 'OriginReadTimeout': (integer, False), - 'OriginSSLProtocols': ([basestring], False), - } - - -class OriginCustomHeader(AWSProperty): - props = { - 'HeaderName': (basestring, True), - 'HeaderValue': (basestring, True), - } - - -class S3OriginConfig(AWSProperty): - props = { - 'OriginAccessIdentity': (basestring, False), - } - - -class Origin(AWSProperty): - props = { - 'CustomOriginConfig': (CustomOriginConfig, False), - 'DomainName': (basestring, True), - 'Id': (basestring, True), - 'OriginCustomHeaders': ([OriginCustomHeader], False), - 'OriginPath': (basestring, False), - 'S3OriginConfig': (S3OriginConfig, False), - - } - - -class Logging(AWSProperty): - props = { - 'Bucket': (basestring, True), - 'IncludeCookies': (boolean, False), - 'Prefix': (basestring, False), - } - - -class CustomErrorResponse(AWSProperty): - props = { - 'ErrorCachingMinTTL': (positive_integer, False), - 'ErrorCode': (positive_integer, True), - 'ResponseCode': (positive_integer, False), - 'ResponsePagePath': (basestring, False), - } - - -class GeoRestriction(AWSProperty): - props = { - 'Locations': ([basestring], False), - 'RestrictionType': 
(cloudfront_restriction_type, True), - } - - -class Restrictions(AWSProperty): - props = { - 'GeoRestriction': (GeoRestriction, True), - } - - -class ViewerCertificate(AWSProperty): - props = { - 'AcmCertificateArn': (basestring, False), - 'CloudFrontDefaultCertificate': (boolean, False), - 'IamCertificateId': (basestring, False), - 'MinimumProtocolVersion': (basestring, False), - 'SslSupportMethod': (basestring, False), - } - - -class DistributionConfig(AWSProperty): - props = { - 'Aliases': (list, False), - 'CacheBehaviors': ([CacheBehavior], False), - 'Comment': (basestring, False), - 'CustomErrorResponses': ([CustomErrorResponse], False), - 'DefaultCacheBehavior': (DefaultCacheBehavior, True), - 'DefaultRootObject': (basestring, False), - 'Enabled': (boolean, True), - 'HttpVersion': (basestring, False), - 'IPV6Enabled': (boolean, False), - 'Logging': (Logging, False), - 'Origins': ([Origin], True), - 'PriceClass': (priceclass_type, False), - 'Restrictions': (Restrictions, False), - 'ViewerCertificate': (ViewerCertificate, False), - 'WebACLId': (basestring, False), - } - - -class Distribution(AWSObject): - resource_type = "AWS::CloudFront::Distribution" - - props = { - 'DistributionConfig': (DistributionConfig, True), - 'Tags': ((Tags, list), False), - } - - -class CloudFrontOriginAccessIdentityConfig(AWSProperty): - props = { - 'Comment': (basestring, True), - } - - -class CloudFrontOriginAccessIdentity(AWSObject): - resource_type = "AWS::CloudFront::CloudFrontOriginAccessIdentity" - - props = { - 'CloudFrontOriginAccessIdentityConfig': ( - CloudFrontOriginAccessIdentityConfig, - True, - ), - } - - -class TrustedSigners(AWSProperty): - props = { - 'AwsAccountNumbers': ([basestring], False), - 'Enabled': (boolean, True), - } - - -class StreamingDistributionConfig(AWSProperty): - props = { - 'Aliases': ([basestring], False), - 'Comment': (basestring, True), - 'Enabled': (boolean, True), - 'Logging': (Logging, False), - 'PriceClass': (priceclass_type, False), - 'S3Origin': (S3Origin, True), - 'TrustedSigners': (TrustedSigners, True), - } - - -class StreamingDistribution(AWSObject): - resource_type = "AWS::CloudFront::StreamingDistribution" - - props = { - 'StreamingDistributionConfig': (StreamingDistributionConfig, True,), - 'Tags': ((Tags, list), False), - } diff --git a/troposphere/cloudtrail.py b/troposphere/cloudtrail.py deleted file mode 100644 index 0eb62c2fb..000000000 --- a/troposphere/cloudtrail.py +++ /dev/null @@ -1,37 +0,0 @@ -from . 
import AWSObject, Tags, AWSProperty -from .validators import boolean - - -class DataResource(AWSProperty): - props = { - 'Type': (basestring, True), - 'Values': ([basestring], False), - } - - -class EventSelector(AWSProperty): - props = { - 'DataResources': ([DataResource], False), - 'IncludeManagementEvents': (boolean, False), - 'ReadWriteType': (basestring, False), - } - - -class Trail(AWSObject): - resource_type = "AWS::CloudTrail::Trail" - - props = { - 'CloudWatchLogsLogGroupArn': (basestring, False), - 'CloudWatchLogsRoleArn': (basestring, False), - 'EnableLogFileValidation': (boolean, False), - 'EventSelectors': ([EventSelector], False), - 'IncludeGlobalServiceEvents': (boolean, False), - 'IsLogging': (boolean, True), - 'IsMultiRegionTrail': (boolean, False), - 'KMSKeyId': (basestring, False), - 'S3BucketName': (basestring, True), - 'S3KeyPrefix': (basestring, False), - 'SnsTopicName': (basestring, False), - 'Tags': (Tags, False), - 'TrailName': (basestring, False), - } diff --git a/troposphere/cloudwatch.py b/troposphere/cloudwatch.py deleted file mode 100644 index fe9944968..000000000 --- a/troposphere/cloudwatch.py +++ /dev/null @@ -1,110 +0,0 @@ -# Copyright (c) 2013, Mark Peek -# All rights reserved. -# -# See LICENSE file for full license. - -from . import AWSObject, AWSProperty -from .validators import (boolean, double, exactly_one, json_checker, - positive_integer, integer) - - -VALID_UNITS = ('Seconds', 'Microseconds', 'Milliseconds', 'Bytes', 'Kilobytes', - 'Megabytes', 'Gigabytes', 'Terabytes', 'Bits', 'Kilobits', - 'Megabits', 'Gigabits', 'Terabits', 'Percent', 'Count', - 'Bytes/Second', 'Kilobytes/Second', 'Megabytes/Second', - 'Gigabytes/Second', 'Terabytes/Second', 'Bits/Second', - 'Kilobits/Second', 'Megabits/Second', 'Gigabits/Second', - 'Terabits/Second', 'Count/Second', 'None') - - -def validate_unit(unit): - """Validate Units""" - - if unit not in VALID_UNITS: - raise ValueError("MetricStat Unit must be one of: %s" % - ", ".join(VALID_UNITS)) - return unit - - -class MetricDimension(AWSProperty): - props = { - 'Name': (basestring, True), - 'Value': (basestring, True), - } - - -class Metric(AWSProperty): - props = { - 'Dimensions': ([MetricDimension], False), - 'MetricName': (basestring, False), - 'Namespace': (basestring, False), - } - - -class MetricStat(AWSProperty): - props = { - 'Metric': (Metric, True), - 'Period': (integer, True), - 'Stat': (basestring, True), - 'Unit': (validate_unit, False), - } - - -class MetricDataQuery(AWSProperty): - props = { - 'Expression': (basestring, False), - 'Id': (basestring, True), - 'Label': (basestring, False), - 'MetricStat': (MetricStat, False), - 'ReturnData': (boolean, False), - } - - -class Alarm(AWSObject): - resource_type = "AWS::CloudWatch::Alarm" - - props = { - 'ActionsEnabled': (boolean, False), - 'AlarmActions': ([basestring], False), - 'AlarmDescription': (basestring, False), - 'AlarmName': (basestring, False), - 'ComparisonOperator': (basestring, True), - 'DatapointsToAlarm': (positive_integer, False), - 'Dimensions': ([MetricDimension], False), - 'EvaluateLowSampleCountPercentile': (basestring, False), - 'EvaluationPeriods': (positive_integer, True), - 'ExtendedStatistic': (basestring, False), - 'InsufficientDataActions': ([basestring], False), - 'MetricName': (basestring, False), - 'Metrics': ([MetricDataQuery], False), - 'Namespace': (basestring, False), - 'OKActions': ([basestring], False), - 'Period': (positive_integer, False), - 'Statistic': (basestring, False), - 'Threshold': (double, True), - 
'TreatMissingData': (basestring, False), - 'Unit': (basestring, False), - } - - def validate(self): - conds = [ - 'ExtendedStatistic', - 'Metrics', - 'Statistic', - ] - exactly_one(self.__class__.__name__, self.properties, conds) - - -class Dashboard(AWSObject): - resource_type = "AWS::CloudWatch::Dashboard" - - props = { - 'DashboardBody': ((basestring, dict), True), - 'DashboardName': (basestring, False), - } - - def validate(self): - name = 'DashboardBody' - if name in self.properties: - dashboard_body = self.properties.get(name) - self.properties[name] = json_checker(dashboard_body) diff --git a/troposphere/codebuild.py b/troposphere/codebuild.py deleted file mode 100644 index ebe5793c7..000000000 --- a/troposphere/codebuild.py +++ /dev/null @@ -1,242 +0,0 @@ -# Copyright (c) 2016, Mark Peek -# All rights reserved. -# -# See LICENSE file for full license. - -from . import AWSHelperFn, AWSObject, AWSProperty, Tags -from .validators import boolean, integer, positive_integer - - -class SourceAuth(AWSProperty): - props = { - 'Resource': (basestring, False), - 'Type': (basestring, True), - } - - def validate(self): - valid_types = [ - 'OAUTH' - ] - auth_types = self.properties.get('Type') - if auth_types not in valid_types: - raise ValueError('SourceAuth Type: must be one of %s' % - ','.join(valid_types)) - - -class Artifacts(AWSProperty): - props = { - 'ArtifactIdentifier': (basestring, False), - 'EncryptionDisabled': (boolean, False), - 'Location': (basestring, False), - 'Name': (basestring, False), - 'NamespaceType': (basestring, False), - 'OverrideArtifactName': (boolean, False), - 'Packaging': (basestring, False), - 'Path': (basestring, False), - 'Type': (basestring, True), - } - - def validate(self): - valid_types = [ - 'CODEPIPELINE', - 'NO_ARTIFACTS', - 'S3', - ] - artifact_type = self.properties.get('Type') - if artifact_type not in valid_types: - raise ValueError('Artifacts Type: must be one of %s' % - ','.join(valid_types)) - - if artifact_type == 'S3': - for required_property in ['Name', 'Location']: - if not self.properties.get(required_property): - raise ValueError( - 'Artifacts Type S3: requires %s to be set' % - required_property - ) - - -class EnvironmentVariable(AWSProperty): - props = { - 'Name': (basestring, True), - 'Type': (basestring, False), - 'Value': (basestring, True), - } - - def validate(self): - if 'Type' in self.properties: - valid_types = [ - 'PARAMETER_STORE', - 'PLAINTEXT', - ] - env_type = self.properties.get('Type') - if env_type not in valid_types: - raise ValueError( - 'EnvironmentVariable Type: must be one of %s' % - ','.join(valid_types)) - - -class Environment(AWSProperty): - props = { - 'Certificate': (basestring, False), - 'ComputeType': (basestring, True), - 'EnvironmentVariables': ((list, [EnvironmentVariable]), False), - 'Image': (basestring, True), - 'PrivilegedMode': (boolean, False), - 'Type': (basestring, True), - } - - def validate(self): - valid_types = [ - 'LINUX_CONTAINER', - 'WINDOWS_CONTAINER', - ] - env_type = self.properties.get('Type') - if env_type not in valid_types: - raise ValueError('Environment Type: must be one of %s' % - ','.join(valid_types)) - - -class ProjectCache(AWSProperty): - props = { - 'Location': (basestring, False), - 'Type': (basestring, True), - } - - def validate(self): - valid_types = [ - 'NO_CACHE', - 'S3', - ] - cache_type = self.properties.get('Type') - if cache_type not in valid_types: - raise ValueError('ProjectCache Type: must be one of %s' % - ','.join(valid_types)) - - -class Source(AWSProperty): - 
props = { - 'Auth': (SourceAuth, False), - 'BuildSpec': (basestring, False), - 'GitCloneDepth': (positive_integer, False), - 'InsecureSsl': (boolean, False), - 'Location': (basestring, False), - 'ReportBuildStatus': (boolean, False), - 'SourceIdentifier': (basestring, False), - 'Type': (basestring, True), - } - - def validate(self): - valid_types = [ - 'BITBUCKET', - 'CODECOMMIT', - 'CODEPIPELINE', - 'GITHUB', - 'GITHUB_ENTERPRISE', - 'NO_SOURCE', - 'S3', - ] - - location_agnostic_types = [ - 'CODEPIPELINE', - 'NO_SOURCE', - ] - - source_type = self.properties.get('Type') - - # Don't do additional checks if source_type can't - # be determined (for example, being a Ref). - if isinstance(source_type, AWSHelperFn): - return - - if source_type not in valid_types: - raise ValueError('Source Type: must be one of %s' % - ','.join(valid_types)) - - location = self.properties.get('Location') - - if source_type not in location_agnostic_types and not location: - raise ValueError( - 'Source Location: must be defined when type is %s' % - source_type - ) - - auth = self.properties.get('Auth') - if auth is not None and source_type != 'GITHUB': - raise ValueError("SourceAuth: must only be defined when using " - "'GITHUB' Source Type.") - - -class VpcConfig(AWSProperty): - props = { - 'SecurityGroupIds': ([basestring], True), - 'Subnets': ([basestring], True), - 'VpcId': (basestring, True), - } - - -class ProjectTriggers(AWSProperty): - props = { - 'Webhook': (boolean, False), - } - - -def validate_status(status): - """ Validate status - :param status: The Status of CloudWatchLogs or S3Logs - :return: The provided value if valid - """ - valid_statuses = [ - 'ENABLED', - 'DISABLED' - ] - - if status not in valid_statuses: - raise ValueError('Status: must be one of %s' % - ','.join(valid_statuses)) - return status - - -class CloudWatchLogs(AWSProperty): - props = { - "Status": (validate_status, True), - "GroupName": (basestring, False), - "StreamName": (basestring, False) - } - - -class S3Logs(AWSProperty): - props = { - "Status": (validate_status, True), - "Location": (basestring, False) - } - - -class LogsConfig(AWSProperty): - props = { - 'CloudWatchLogs': (CloudWatchLogs, False), - 'S3Logs': (S3Logs, False) - } - - -class Project(AWSObject): - resource_type = "AWS::CodeBuild::Project" - - props = { - 'Artifacts': (Artifacts, True), - 'BadgeEnabled': (boolean, False), - 'Cache': (ProjectCache, False), - 'Description': (basestring, False), - 'EncryptionKey': (basestring, False), - 'Environment': (Environment, True), - "LogsConfig": (LogsConfig, False), - 'Name': (basestring, True), - 'SecondaryArtifacts': ([Artifacts], False), - 'SecondarySources': ([Source], False), - 'ServiceRole': (basestring, True), - 'Source': (Source, True), - 'Tags': (Tags, False), - 'TimeoutInMinutes': (integer, False), - 'Triggers': (ProjectTriggers, False), - 'VpcConfig': (VpcConfig, False), - } diff --git a/troposphere/codecommit.py b/troposphere/codecommit.py deleted file mode 100644 index d9c36351f..000000000 --- a/troposphere/codecommit.py +++ /dev/null @@ -1,42 +0,0 @@ -# Copyright (c) 2016, Mark Peek -# All rights reserved. -# -# See LICENSE file for full license. - -from . 
import AWSHelperFn, AWSObject, AWSProperty - - -class Trigger(AWSProperty): - props = { - 'Branches': ([basestring], False), - 'CustomData': (basestring, False), - 'DestinationArn': (basestring, False), - 'Events': ([basestring], False), - 'Name': (basestring, False), - } - - def validate(self): - valid = [ - 'all', - 'createReference', - 'deleteReference', - 'updateReference', - ] - events = self.properties.get('Events') - if events and not isinstance(events, AWSHelperFn): - if 'all' in events and len(events) != 1: - raise ValueError('Trigger events: all must be used alone') - else: - for e in events: - if e not in valid and not isinstance(e, AWSHelperFn): - raise ValueError('Trigger: invalid event %s' % e) - - -class Repository(AWSObject): - resource_type = "AWS::CodeCommit::Repository" - - props = { - 'RepositoryDescription': (basestring, False), - 'RepositoryName': (basestring, True), - 'Triggers': ([Trigger], False), - } diff --git a/troposphere/codedeploy.py b/troposphere/codedeploy.py deleted file mode 100644 index c35a1ea2a..000000000 --- a/troposphere/codedeploy.py +++ /dev/null @@ -1,240 +0,0 @@ -# Copyright (c) 2015, Mark Peek -# All rights reserved. -# -# See LICENSE file for full license. - -from . import AWSObject, AWSProperty -from .validators import boolean, exactly_one, mutually_exclusive,\ - positive_integer - -KEY_ONLY = "KEY_ONLY" -VALUE_ONLY = "VALUE_ONLY" -KEY_AND_VALUE = "KEY_AND_VALUE" - - -class GitHubLocation(AWSProperty): - props = { - 'CommitId': (basestring, True), - 'Repository': (basestring, True), - } - - -class S3Location(AWSProperty): - props = { - 'Bucket': (basestring, True), - 'BundleType': (basestring, True), - 'ETag': (basestring, False), - 'Key': (basestring, True), - 'Version': (basestring, False), - } - - -class Revision(AWSProperty): - props = { - 'GitHubLocation': (GitHubLocation, False), - 'RevisionType': (basestring, False), - 'S3Location': (S3Location, False), - } - - -def deployment_option_validator(x): - valid_values = ['WITH_TRAFFIC_CONTROL', 'WITHOUT_TRAFFIC_CONTROL'] - if x not in valid_values: - raise ValueError("Deployment Option value must be one of: %s" % - ', '.join(valid_values)) - return x - - -def deployment_type_validator(x): - valid_values = ['IN_PLACE', 'BLUE_GREEN'] - if x not in valid_values: - raise ValueError("Deployment Type value must be one of: %s" % - ', '.join(valid_values)) - return x - - -class AutoRollbackConfiguration(AWSProperty): - props = { - 'Enabled': (bool, False), - 'Events': ([basestring], False) - } - - -class Deployment(AWSProperty): - props = { - 'Description': (basestring, False), - 'IgnoreApplicationStopFailures': (bool, False), - 'Revision': (Revision, True), - } - - -class DeploymentStyle(AWSProperty): - props = { - 'DeploymentOption': (deployment_option_validator, False), - 'DeploymentType': (deployment_type_validator, False), - } - - -class Ec2TagFilters(AWSProperty): - props = { - 'Key': (basestring, False), - 'Type': (basestring, True), - 'Value': (basestring, False), - } - - -class TagFilters(AWSProperty): - props = { - 'Key': (basestring, False), - 'Type': (basestring, False), - 'Value': (basestring, False) - } - - -class ElbInfoList(AWSProperty): - props = { - 'Name': (basestring, False) - } - - -class TargetGroupInfoList(AWSProperty): - props = { - 'Name': (basestring, False) - } - - -class LoadBalancerInfo(AWSProperty): - props = { - 'ElbInfoList': ([ElbInfoList], False), - 'TargetGroupInfoList': ([TargetGroupInfoList], False), - } - - def validate(self): - conds = [ - 'ElbInfoList', - 
'TargetGroupInfoList' - ] - exactly_one(self.__class__.__name__, self.properties, conds) - - -class OnPremisesInstanceTagFilters(AWSProperty): - props = { - 'Key': (basestring, False), - 'Type': (basestring, False), - 'Value': (basestring, False), - } - - -class MinimumHealthyHosts(AWSProperty): - props = { - 'Type': (basestring, False), - 'Value': (positive_integer, False), - } - - -class Application(AWSObject): - resource_type = "AWS::CodeDeploy::Application" - - props = { - 'ApplicationName': (basestring, False), - 'ComputePlatform': (basestring, False), - } - - -class DeploymentConfig(AWSObject): - resource_type = "AWS::CodeDeploy::DeploymentConfig" - - props = { - 'DeploymentConfigName': (basestring, False), - 'MinimumHealthyHosts': (MinimumHealthyHosts, False), - } - - -class Alarm(AWSProperty): - props = { - 'Name': (basestring, False), - } - - -class AlarmConfiguration(AWSProperty): - props = { - 'Alarms': ([Alarm], False), - 'Enabled': (boolean, False), - 'IgnorePollAlarmFailure': (boolean, False), - } - - -class TriggerConfig(AWSProperty): - props = { - 'TriggerEvents': ([basestring], False), - 'TriggerName': (basestring, False), - 'TriggerTargetArn': (basestring, False), - } - - -class Ec2TagSetListObject(AWSProperty): - props = { - 'Ec2TagGroup': ([Ec2TagFilters], False) - } - - -class Ec2TagSet(AWSProperty): - props = { - 'Ec2TagSetList': ([Ec2TagSetListObject], False) - } - - -class OnPremisesTagSetObject(AWSProperty): - props = { - 'OnPremisesTagGroup': ([TagFilters], False) - } - - -class OnPremisesTagSetList(AWSProperty): - props = { - 'OnPremisesTagSetList': ([OnPremisesTagSetObject], False) - } - - -class OnPremisesTagSet(AWSProperty): - props = { - 'OnPremisesTagSetList': (OnPremisesTagSetList, False) - } - - -class DeploymentGroup(AWSObject): - resource_type = "AWS::CodeDeploy::DeploymentGroup" - - props = { - 'AlarmConfiguration': (AlarmConfiguration, False), - 'ApplicationName': (basestring, True), - 'AutoRollbackConfiguration': (AutoRollbackConfiguration, False), - 'AutoScalingGroups': ([basestring], False), - 'Deployment': (Deployment, False), - 'DeploymentConfigName': (basestring, False), - 'DeploymentGroupName': (basestring, False), - 'DeploymentStyle': (DeploymentStyle, False), - 'Ec2TagFilters': ([Ec2TagFilters], False), - 'Ec2TagSet': (Ec2TagSet, False), - 'LoadBalancerInfo': (LoadBalancerInfo, False), - 'OnPremisesInstanceTagFilters': ( - [OnPremisesInstanceTagFilters], False - ), - 'OnPremisesInstanceTagSet': (OnPremisesTagSet, False), - 'ServiceRoleArn': (basestring, True), - 'TriggerConfigurations': ([TriggerConfig], False), - } - - def validate(self): - ec2_conds = [ - 'EC2TagFilters', - 'Ec2TagSet' - ] - onPremises_conds = [ - 'OnPremisesInstanceTagFilters', - 'OnPremisesInstanceTagSet' - ] - mutually_exclusive(self.__class__.__name__, - self.properties, ec2_conds) - mutually_exclusive(self.__class__.__name__, - self.properties, onPremises_conds) diff --git a/troposphere/codepipeline.py b/troposphere/codepipeline.py deleted file mode 100644 index ac1f254b8..000000000 --- a/troposphere/codepipeline.py +++ /dev/null @@ -1,171 +0,0 @@ -# Copyright (c) 2015, Mark Peek -# All rights reserved. -# -# See LICENSE file for full license. - -from . 
import AWSObject, AWSProperty -from .validators import boolean, integer - - -class ActionTypeId(AWSProperty): - props = { - 'Category': (basestring, True), - 'Owner': (basestring, True), - 'Provider': (basestring, True), - 'Version': (basestring, True) - } - - -class ArtifactDetails(AWSProperty): - props = { - 'MaximumCount': (integer, True), - 'MinimumCount': (integer, True) - } - - -class Blockers(AWSProperty): - props = { - 'Name': (basestring, True), - 'Type': (basestring, True) - } - - -class ConfigurationProperties(AWSProperty): - props = { - 'Description': (basestring, False), - 'Key': (boolean, True), - 'Name': (basestring, True), - 'Queryable': (boolean, False), - 'Required': (boolean, True), - 'Secret': (boolean, True), - 'Type': (basestring, False) - } - - -class EncryptionKey(AWSProperty): - props = { - 'Id': (basestring, True), - 'Type': (basestring, True) - } - - -class DisableInboundStageTransitions(AWSProperty): - props = { - 'Reason': (basestring, True), - 'StageName': (basestring, True) - } - - -class InputArtifacts(AWSProperty): - props = { - 'Name': (basestring, True) - } - - -class OutputArtifacts(AWSProperty): - props = { - 'Name': (basestring, True) - } - - -class Settings(AWSProperty): - props = { - 'EntityUrlTemplate': (basestring, False), - 'ExecutionUrlTemplate': (basestring, False), - 'RevisionUrlTemplate': (basestring, False), - 'ThirdPartyConfigurationUrl': (basestring, False) - } - - -class ArtifactStore(AWSProperty): - props = { - 'EncryptionKey': (EncryptionKey, False), - 'Location': (basestring, True), - 'Type': (basestring, True) - } - - -class ArtifactStoreMap(AWSProperty): - props = { - 'ArtifactStore': (ArtifactStore, True), - 'Region': (basestring, True) - } - - -class Actions(AWSProperty): - props = { - 'ActionTypeId': (ActionTypeId, True), - 'Configuration': (dict, False), - 'InputArtifacts': ([InputArtifacts], False), - 'Name': (basestring, True), - 'OutputArtifacts': ([OutputArtifacts], False), - 'Region': (basestring, False), - 'RoleArn': (basestring, False), - 'RunOrder': (integer, False) - } - - -class Stages(AWSProperty): - props = { - 'Actions': ([Actions], True), - 'Blockers': ([Blockers], False), - 'Name': (basestring, True) - } - - -class CustomActionType(AWSObject): - resource_type = "AWS::CodePipeline::CustomActionType" - - props = { - 'Category': (basestring, True), - 'ConfigurationProperties': ([ConfigurationProperties], False), - 'InputArtifactDetails': (ArtifactDetails, True), - 'OutputArtifactDetails': (ArtifactDetails, True), - 'Provider': (basestring, True), - 'Settings': (Settings, False), - 'Version': (basestring, False) - } - - -class Pipeline(AWSObject): - resource_type = "AWS::CodePipeline::Pipeline" - - props = { - 'ArtifactStore': (ArtifactStore, False), - 'ArtifactStores': ([ArtifactStoreMap], False), - 'DisableInboundStageTransitions': - ([DisableInboundStageTransitions], False), - 'Name': (basestring, False), - 'RestartExecutionOnUpdate': (boolean, False), - 'RoleArn': (basestring, True), - 'Stages': ([Stages], True) - } - - -class WebhookAuthConfiguration(AWSProperty): - props = { - 'AllowedIPRange': (basestring, False), - 'SecretToken': (basestring, False), - } - - -class WebhookFilterRule(AWSProperty): - props = { - 'JsonPath': (basestring, True), - 'MatchEquals': (basestring, False), - } - - -class Webhook(AWSObject): - resource_type = "AWS::CodePipeline::Webhook" - - props = { - 'Authentication': (basestring, True), - 'AuthenticationConfiguration': (WebhookAuthConfiguration, True), - 'Filters': ([WebhookFilterRule], 
True), - 'Name': (basestring, False), - 'RegisterWithThirdParty': (boolean, False), - 'TargetAction': (basestring, True), - 'TargetPipeline': (basestring, True), - 'TargetPipelineVersion': (integer, True), - } diff --git a/troposphere/cognito.py b/troposphere/cognito.py deleted file mode 100644 index 5daf547cb..000000000 --- a/troposphere/cognito.py +++ /dev/null @@ -1,253 +0,0 @@ -# Copyright (c) 2017, Mark Peek -# All rights reserved. -# -# See LICENSE file for full license. - -from . import AWSObject, AWSProperty -from .validators import boolean, positive_integer - - -class CognitoIdentityProvider(AWSProperty): - props = { - 'ClientId': (basestring, False), - 'ProviderName': (basestring, False), - 'ServerSideTokenCheck': (bool, False), - } - - -class CognitoStreams(AWSProperty): - props = { - 'RoleArn': (basestring, False), - 'StreamingStatus': (basestring, False), - 'StreamName': (basestring, False), - } - - -class PushSync(AWSProperty): - props = { - 'ApplicationArns': ([basestring], False), - 'RoleArn': (basestring, False), - } - - -class IdentityPool(AWSObject): - resource_type = "AWS::Cognito::IdentityPool" - - props = { - 'AllowUnauthenticatedIdentities': (bool, True), - 'CognitoEvents': (dict, False), - 'CognitoIdentityProviders': ([CognitoIdentityProvider], False), - 'CognitoStreams': (CognitoStreams, False), - 'DeveloperProviderName': (basestring, False), - 'IdentityPoolName': (basestring, False), - 'OpenIdConnectProviderARNs': ([basestring], False), - 'PushSync': (PushSync, False), - 'SamlProviderARNs': ([basestring], False), - 'SupportedLoginProviders': (dict, False), - } - - -class MappingRule(AWSProperty): - props = { - 'Claim': (basestring, True), - 'MatchType': (basestring, True), - 'RoleARN': (basestring, True), - 'Value': (basestring, True), - } - - -class RulesConfiguration(AWSProperty): - props = { - 'Rules': ([MappingRule], True), - } - - -class RoleMapping(AWSProperty): - props = { - 'AmbiguousRoleResolution': (basestring, False), - 'RulesConfiguration': (RulesConfiguration, False), - 'Type': (basestring, True), - } - - -class IdentityPoolRoleAttachment(AWSObject): - resource_type = "AWS::Cognito::IdentityPoolRoleAttachment" - - props = { - 'IdentityPoolId': (basestring, True), - 'RoleMappings': (dict, False), - 'Roles': (dict, False), - } - - -class InviteMessageTemplate(AWSProperty): - props = { - 'EmailMessage': (basestring, False), - 'EmailSubject': (basestring, False), - 'SMSMessage': (basestring, False), - } - - -class AdminCreateUserConfig(AWSProperty): - props = { - 'AllowAdminCreateUserOnly': (boolean, False), - 'InviteMessageTemplate': (InviteMessageTemplate, False), - 'UnusedAccountValidityDays': (positive_integer, False), - } - - -class DeviceConfiguration(AWSProperty): - props = { - 'ChallengeRequiredOnNewDevice': (boolean, False), - 'DeviceOnlyRememberedOnUserPrompt': (boolean, False), - } - - -class EmailConfiguration(AWSProperty): - props = { - 'ReplyToEmailAddress': (basestring, False), - 'SourceArn': (basestring, False), - } - - -class LambdaConfig(AWSProperty): - props = { - 'CreateAuthChallenge': (basestring, False), - 'CustomMessage': (basestring, False), - 'DefineAuthChallenge': (basestring, False), - 'PostAuthentication': (basestring, False), - 'PostConfirmation': (basestring, False), - 'PreAuthentication': (basestring, False), - 'PreSignUp': (basestring, False), - 'VerifyAuthChallengeResponse': (basestring, False), - } - - -class PasswordPolicy(AWSProperty): - props = { - 'MinimumLength': (positive_integer, False), - 'RequireLowercase': 
(boolean, False), - 'RequireNumbers': (boolean, False), - 'RequireSymbols': (boolean, False), - 'RequireUppercase': (boolean, False), - } - - -class Policies(AWSProperty): - props = { - 'PasswordPolicy': (PasswordPolicy, False), - } - - -class NumberAttributeConstraints(AWSProperty): - props = { - 'MaxValue': (basestring, False), - 'MinValue': (basestring, False), - } - - -class StringAttributeConstraints(AWSProperty): - props = { - 'MaxLength': (basestring, False), - 'MinLength': (basestring, False), - } - - -class SchemaAttribute(AWSProperty): - props = { - 'AttributeDataType': (basestring, False), - 'DeveloperOnlyAttribute': (boolean, False), - 'Mutable': (boolean, False), - 'Name': (basestring, False), - 'NumberAttributeConstraints': (NumberAttributeConstraints, False), - 'StringAttributeConstraints': (StringAttributeConstraints, False), - 'Required': (boolean, False), - } - - -class SmsConfiguration(AWSProperty): - props = { - 'ExternalId': (basestring, False), - 'SnsCallerArn': (basestring, True), - } - - -class UserPool(AWSObject): - resource_type = "AWS::Cognito::UserPool" - - props = { - 'AdminCreateUserConfig': (AdminCreateUserConfig, False), - 'AliasAttributes': ([basestring], False), - 'AutoVerifiedAttributes': ([basestring], False), - 'DeviceConfiguration': (DeviceConfiguration, False), - 'EmailConfiguration': (EmailConfiguration, False), - 'EmailVerificationMessage': (basestring, False), - 'EmailVerificationSubject': (basestring, False), - 'LambdaConfig': (LambdaConfig, False), - 'MfaConfiguration': (basestring, False), - 'Policies': (Policies, False), - 'UserPoolName': (basestring, True), - 'Schema': ([SchemaAttribute], False), - 'SmsAuthenticationMessage': (basestring, False), - 'SmsConfiguration': (SmsConfiguration, False), - 'SmsVerificationMessage': (basestring, False), - 'UsernameAttributes': ([basestring], False), - 'UserPoolTags': (dict, False), - } - - -class UserPoolClient(AWSObject): - resource_type = "AWS::Cognito::UserPoolClient" - - props = { - 'ClientName': (basestring, False), - 'ExplicitAuthFlows': ([basestring], False), - 'GenerateSecret': (boolean, False), - 'ReadAttributes': ([basestring], False), - 'RefreshTokenValidity': (positive_integer, False), - 'UserPoolId': (basestring, True), - 'WriteAttributes': ([basestring], False), - } - - -class UserPoolGroup(AWSObject): - resource_type = "AWS::Cognito::UserPoolGroup" - - props = { - 'Description': (basestring, False), - 'GroupName': (basestring, True), - 'Precedence': (positive_integer, False), - 'RoleArn': (basestring, False), - 'UserPoolId': (basestring, True), - } - - -class AttributeType(AWSProperty): - props = { - 'Name': (basestring, True), - 'Value': (basestring, False), - } - - -class UserPoolUser(AWSObject): - resource_type = "AWS::Cognito::UserPoolUser" - - props = { - 'DesiredDeliveryMediums': ([basestring], False), - 'ForceAliasCreation': (boolean, False), - 'UserAttributes': ([AttributeType], False), - 'MessageAction': (basestring, False), - 'Username': (basestring, False), - 'UserPoolId': (basestring, True), - 'ValidationData': ([AttributeType], False), - } - - -class UserPoolUserToGroupAttachment(AWSObject): - resource_type = "AWS::Cognito::UserPoolUserToGroupAttachment" - - props = { - 'GroupName': (basestring, True), - 'Username': (basestring, True), - 'UserPoolId': (basestring, True), - } diff --git a/troposphere/config.py b/troposphere/config.py deleted file mode 100644 index d9d2ed18c..000000000 --- a/troposphere/config.py +++ /dev/null @@ -1,139 +0,0 @@ -# Copyright (c) 2015, Mark Peek 
-# All rights reserved. -# -# See LICENSE file for full license. - -from . import AWSObject, AWSProperty -from .validators import boolean - - -ONE_HOUR = "One_Hour" -THREE_HOURS = "Three_Hours" -SIX_HOURS = "Six_Hours" -TWELVE_HOURS = "Twelve_Hours" -TWENTYFOUR_HOURS = "TwentyFour_Hours" - - -class Scope(AWSProperty): - props = { - 'ComplianceResourceId': (basestring, False), - 'ComplianceResourceTypes': ([basestring], False), - 'TagKey': (basestring, False), - 'TagValue': (basestring, False), - } - - -class SourceDetails(AWSProperty): - props = { - 'EventSource': (basestring, True), - 'MaximumExecutionFrequency': (basestring, False), - 'MessageType': (basestring, True), - } - - def validate(self): - valid_freqs = [ - ONE_HOUR, - THREE_HOURS, - SIX_HOURS, - TWELVE_HOURS, - TWENTYFOUR_HOURS, - ] - freq = self.properties.get('MaximumExecutionFrequency') - if freq and freq not in valid_freqs: - raise ValueError( - "MaximumExecutionFrequency (given: %s) must be one of: %s" % ( - freq, ', '.join(valid_freqs))) - - -class Source(AWSProperty): - props = { - 'Owner': (basestring, True), - 'SourceDetails': ([SourceDetails], False), - 'SourceIdentifier': (basestring, True), - } - - -class ConfigRule(AWSObject): - resource_type = "AWS::Config::ConfigRule" - - props = { - 'ConfigRuleName': (basestring, False), - 'Description': (basestring, False), - 'InputParameters': (dict, False), - 'MaximumExecutionFrequency': (basestring, False), - 'Scope': (Scope, False), - 'Source': (Source, True), - } - - -class AggregationAuthorization(AWSObject): - resource_type = "AWS::Config::AggregationAuthorization" - - props = { - 'AuthorizedAccountId': (basestring, True), - 'AuthorizedAwsRegion': (basestring, True), - } - - -class OrganizationAggregationSource(AWSProperty): - props = { - 'AllAwsRegions': (boolean, False), - 'AwsRegions': ([basestring], False), - 'RoleArn': (basestring, True), - } - - -class AccountAggregationSources(AWSProperty): - props = { - 'AccountIds': ([basestring], True), - 'AllAwsRegions': (boolean, False), - 'AwsRegions': ([basestring], False), - } - - -class ConfigurationAggregator(AWSObject): - resource_type = "AWS::Config::ConfigurationAggregator" - - props = { - 'AccountAggregationSources': ([AccountAggregationSources], False), - 'ConfigurationAggregatorName': (basestring, True), - 'OrganizationAggregationSource': - (OrganizationAggregationSource, False), - } - - -class RecordingGroup(AWSProperty): - props = { - 'AllSupported': (boolean, False), - 'IncludeGlobalResourceTypes': (boolean, False), - 'ResourceTypes': ([basestring], False), - } - - -class ConfigurationRecorder(AWSObject): - resource_type = "AWS::Config::ConfigurationRecorder" - - props = { - 'Name': (basestring, False), - 'RecordingGroup': (RecordingGroup, False), - 'RoleARN': (basestring, True), - } - - -class ConfigSnapshotDeliveryProperties(AWSProperty): - props = { - 'DeliveryFrequency': (basestring, False), - } - - -class DeliveryChannel(AWSObject): - resource_type = "AWS::Config::DeliveryChannel" - - props = { - 'ConfigSnapshotDeliveryProperties': - (ConfigSnapshotDeliveryProperties, False), - 'Name': (basestring, False), - 'S3BucketName': (basestring, True), - 'S3KeyPrefix': (basestring, False), - 'SnsTopicARN': (basestring, False), - } diff --git a/troposphere/constants.py b/troposphere/constants.py deleted file mode 100644 index 346ade926..000000000 --- a/troposphere/constants.py +++ /dev/null @@ -1,416 +0,0 @@ -# -# Regions -# - -AP_NORTHEAST_1 = 'ap-northeast-1' -AP_NORTHEAST_2 = 'ap-northeast-2' -AP_SOUTHEAST_1 = 
'ap-southeast-1' -AP_SOUTHEAST_2 = 'ap-southeast-2' -AP_SOUTH_1 = 'ap-south-1' -CA_CENTRAL_1 = 'ca-central-1' -EU_WEST_1 = 'eu-west-1' -EU_WEST_2 = 'eu-west-2' -EU_WEST_3 = 'eu-west-3' -EU_CENTRAL_1 = 'eu-central-1' -EU_NORTH_1 = 'eu-north-1' -SA_EAST_1 = 'sa-east-1' -US_EAST_1 = 'us-east-1' -US_EAST_2 = 'us-east-2' -US_WEST_1 = 'us-west-1' -US_WEST_2 = 'us-west-2' - -# -# Availability Zones -# - -AP_NORTHEAST_1A = 'ap-northeast-1a' -AP_NORTHEAST_1B = 'ap-northeast-1b' -AP_NORTHEAST_1C = 'ap-northeast-1c' - -AP_NORTHEAST_2A = 'ap-northeast-2a' -AP_NORTHEAST_2B = 'ap-northeast-2b' -AP_NORTHEAST_2C = 'ap-northeast-2c' - -AP_SOUTHEAST_1A = 'ap-southeast-1a' -AP_SOUTHEAST_1B = 'ap-southeast-1b' -AP_SOUTHEAST_1C = 'ap-southeast-1c' - -AP_SOUTHEAST_2A = 'ap-southeast-2a' -AP_SOUTHEAST_2B = 'ap-southeast-2b' -AP_SOUTHEAST_2C = 'ap-southeast-2c' - -AP_SOUTH_1A = 'ap-south-1a' -AP_SOUTH_1B = 'ap-south-1b' - -CA_CENTRAL_1A = 'ca-central-1a' -CA_CENTRAL_1B = 'ca-central-1b' - -EU_WEST_1A = 'eu-west-1a' -EU_WEST_1B = 'eu-west-1b' -EU_WEST_1C = 'eu-west-1c' - -EU_WEST_2A = 'eu-west-2a' -EU_WEST_2B = 'eu-west-2b' -EU_WEST_2C = 'eu-west-2c' - -EU_WEST_3A = 'eu-west-3a' -EU_WEST_3B = 'eu-west-3b' -EU_WEST_3C = 'eu-west-3c' - -EU_CENTRAL_1A = 'eu-central-1a' -EU_CENTRAL_1B = 'eu-central-1b' -EU_CENTRAL_1C = 'eu-central-1c' - -EU_NORTH_1A = 'eu-north-1a' -EU_NORTH_1B = 'eu-north-1b' -EU_NORTH_1C = 'eu-north-1c' - -SA_EAST_1A = 'sa-east-1a' -SA_EAST_1B = 'sa-east-1b' -SA_EAST_1C = 'sa-east-1c' - -US_EAST_1A = 'us-east-1a' -US_EAST_1B = 'us-east-1b' -US_EAST_1C = 'us-east-1c' -US_EAST_1D = 'us-east-1d' -US_EAST_1E = 'us-east-1e' -US_EAST_1F = 'us-east-1f' - -US_EAST_2A = 'us-east-2a' -US_EAST_2B = 'us-east-2b' -US_EAST_2C = 'us-east-2c' - -US_WEST_1A = 'us-west-1a' -US_WEST_1B = 'us-west-1b' -US_WEST_1C = 'us-west-1c' - -US_WEST_2A = 'us-west-2a' -US_WEST_2B = 'us-west-2b' -US_WEST_2C = 'us-west-2c' - -# -# Networking -# - -QUAD_ZERO = '0.0.0.0/0' -VPC_CIDR_16 = '10.0.0.0/16' - -SSH_PORT = 22 -MONGODB_PORT = 27017 -NTP_PORT = 123 -SMTP_PORT_25 = 25 -SMTP_PORT_587 = 587 -HTTP_PORT = 80 -HTTPS_PORT = 443 -REDIS_PORT = 6379 -MEMCACHED_PORT = 11211 -POSTGRESQL_PORT = 5432 - -TCP_PROTOCOL = 6 -UDP_PROTOCOL = 17 -ICMP_PROTOCOL = 1 -ALL_PROTOCOL = -1 - -# -# EC2 instance types -# - -T2_NANO = 't2.nano' -T2_MICRO = 't2.micro' -T2_SMALL = 't2.small' -T2_MEDIUM = 't2.medium' -T2_LARGE = 't2.large' -T2_XLARGE = 't2.xlarge' -T2_2XLARGE = 't2.2xlarge' - -T3_NANO = 't3.nano' -T3_MICRO = 't3.micro' -T3_SMALL = 't3.small' -T3_MEDIUM = 't3.medium' -T3_LARGE = 't3.large' -T3_XLARGE = 't3.xlarge' -T3_2XLARGE = 't3.2xlarge' - -M5_LARGE = 'm5.large' -M5_XLARGE = 'm5.xlarge' -M5_2XLARGE = 'm5.2xlarge' -M5_4XLARGE = 'm5.4xlarge' -M5_12XLARGE = 'm5.12xlarge' -M5_24XLARGE = 'm5.24xlarge' - -M4_LARGE = 'm4.large' -M4_XLARGE = 'm4.xlarge' -M4_2XLARGE = 'm4.2xlarge' -M4_4XLARGE = 'm4.4xlarge' -M4_10XLARGE = 'm4.10xlarge' -M4_16XLARGE = 'm4.16xlarge' - -M3_MEDIUM = 'm3.medium' -M3_LARGE = 'm3.large' -M3_XLARGE = 'm3.xlarge' -M3_2XLARGE = 'm3.2xlarge' - -C3_LARGE = 'c3.large' -C3_XLARGE = 'c3.xlarge' -C3_2XLARGE = 'c3.2xlarge' -C3_4XLARGE = 'c3.4xlarge' -C3_8XLARGE = 'c3.8xlarge' - -C4_LARGE = 'c4.large' -C4_XLARGE = 'c4.xlarge' -C4_2XLARGE = 'c4.2xlarge' -C4_4XLARGE = 'c4.4xlarge' -C4_8XLARGE = 'c4.8xlarge' - -C5_LARGE = 'c5.large' -C5_XLARGE = 'c5.xlarge' -C5_2XLARGE = 'c5.2xlarge' -C5_4XLARGE = 'c5.4xlarge' -C5_9XLARGE = 'c5.9xlarge' -C5_18XLARGE = 'c5.18xlarge' - -C5D_LARGE = 'c5d.large' -C5D_XLARGE = 'c5d.xlarge' -C5D_2XLARGE = 
'c5d.2xlarge' -C5D_4XLARGE = 'c5d.4xlarge' -C5D_9XLARGE = 'c5d.9xlarge' -C5D_18XLARGE = 'c5d.18xlarge' - -R3_LARGE = 'r3.large' -R3_XLARGE = 'r3.xlarge' -R3_2XLARGE = 'r3.2xlarge' -R3_4XLARGE = 'r3.4xlarge' -R3_8XLARGE = 'r3.8xlarge' - -G2_2XLARGE = 'g2.2xlarge' -G2_8XLARGE = 'g2.8xlarge' - -G3_2XLARGE = 'g3.2xlarge' -G3_8XLARGE = 'g3.8xlarge' -G3_16XLARGE = 'g3.16xlarge' - -I2_XLARGE = 'i2.xlarge' -I2_2XLARGE = 'i2.2xlarge' -I2_4XLARGE = 'i2.4xlarge' -I2_8XLARGE = 'i2.8xlarge' - -H1_2XLARGE = 'h1.2xlarge' -H1_4XLARGE = 'h1.4xlarge' -H1_8XLARGE = 'h1.8xlarge' -H1_16XLARGE = 'h1.16xlarge' - -I3_LARGE = 'i3.large' -I3_XLARGE = 'i3.xlarge' -I3_2XLARGE = 'i3.2xlarge' -I3_4XLARGE = 'i3.4xlarge' -I3_8XLARGE = 'i3.8xlarge' -I3_16XLARGE = 'i3.16xlarge' -I3_METAL = 'i3.metal' - -D2_XLARGE = 'd2.xlarge' -D2_2XLARGE = 'd2.2xlarge' -D2_4XLARGE = 'd2.4xlarge' -D2_8XLARGE = 'd2.8xlarge' - -HS1_8XLARGE = 'hs1.8xlarge' - -M1_SMALL = 'm1.small' -M1_MEDIUM = 'm1.medium' -M1_LARGE = 'm1.large' -M1_XLARGE = 'm1.xlarge' - -C1_MEDIUM = 'c1.medium' -C1_XLARGE = 'c1.xlarge' -CC2_8XLARGE = 'cc2.8xlarge' - -CG1_4XLARGE = 'cg1.4xlarge' - -M2_XLARGE = 'm2.xlarge' -M2_2XLARGE = 'm2.2xlarge' -M2_4XLARGE = 'm2.4xlarge' -CR1_8XLARGE = 'cr1.8xlarge' - -HI1_4XLARGE = 'hi1.4xlarge' - -T1_MICRO = 't1.micro' - -X1_32XLARGE = 'x1.32xlarge' -X1_16XLARGE = 'x1.16xlarge' - -X1E_XLARGE = 'x1e.xlarge' -X1E_2XLARGE = 'x1e.2xlarge' -X1E_4XLARGE = 'x1e.4xlarge' -X1E_8XLARGE = 'x1e.8xlarge' -X1E_16XLARGE = 'x1e.16xlarge' -X1E_32XLARGE = 'x1e.32xlarge' - -R4_LARGE = 'r4.large' -R4_XLARGE = 'r4.xlarge' -R4_2XLARGE = 'r4.2xlarge' -R4_4XLARGE = 'r4.4xlarge' -R4_8XLARGE = 'r4.8xlarge' -R4_16XLARGE = 'r4.16xlarge' - -P2_XLARGE = 'p2.xlarge' -P2_8XLARGE = 'p2.8xlarge' -P2_16XLARGE = 'p2.16xlarge' - -P3_2XLARGE = 'p3.2xlarge' -P3_8XLARGE = 'p3.8xlarge' -P3_16XLARGE = 'p3.16xlarge' - -F1_2XLARGE = 'f1.2xlarge' -F1_16XLARGE = 'f1.16xlarge' - -I3_LARGE = 'i3.large' -I3_XLARGE = 'i3.xlarge' -I3_2XLARGE = 'i3.2xlarge' -I3_4XLARGE = 'i3.4xlarge' -I3_8XLARGE = 'i3.8xlarge' -I3_16XLARGE = 'i3.16xlarge' - -# -# RDS DB instance classes -# - -DB_M3_MEDIUM = 'db.m3.medium' -DB_M3_LARGE = 'db.m3.large' -DB_M3_XLARGE = 'db.m3.xlarge' -DB_M3_2XLARGE = 'db.m3.2xlarge' - -DB_R3_LARGE = 'db.r3.large' -DB_R3_XLARGE = 'db.r3.xlarge' -DB_R3_2XLARGE = 'db.r3.2xlarge' -DB_R3_4XLARGE = 'db.r3.4xlarge' -DB_R3_8XLARGE = 'db.r3.8xlarge' - -DB_T2_MICRO = 'db.t2.micro' -DB_T2_SMALL = 'db.t2.small' -DB_T2_MEDIUM = 'db.t2.medium' - -DB_M1_SMALL = 'db.m1.small' -DB_M1_MEDIUM = 'db.m1.medium' -DB_M1_LARGE = 'db.m1.large' -DB_M1_XLARGE = 'db.m1.xlarge' - -DB_M2_XLARGE = 'db.m2.xlarge' -DB_M2_2XLARGE = 'db.m2.2xlarge' -DB_M2_4XLARGE = 'db.m2.4xlarge' -DB_CR1_8XLARGE = 'db.cr1.8xlarge' - -DB_T1_MICRO = 'db.t1.micro' - -# -# ElastiCache node types -# - -CACHE_T2_MICRO = 'cache.t2.micro' -CACHE_T2_SMALL = 'cache.t2.small' -CACHE_T2_MEDIUM = 'cache.t2.medium' - -CACHE_M3_MEDIUM = 'cache.m3.medium' -CACHE_M3_LARGE = 'cache.m3.large' -CACHE_M3_XLARGE = 'cache.m3.xlarge' -CACHE_M3_2XLARGE = 'cache.m3.2xlarge' - -CACHE_R3_LARGE = 'cache.r3.large' -CACHE_R3_XLARGE = 'cache.r3.xlarge' -CACHE_R3_2XLARGE = 'cache.r3.2xlarge' -CACHE_R3_4XLARGE = 'cache.r3.4xlarge' -CACHE_R3_8XLARGE = 'cache.r3.8xlarge' - -CACHE_M1_SMALL = 'cache.m1.small' -CACHE_M1_MEDIUM = 'cache.m1.medium' -CACHE_M1_LARGE = 'cache.m1.large' -CACHE_M1_XLARGE = 'cache.m1.xlarge' - -CACHE_M2_XLARGE = 'cache.m2.xlarge' -CACHE_M2_2XLARGE = 'cache.m2.2xlarge' -CACHE_M2_4XLARGE = 'cache.m2.4xlarge' - -CACHE_C1_XLARGE = 
'cache.c1.xlarge' - -CACHE_T1_MICRO = 'cache.t1.micro' - -# -# Elasticsearch instance types -# - -ELASTICSEARCH_T2_MICRO = 't2.micro.elasticsearch' -ELASTICSEARCH_T2_SMALL = 't2.small.elasticsearch' -ELASTICSEARCH_T2_MEDIUM = 't2.medium.elasticsearch' - -ELASTICSEARCH_M3_MEDIUM = 'm3.medium.elasticsearch' -ELASTICSEARCH_M3_LARGE = 'm3.large.elasticsearch' -ELASTICSEARCH_M3_XLARGE = 'm3.xlarge.elasticsearch' -ELASTICSEARCH_M3_2XLARGE = 'm3.2xlarge.elasticsearch' - -ELASTICSEARCH_M4_LARGE = 'm4.large.elasticsearch' -ELASTICSEARCH_M4_XLARGE = 'm4.xlarge.elasticsearch' -ELASTICSEARCH_M4_2XLARGE = 'm4.2xlarge.elasticsearch' -ELASTICSEARCH_M4_4XLARGE = 'm4.4xlarge.elasticsearch' -ELASTICSEARCH_M4_10XLARGE = 'm4.10xlarge.elasticsearch' - -ELASTICSEARCH_C4_LARGE = 'c4.large.elasticsearch' -ELASTICSEARCH_C4_XLARGE = 'c4.xlarge.elasticsearch' -ELASTICSEARCH_C4_2XLARGE = 'c4.2xlarge.elasticsearch' -ELASTICSEARCH_C4_4XLARGE = 'c4.4xlarge.elasticsearch' -ELASTICSEARCH_C4_8XLARGE = 'c4.8xlarge.elasticsearch' - -ELASTICSEARCH_R3_LARGE = 'r3.large.elasticsearch' -ELASTICSEARCH_R3_XLARGE = 'r3.xlarge.elasticsearch' -ELASTICSEARCH_R3_2XLARGE = 'r3.2xlarge.elasticsearch' -ELASTICSEARCH_R3_4XLARGE = 'r3.4xlarge.elasticsearch' -ELASTICSEARCH_R3_8XLARGE = 'r3.8xlarge.elasticsearch' - -ELASTICSEARCH_R4_LARGE = 'r4.large.elasticsearch' -ELASTICSEARCH_R4_XLARGE = 'r4.xlarge.elasticsearch' -ELASTICSEARCH_R4_2XLARGE = 'r4.2xlarge.elasticsearch' -ELASTICSEARCH_R4_4XLARGE = 'r4.4xlarge.elasticsearch' -ELASTICSEARCH_R4_8XLARGE = 'r4.8xlarge.elasticsearch' -ELASTICSEARCH_R4_16XLARGE = 'r4.16xlarge.elasticsearch' - -ELASTICSEARCH_I2_XLARGE = 'i2.xlarge.elasticsearch' -ELASTICSEARCH_I2_2XLARGE = 'i2.2xlarge.elasticsearch' - -# -# Parameter types -# - -STRING = 'String' -NUMBER = 'Number' -LIST_OF_NUMBERS = 'List' -COMMA_DELIMITED_LIST = 'CommaDelimitedList' - -AVAILABILITY_ZONE_NAME = 'AWS::EC2::AvailabilityZone::Name' -IMAGE_ID = 'AWS::EC2::Image::Id' -INSTANCE_ID = 'AWS::EC2::Instance::Id' -KEY_PAIR_NAME = 'AWS::EC2::KeyPair::KeyName' -SECURITY_GROUP_NAME = 'AWS::EC2::SecurityGroup::GroupName' -SECURITY_GROUP_ID = 'AWS::EC2::SecurityGroup::Id' -SUBNET_ID = 'AWS::EC2::Subnet::Id' -VOLUME_ID = 'AWS::EC2::Volume::Id' -VPC_ID = 'AWS::EC2::VPC::Id' -HOSTED_ZONE_ID = 'AWS::Route53::HostedZone::Id' - -LIST_OF_AVAILABILITY_ZONE_NAMES = 'List' -LIST_OF_IMAGE_ID = 'List' -LIST_OF_INSTANCE_IDS = 'List' -LIST_OF_SECURITY_GROUP_NAMES = 'List' -LIST_OF_SECURITY_GROUP_IDS = 'List' -LIST_OF_SUBNET_IDS = 'List' -LIST_OF_VOLUME_IDS = 'List' -LIST_OF_VPC_IDS = 'List' -LIST_OF_HOSTED_ZONE_IDS = 'List' - -# -# Logs -# -LOGS_ALLOWED_RETENTION_DAYS = [1, 3, 5, 7, 14, 30, 60, 90, 120, 150, 180, - 365, 400, 545, 731, 1827, 3653] - -# -# Route53 -# - -CLOUDFRONT_HOSTEDZONEID = 'Z2FDTNDATAQYW2' diff --git a/troposphere/datapipeline.py b/troposphere/datapipeline.py deleted file mode 100644 index c10c5b85c..000000000 --- a/troposphere/datapipeline.py +++ /dev/null @@ -1,60 +0,0 @@ -from . 
import AWSObject, AWSProperty -from .validators import boolean - - -class ParameterObjectAttribute(AWSProperty): - props = { - 'Key': (basestring, True), - 'StringValue': (basestring, False), - } - - -class ParameterObject(AWSProperty): - props = { - 'Attributes': ([ParameterObjectAttribute], True), - 'Id': (basestring, True), - } - - -class ParameterValue(AWSProperty): - props = { - 'Id': (basestring, True), - 'StringValue': (basestring, True), - } - - -class ObjectField(AWSProperty): - props = { - 'Key': (basestring, True), - 'RefValue': (basestring, False), - 'StringValue': (basestring, False), - } - - -class PipelineObject(AWSProperty): - props = { - 'Fields': ([ObjectField], True), - 'Id': (basestring, True), - 'Name': (basestring, True), - } - - -class PipelineTag(AWSProperty): - props = { - 'Key': (basestring, True), - 'Value': (basestring, True), - } - - -class Pipeline(AWSObject): - resource_type = "AWS::DataPipeline::Pipeline" - - props = { - 'Activate': (boolean, False), - 'Description': (basestring, False), - 'Name': (basestring, True), - 'ParameterObjects': ([ParameterObject], False), - 'ParameterValues': ([ParameterValue], False), - 'PipelineObjects': ([PipelineObject], True), - 'PipelineTags': ([PipelineTag], False), - } diff --git a/troposphere/dax.py b/troposphere/dax.py deleted file mode 100644 index 611c7a763..000000000 --- a/troposphere/dax.py +++ /dev/null @@ -1,53 +0,0 @@ -# Copyright (c) 2012-2017, Mark Peek -# All rights reserved. -# -# See LICENSE file for full license. - -from . import AWSObject, AWSProperty -from .validators import boolean - - -class SSESpecification(AWSProperty): - props = { - 'SSEEnabled': (boolean, False), - } - - -class Cluster(AWSObject): - resource_type = "AWS::DAX::Cluster" - - props = { - 'AvailabilityZones': (basestring, False), - 'ClusterName': (basestring, False), - 'Description': (basestring, False), - 'IAMRoleARN': (basestring, True), - 'NodeType': (basestring, True), - 'NotificationTopicARN': (basestring, False), - 'ParameterGroupName': (basestring, False), - 'PreferredMaintenanceWindow': (basestring, False), - 'ReplicationFactor': (basestring, True), - 'SSESpecification': (SSESpecification, False), - 'SecurityGroupIds': ([basestring], False), - 'SubnetGroupName': (basestring, True), - 'Tags': (dict, False), - } - - -class ParameterGroup(AWSObject): - resource_type = "AWS::DAX::ParameterGroup" - - props = { - 'Description': (basestring, False), - 'ParameterGroupName': (basestring, False), - 'ParameterNameValues': (dict, False), - } - - -class SubnetGroup(AWSObject): - resource_type = "AWS::DAX::SubnetGroup" - - props = { - 'Description': (basestring, False), - 'SubnetGroupName': (basestring, False), - 'SubnetIds': ([basestring], False), - } diff --git a/troposphere/directoryservice.py b/troposphere/directoryservice.py deleted file mode 100644 index f9371881a..000000000 --- a/troposphere/directoryservice.py +++ /dev/null @@ -1,43 +0,0 @@ -# Copyright (c) 2015, Mark Peek -# All rights reserved. -# -# See LICENSE file for full license. - -from . 
import AWSObject, AWSProperty -from .validators import boolean - - -class VpcSettings(AWSProperty): - props = { - 'SubnetIds': ([basestring], True), - 'VpcId': (basestring, True), - } - - -class MicrosoftAD(AWSObject): - resource_type = "AWS::DirectoryService::MicrosoftAD" - - props = { - 'CreateAlias': (boolean, False), - 'Edition': (basestring, False), - 'EnableSso': (boolean, False), - 'Name': (basestring, True), - 'Password': (basestring, True), - 'ShortName': (basestring, False), - 'VpcSettings': (VpcSettings, True) - } - - -class SimpleAD(AWSObject): - resource_type = "AWS::DirectoryService::SimpleAD" - - props = { - 'CreateAlias': (boolean, False), - 'Description': (basestring, False), - 'EnableSso': (boolean, False), - 'Name': (basestring, True), - 'Password': (basestring, True), - 'ShortName': (basestring, False), - 'Size': (basestring, True), - 'VpcSettings': (VpcSettings, True), - } diff --git a/troposphere/dlm.py b/troposphere/dlm.py deleted file mode 100644 index 5b2373906..000000000 --- a/troposphere/dlm.py +++ /dev/null @@ -1,82 +0,0 @@ -# Copyright (c) 2015, Mark Peek -# All rights reserved. -# -# See LICENSE file for full license. - -from . import AWSObject, AWSProperty, Tags -from .validators import (integer, boolean) - -VALID_STATES = ('ENABLED', 'DISABLED') -VALID_RESOURCE_TYPES = ('VOLUME') -VALID_INTERVALS = (12, 24) -VALID_INTERVAL_UNITS = ('HOURS') - - -def validate_interval(interval): - """Interval validation rule.""" - - if interval not in VALID_INTERVALS: - raise ValueError("Interval must be one of : %s" % - ", ".join(VALID_INTERVALS)) - return interval - - -def validate_interval_unit(interval_unit): - """Interval unit validation rule.""" - - if interval_unit not in VALID_INTERVAL_UNITS: - raise ValueError("Interval unit must be one of : %s" % - ", ".join(VALID_INTERVAL_UNITS)) - return interval_unit - - -def validate_state(state): - """State validation rule.""" - - if state not in VALID_STATES: - raise ValueError("State must be one of : %s" % - ", ".join(VALID_STATES)) - return state - - -class CreateRule(AWSProperty): - props = { - 'Interval': (validate_interval, True), - 'IntervalUnit': (validate_interval_unit, True), - 'Times': ([basestring], False), - } - - -class RetainRule(AWSProperty): - props = { - 'Count': (integer, True), - } - - -class Schedule(AWSProperty): - props = { - 'CopyTags': (boolean, False), - 'CreateRule': (CreateRule, False), - 'Name': (basestring, False), - 'RetainRule': (RetainRule, False), - 'TagsToAdd': ((Tags, list), False), - } - - -class PolicyDetails(AWSProperty): - props = { - 'ResourceTypes': ([basestring], False), - 'Schedules': ([Schedule], False), - 'TargetTags': ((Tags, list), False), - } - - -class LifecyclePolicy(AWSObject): - resource_type = "AWS::DLM::LifecyclePolicy" - - props = { - 'Description': (basestring, False), - 'ExecutionRoleArn': (basestring, False), - 'PolicyDetails': (PolicyDetails, False), - 'State': (validate_state, False), - } diff --git a/troposphere/dms.py b/troposphere/dms.py deleted file mode 100644 index 7c728aa78..000000000 --- a/troposphere/dms.py +++ /dev/null @@ -1,140 +0,0 @@ -# Copyright (c) 2017, Mark Peek -# All rights reserved. -# -# See LICENSE file for full license. - -from . 
import AWSObject, AWSProperty, Tags -from .validators import boolean, integer, network_port, positive_integer - - -CDC = "cdc" -FULL_LOAD = "full-load" -FULL_LOAD_AND_CDC = "full-load-and-cdc" - - -class Certificate(AWSObject): - resource_type = "AWS::DMS::Certificate" - - props = { - 'CertificateIdentifier': (basestring, False), - 'CertificatePem': (basestring, False), - 'CertificateWallet': (basestring, False), - } - - -class DynamoDBSettings(AWSProperty): - props = { - 'ServiceAccessRoleArn': (basestring, True), - } - - -class MongoDbSettings(AWSProperty): - props = { - 'AuthMechanism': (basestring, False), - 'AuthSource': (basestring, False), - 'DatabaseName': (basestring, False), - 'DocsToInvestigate': (basestring, False), - 'ExtractDocId': (basestring, False), - 'KmsKeyId': (basestring, False), - 'NestingLevel': (basestring, False), - 'Password': (basestring, False), - 'Port': (network_port, False), - 'ServerName': (basestring, False), - 'Username': (basestring, False), - } - - -class S3Settings(AWSProperty): - props = { - 'BucketFolder': (basestring, False), - 'BucketName': (basestring, False), - 'CompressionType': (basestring, False), - 'CsvDelimiter': (basestring, False), - 'CsvRowDelimiter': (basestring, False), - 'ExternalTableDefinition': (basestring, False), - 'ServiceAccessRoleArn': (basestring, False), - } - - -class Endpoint(AWSObject): - resource_type = "AWS::DMS::Endpoint" - - props = { - 'CertificateArn': (basestring, False), - 'DatabaseName': (basestring, False), - 'DynamoDbSettings': (DynamoDBSettings, False), - 'EndpointIdentifier': (basestring, False), - 'EndpointType': (basestring, True), - 'EngineName': (basestring, True), - 'ExtraConnectionAttributes': (basestring, False), - 'KmsKeyId': (basestring, False), - 'MongoDbSettings': (MongoDbSettings, False), - 'Password': (basestring, False), - 'Port': (network_port, False), - 'S3Settings': (S3Settings, False), - 'ServerName': (basestring, False), - 'SslMode': (basestring, False), - 'Tags': (Tags, False), - 'Username': (basestring, True), - } - - -class EventSubscription(AWSObject): - resource_type = "AWS::DMS::EventSubscription" - - props = { - 'Enabled': (boolean, False), - 'EventCategories': ([basestring], False), - 'SnsTopicArn': (basestring, True), - 'SourceIds': ([basestring], False), - 'SourceType': (basestring, False), - 'SubscriptionName': ([basestring], False), - 'Tags': (Tags, False), - } - - -class ReplicationInstance(AWSObject): - resource_type = "AWS::DMS::ReplicationInstance" - - props = { - 'AllocatedStorage': (integer, False), - 'AutoMinorVersionUpgrade': (boolean, False), - 'AvailabilityZone': (basestring, False), - 'EngineVersion': (basestring, False), - 'KmsKeyId': (basestring, False), - 'MultiAZ': (boolean, False), - 'PreferredMaintenanceWindow': (basestring, False), - 'PubliclyAccessible': (boolean, False), - 'ReplicationInstanceClass': (basestring, True), - 'ReplicationInstanceIdentifier': (basestring, False), - 'ReplicationSubnetGroupIdentifier': (basestring, False), - 'Tags': (Tags, False), - 'VpcSecurityGroupIds': ([basestring], False), - } - - -class ReplicationSubnetGroup(AWSObject): - resource_type = "AWS::DMS::ReplicationSubnetGroup" - - props = { - 'ReplicationSubnetGroupIdentifier': (basestring, False), - 'ReplicationSubnetGroupDescription': (basestring, True), - 'SubnetIds': ([basestring], True), - 'Tags': (Tags, False), - } - - -class ReplicationTask(AWSObject): - resource_type = "AWS::DMS::ReplicationTask" - - props = { - 'CdcStartTime': (positive_integer, False), - 'MigrationType': 
(basestring, True), - 'ReplicationInstanceArn': (basestring, True), - 'ReplicationTaskIdentifier': (basestring, False), - 'ReplicationTaskSettings': (basestring, False), - 'SourceEndpointArn': (basestring, True), - 'TableMappings': (basestring, True), - 'Tags': (Tags, False), - 'TargetEndpointArn': (basestring, True), - } diff --git a/troposphere/docdb.py b/troposphere/docdb.py deleted file mode 100644 index f6e1f19d5..000000000 --- a/troposphere/docdb.py +++ /dev/null @@ -1,67 +0,0 @@ -# Copyright (c) 2012-2018, Mark Peek -# All rights reserved. -# -# See LICENSE file for full license. - -from . import AWSObject, Tags -from .validators import boolean, integer - - -class DBCluster(AWSObject): - resource_type = "AWS::DocDB::DBCluster" - - props = { - 'AvailabilityZones': ([basestring], False), - 'BackupRetentionPeriod': (integer, False), - 'DBClusterIdentifier': (basestring, False), - 'DBClusterParameterGroupName': (basestring, False), - 'DBSubnetGroupName': (basestring, False), - 'EngineVersion': (basestring, False), - 'KmsKeyId': (basestring, False), - 'MasterUserPassword': (basestring, False), - 'MasterUsername': (basestring, False), - 'Port': (integer, False), - 'PreferredBackupWindow': (basestring, False), - 'PreferredMaintenanceWindow': (basestring, False), - 'SnapshotIdentifier': (basestring, False), - 'StorageEncrypted': (boolean, False), - 'Tags': (Tags, False), - 'VpcSecurityGroupIds': ([basestring], False), - } - - -class DBClusterParameterGroup(AWSObject): - resource_type = "AWS::DocDB::DBClusterParameterGroup" - - props = { - 'Description': (basestring, True), - 'Family': (basestring, True), - 'Name': (basestring, False), - 'Parameters': (dict, True), - 'Tags': (Tags, False), - } - - -class DBInstance(AWSObject): - resource_type = "AWS::DocDB::DBInstance" - - props = { - 'AutoMinorVersionUpgrade': (boolean, False), - 'AvailabilityZone': (basestring, False), - 'DBClusterIdentifier': (basestring, True), - 'DBInstanceClass': (basestring, True), - 'DBInstanceIdentifier': (basestring, False), - 'PreferredMaintenanceWindow': (basestring, False), - 'Tags': (Tags, False), - } - - -class DBSubnetGroup(AWSObject): - resource_type = "AWS::DocDB::DBSubnetGroup" - - props = { - 'DBSubnetGroupDescription': (basestring, True), - 'DBSubnetGroupName': (basestring, False), - 'SubnetIds': ([basestring], True), - 'Tags': (Tags, False), - } diff --git a/troposphere/dynamodb.py b/troposphere/dynamodb.py deleted file mode 100644 index e86c1808e..000000000 --- a/troposphere/dynamodb.py +++ /dev/null @@ -1,173 +0,0 @@ -# Copyright (c) 2012-2013, Mark Peek -# All rights reserved. -# -# See LICENSE file for full license. - -from . 
import AWSObject, AWSProperty, AWSHelperFn, If, Tags -from .validators import boolean - - -def attribute_type_validator(x): - valid_types = ["S", "N", "B"] - if x not in valid_types: - raise ValueError("AttributeType must be one of: %s" % - ", ".join(valid_types)) - return x - - -def key_type_validator(x): - valid_types = ["HASH", "RANGE"] - if x not in valid_types: - raise ValueError("KeyType must be one of: %s" % ", ".join(valid_types)) - return x - - -def projection_type_validator(x): - valid_types = ["KEYS_ONLY", "INCLUDE", "ALL"] - if x not in valid_types: - raise ValueError("ProjectionType must be one of: %s" % - ", ".join(valid_types)) - return x - - -def billing_mode_validator(x): - valid_modes = ['PROVISIONED', 'PAY_PER_REQUEST'] - if x not in valid_modes: - raise ValueError("Table billing mode must be one of: %s" % - ", ".join(valid_modes)) - return x - - -class AttributeDefinition(AWSProperty): - props = { - "AttributeName": (basestring, True), - "AttributeType": (attribute_type_validator, True), - } - - -class KeySchema(AWSProperty): - props = { - "AttributeName": (basestring, True), - "KeyType": (key_type_validator, True) - } - - -class Key(KeySchema): - """ For backwards compatibility. """ - pass - - -class ProvisionedThroughput(AWSProperty): - props = { - "ReadCapacityUnits": (int, True), - "WriteCapacityUnits": (int, True), - } - - -class Projection(AWSProperty): - props = { - "NonKeyAttributes": ([basestring], False), - "ProjectionType": (projection_type_validator, False) - } - - -class SSESpecification(AWSProperty): - props = { - "SSEEnabled": (boolean, True), - } - - -class GlobalSecondaryIndex(AWSProperty): - props = { - "IndexName": (basestring, True), - "KeySchema": ([KeySchema], True), - "Projection": (Projection, True), - "ProvisionedThroughput": (ProvisionedThroughput, False) - } - - -class LocalSecondaryIndex(AWSProperty): - props = { - "IndexName": (basestring, True), - "KeySchema": ([KeySchema], True), - "Projection": (Projection, True), - } - - -class PointInTimeRecoverySpecification(AWSProperty): - props = { - 'PointInTimeRecoveryEnabled': (boolean, False), - } - - -class StreamSpecification(AWSProperty): - props = { - 'StreamViewType': (basestring, True), - } - - -class TimeToLiveSpecification(AWSProperty): - props = { - 'AttributeName': (basestring, True), - 'Enabled': (boolean, True), - } - - -class Table(AWSObject): - resource_type = "AWS::DynamoDB::Table" - - props = { - 'AttributeDefinitions': ([AttributeDefinition], True), - 'BillingMode': (billing_mode_validator, False), - 'GlobalSecondaryIndexes': ([GlobalSecondaryIndex], False), - 'KeySchema': ([KeySchema], True), - 'LocalSecondaryIndexes': ([LocalSecondaryIndex], False), - 'PointInTimeRecoverySpecification': - (PointInTimeRecoverySpecification, False), - 'ProvisionedThroughput': (ProvisionedThroughput, False), - 'SSESpecification': (SSESpecification, False), - 'StreamSpecification': (StreamSpecification, False), - 'TableName': (basestring, False), - 'Tags': (Tags, False), - 'TimeToLiveSpecification': (TimeToLiveSpecification, False), - } - - def validate(self): - billing_mode = self.properties.get('BillingMode', 'PROVISIONED') - indexes = self.properties.get('GlobalSecondaryIndexes', []) - tput_props = [self.properties] - tput_props.extend([ - x.properties for x in indexes if not isinstance(x, AWSHelperFn) - ]) - - def check_if_all(name, props): - validated = [] - for prop in props: - is_helper = isinstance(prop.get(name), AWSHelperFn) - validated.append(name in prop or is_helper) - return 
all(validated) - - def check_any(name, props): - validated = [] - for prop in props: - is_helper = isinstance(prop.get(name), AWSHelperFn) - validated.append(name in prop and not is_helper) - return any(validated) - - if isinstance(billing_mode, If): - if check_any('ProvisionedThroughput', tput_props): - raise ValueError( - 'Table billing mode is per-request. ' - 'ProvisionedThroughput property is mutually exclusive') - return - - if billing_mode == 'PROVISIONED': - if not check_if_all('ProvisionedThroughput', tput_props): - raise ValueError( - 'Table billing mode is provisioned. ' - 'ProvisionedThroughput required if available') - elif billing_mode == 'PAY_PER_REQUEST': - if check_any('ProvisionedThroughput', tput_props): - raise ValueError( - 'Table billing mode is per-request. ' - 'ProvisionedThroughput property is mutually exclusive') diff --git a/troposphere/dynamodb2.py b/troposphere/dynamodb2.py deleted file mode 100644 index c56e5be43..000000000 --- a/troposphere/dynamodb2.py +++ /dev/null @@ -1,35 +0,0 @@ -from .dynamodb import ( - AttributeDefinition, - KeySchema, - Key, - ProvisionedThroughput, - Projection, - GlobalSecondaryIndex, - LocalSecondaryIndex, - StreamSpecification, - Table, - attribute_type_validator, - key_type_validator, - projection_type_validator, -) - -import warnings - -# Only way to make pyflakes shut up about unused imports -assert AttributeDefinition -assert KeySchema -assert Key -assert ProvisionedThroughput -assert Projection -assert GlobalSecondaryIndex -assert LocalSecondaryIndex -assert StreamSpecification -assert Table -assert attribute_type_validator -assert key_type_validator -assert projection_type_validator - - -warnings.warn("This module has replaced by troposphere.dynamodb. Please " - "import that module instead, as troposphere.dynamodb2 will be " - "removed soon.") diff --git a/troposphere/ec2.py b/troposphere/ec2.py deleted file mode 100644 index f6929cb0e..000000000 --- a/troposphere/ec2.py +++ /dev/null @@ -1,1097 +0,0 @@ -# Copyright (c) 2012-2013, Mark Peek -# All rights reserved. -# -# See LICENSE file for full license. - -from . 
import AWSHelperFn, AWSObject, AWSProperty, Tags -from .validators import ( - boolean, exactly_one, integer, integer_range, double, - network_port, positive_integer, vpn_pre_shared_key, vpn_tunnel_inside_cidr, - vpc_endpoint_type -) - -try: - from awacs.aws import Policy - - policytypes = (dict, Policy) -except ImportError: - policytypes = dict, - - -VALID_ELASTICINFERENCEACCELERATOR_TYPES = ('eia1.medium', 'eia1.large', - 'eia1.xlarge') - - -def validate_elasticinferenceaccelerator_type( - elasticinferenceaccelerator_type): - """Validate ElasticInferenceAccelerator for Instance""" - - if elasticinferenceaccelerator_type not in VALID_ELASTICINFERENCEACCELERATOR_TYPES: # NOQA - raise ValueError("Elastic Inference Accelerator Type must be one of: %s" % # NOQA - ", ".join(VALID_ELASTICINFERENCEACCELERATOR_TYPES)) - return elasticinferenceaccelerator_type - - -class Tag(AWSProperty): - props = { - 'Key': (basestring, True), - 'Value': (basestring, True) - } - - def __init__(self, key=None, value=None, **kwargs): - # provided for backward compatibility - if key is not None: - kwargs['Key'] = key - if value is not None: - kwargs['Value'] = value - super(Tag, self).__init__(**kwargs) - - -class CustomerGateway(AWSObject): - resource_type = "AWS::EC2::CustomerGateway" - - props = { - 'BgpAsn': (integer, True), - 'IpAddress': (basestring, True), - 'Tags': ((Tags, list), False), - 'Type': (basestring, True), - } - - -class DHCPOptions(AWSObject): - resource_type = "AWS::EC2::DHCPOptions" - - props = { - 'DomainName': (basestring, False), - 'DomainNameServers': (list, False), - 'NetbiosNameServers': (list, False), - 'NetbiosNodeType': (integer, False), - 'NtpServers': (list, False), - 'Tags': ((Tags, list), False), - } - - -class EgressOnlyInternetGateway(AWSObject): - resource_type = "AWS::EC2::EgressOnlyInternetGateway" - - props = { - 'VpcId': (basestring, True), - } - - -class EIP(AWSObject): - resource_type = "AWS::EC2::EIP" - - props = { - 'InstanceId': (basestring, False), - 'Domain': (basestring, False), - 'PublicIpv4Pool': (basestring, False), - } - - -class EIPAssociation(AWSObject): - resource_type = "AWS::EC2::EIPAssociation" - - props = { - 'AllocationId': (basestring, False), - 'EIP': (basestring, False), - 'InstanceId': (basestring, False), - 'NetworkInterfaceId': (basestring, False), - 'PrivateIpAddress': (basestring, False), - } - - -class FlowLog(AWSObject): - resource_type = "AWS::EC2::FlowLog" - - props = { - 'DeliverLogsPermissionArn': (basestring, False), - 'LogDestination': (basestring, False), - 'LogDestinationType': (basestring, False), - 'LogGroupName': (basestring, False), - 'ResourceId': (basestring, True), - 'ResourceType': (basestring, True), - 'TrafficType': (basestring, True), - } - - -class NatGateway(AWSObject): - resource_type = "AWS::EC2::NatGateway" - - props = { - 'AllocationId': (basestring, True), - 'SubnetId': (basestring, True), - 'Tags': ((Tags, list), False), - } - - -class EBSBlockDevice(AWSProperty): - props = { - 'DeleteOnTermination': (boolean, False), - 'Encrypted': (boolean, False), - 'Iops': (integer, False), # Conditional - 'SnapshotId': (basestring, False), # Conditional - 'VolumeSize': (integer, False), # Conditional - 'VolumeType': (basestring, False), - } - - -NO_DEVICE = {} - - -class BlockDeviceMapping(AWSProperty): - props = { - 'DeviceName': (basestring, True), - 'Ebs': (EBSBlockDevice, False), # Conditional - 'NoDevice': (dict, False), - 'VirtualName': (basestring, False), # Conditional - } - - -class MountPoint(AWSProperty): - props = { - 
'Device': (basestring, True), - 'VolumeId': (basestring, True), - } - - -class Placement(AWSProperty): - props = { - 'AvailabilityZone': (basestring, False), - 'GroupName': (basestring, False), - } - - -class CreditSpecification(AWSProperty): - props = { - 'CPUCredits': (basestring, False), - } - - -class ElasticGpuSpecification(AWSProperty): - props = { - 'Type': (basestring, True), - } - - -class Ipv6Addresses(AWSHelperFn): - def __init__(self, address): - self.data = { - 'Ipv6Address': address, - } - - -class LaunchTemplateSpecification(AWSProperty): - props = { - 'LaunchTemplateId': (basestring, False), - 'LaunchTemplateName': (basestring, False), - 'Version': (basestring, True), - } - - -class PrivateIpAddressSpecification(AWSProperty): - props = { - 'Primary': (boolean, True), - 'PrivateIpAddress': (basestring, True), - } - - -class NetworkInterfaceProperty(AWSProperty): - props = { - 'AssociatePublicIpAddress': (boolean, False), - 'DeleteOnTermination': (boolean, False), - 'Description': (basestring, False), - 'DeviceIndex': (integer, True), - 'GroupSet': ([basestring], False), - 'NetworkInterfaceId': (basestring, False), - 'Ipv6AddressCount': (integer, False), - 'Ipv6Addresses': ([Ipv6Addresses], False), - 'PrivateIpAddress': (basestring, False), - 'PrivateIpAddresses': ([PrivateIpAddressSpecification], False), - 'SecondaryPrivateIpAddressCount': (integer, False), - 'SubnetId': (basestring, False), - } - - -class AssociationParameters(AWSProperty): - props = { - 'Key': (basestring, True), - 'Value': ([basestring], True), - } - - -class SsmAssociations(AWSProperty): - props = { - 'AssociationParameters': ([AssociationParameters], False), - 'DocumentName': (basestring, True), - } - - -class Host(AWSObject): - resource_type = "AWS::EC2::Host" - - props = { - 'AutoPlacement': (basestring, False), - 'AvailabilityZone': (basestring, True), - 'InstanceType': (basestring, True), - } - - -class ElasticInferenceAccelerator(AWSProperty): - props = { - 'Type': (validate_elasticinferenceaccelerator_type, True), - } - - -class LicenseSpecification(AWSProperty): - props = { - 'LicenseConfigurationArn': (basestring, True), - } - - -class Instance(AWSObject): - resource_type = "AWS::EC2::Instance" - - props = { - 'Affinity': (basestring, False), - 'AvailabilityZone': (basestring, False), - 'BlockDeviceMappings': (list, False), - 'CreditSpecification': (CreditSpecification, False), - 'DisableApiTermination': (boolean, False), - 'EbsOptimized': (boolean, False), - 'ElasticGpuSpecifications': ([ElasticGpuSpecification], False), - 'ElasticInferenceAccelerators': ([ElasticInferenceAccelerator], False), - 'HostId': (basestring, False), - 'IamInstanceProfile': (basestring, False), - 'ImageId': (basestring, False), - 'InstanceInitiatedShutdownBehavior': (basestring, False), - 'InstanceType': (basestring, False), - 'Ipv6AddressCount': (integer, False), - 'Ipv6Addresses': ([Ipv6Addresses], False), - 'KernelId': (basestring, False), - 'KeyName': (basestring, False), - 'LaunchTemplate': (LaunchTemplateSpecification, False), - 'LicenseSpecifications': ([LicenseSpecification], False), - 'Monitoring': (boolean, False), - 'NetworkInterfaces': ([NetworkInterfaceProperty], False), - 'PlacementGroupName': (basestring, False), - 'PrivateIpAddress': (basestring, False), - 'RamdiskId': (basestring, False), - 'SecurityGroupIds': (list, False), - 'SecurityGroups': (list, False), - 'SsmAssociations': ([SsmAssociations], False), - 'SourceDestCheck': (boolean, False), - 'SubnetId': (basestring, False), - 'Tags': ((Tags, 
list), False), - 'Tenancy': (basestring, False), - 'UserData': (basestring, False), - 'Volumes': (list, False), - } - - -class InternetGateway(AWSObject): - resource_type = "AWS::EC2::InternetGateway" - - props = { - 'Tags': ((Tags, list), False), - } - - -class NetworkAcl(AWSObject): - resource_type = "AWS::EC2::NetworkAcl" - - props = { - 'Tags': ((Tags, list), False), - 'VpcId': (basestring, True), - } - - -class ICMP(AWSProperty): - props = { - 'Code': (integer, False), - 'Type': (integer, False), - } - - -class PortRange(AWSProperty): - props = { - 'From': (network_port, False), - 'To': (network_port, False), - } - - -class NetworkAclEntry(AWSObject): - resource_type = "AWS::EC2::NetworkAclEntry" - - props = { - 'CidrBlock': (basestring, False), - 'Egress': (boolean, False), - 'Icmp': (ICMP, False), # Conditional - 'Ipv6CidrBlock': (basestring, False), - 'NetworkAclId': (basestring, True), - 'PortRange': (PortRange, False), # Conditional - 'Protocol': (network_port, True), - 'RuleAction': (basestring, True), - 'RuleNumber': (integer_range(1, 32766), True), - } - - def validate(self): - conds = [ - 'CidrBlock', - 'Ipv6CidrBlock', - ] - exactly_one(self.__class__.__name__, self.properties, conds) - - -class NetworkInterface(AWSObject): - resource_type = "AWS::EC2::NetworkInterface" - - props = { - 'Description': (basestring, False), - 'GroupSet': (list, False), - 'Ipv6AddressCount': (integer, False), - 'Ipv6Addresses': ([Ipv6Addresses], False), - 'PrivateIpAddress': (basestring, False), - 'PrivateIpAddresses': ([PrivateIpAddressSpecification], False), - 'SecondaryPrivateIpAddressCount': (integer, False), - 'SourceDestCheck': (boolean, False), - 'SubnetId': (basestring, True), - 'Tags': ((Tags, list), False), - } - - -class NetworkInterfaceAttachment(AWSObject): - resource_type = "AWS::EC2::NetworkInterfaceAttachment" - - props = { - 'DeleteOnTermination': (boolean, False), - 'DeviceIndex': (integer, True), - 'InstanceId': (basestring, True), - 'NetworkInterfaceId': (basestring, True), - } - - -PERMISSION_INSTANCE_ATTACH = 'INSTANCE-ATTACH' -PERMISSION_EIP_ASSOCIATE = 'EIP-ASSOCIATE' - - -class NetworkInterfacePermission(AWSObject): - resource_type = "AWS::EC2::NetworkInterfacePermission" - - props = { - 'AwsAccountId': (basestring, True), - 'NetworkInterfaceId': (basestring, True), - 'Permission': (basestring, True), - } - - -class Route(AWSObject): - resource_type = "AWS::EC2::Route" - - props = { - 'DestinationCidrBlock': (basestring, False), - 'DestinationIpv6CidrBlock': (basestring, False), - 'EgressOnlyInternetGatewayId': (basestring, False), - 'GatewayId': (basestring, False), - 'InstanceId': (basestring, False), - 'NatGatewayId': (basestring, False), - 'NetworkInterfaceId': (basestring, False), - 'RouteTableId': (basestring, True), - 'VpcPeeringConnectionId': (basestring, False), - } - - def validate(self): - cidr_conds = [ - 'DestinationCidrBlock', - 'DestinationIpv6CidrBlock', - ] - gateway_conds = [ - 'EgressOnlyInternetGatewayId', - 'GatewayId', - 'InstanceId', - 'NatGatewayId', - 'NetworkInterfaceId', - 'VpcPeeringConnectionId' - ] - exactly_one(self.__class__.__name__, self.properties, cidr_conds) - exactly_one(self.__class__.__name__, self.properties, gateway_conds) - - -class RouteTable(AWSObject): - resource_type = "AWS::EC2::RouteTable" - - props = { - 'Tags': ((Tags, list), False), - 'VpcId': (basestring, True), - } - - -def check_ports(props): - # IpProtocol is a required field but not all values allowed require - # ToPort and FromPort. 
The ones that don't need these ports are: - ports_optional = [ - "-1", # all protocols - "58", # ICMPv6 - ] - proto = props['IpProtocol'] - - if proto not in ports_optional: - if not ('ToPort' in props and 'FromPort' in props): - raise ValueError( - "ToPort/FromPort must be specified for proto %s" % proto) - - -class SecurityGroupEgress(AWSObject): - resource_type = "AWS::EC2::SecurityGroupEgress" - - props = { - 'CidrIp': (basestring, False), - 'CidrIpv6': (basestring, False), - 'Description': (basestring, False), - 'DestinationPrefixListId': (basestring, False), - 'DestinationSecurityGroupId': (basestring, False), - 'FromPort': (network_port, False), - 'GroupId': (basestring, True), - 'IpProtocol': (basestring, True), - 'ToPort': (network_port, False), - # - # Workaround for a bug in CloudFormation and EC2 where the - # DestinationSecurityGroupId property is ignored causing - # egress rules targeting a security group to be ignored. - # Using SourceSecurityGroupId instead works fine even in - # egress rules. AWS have known about this bug for a while. - # - 'SourceSecurityGroupId': (basestring, False), - } - - def validate(self): - conds = [ - 'CidrIp', - 'CidrIpv6', - 'DestinationPrefixListId', - 'DestinationSecurityGroupId', - ] - exactly_one(self.__class__.__name__, self.properties, conds) - check_ports(self.properties) - - -class SecurityGroupIngress(AWSObject): - resource_type = "AWS::EC2::SecurityGroupIngress" - - props = { - 'CidrIp': (basestring, False), - 'CidrIpv6': (basestring, False), - 'Description': (basestring, False), - 'FromPort': (network_port, False), - 'GroupName': (basestring, False), - 'GroupId': (basestring, False), - 'IpProtocol': (basestring, True), - 'SourceSecurityGroupName': (basestring, False), - 'SourceSecurityGroupId': (basestring, False), - 'SourceSecurityGroupOwnerId': (basestring, False), - 'ToPort': (network_port, False), - } - - def validate(self): - conds = [ - 'CidrIp', - 'CidrIpv6', - 'SourceSecurityGroupName', - 'SourceSecurityGroupId', - ] - exactly_one(self.__class__.__name__, self.properties, conds) - check_ports(self.properties) - - -class SecurityGroupRule(AWSProperty): - props = { - 'CidrIp': (basestring, False), - 'CidrIpv6': (basestring, False), - 'Description': (basestring, False), - 'FromPort': (network_port, False), - 'IpProtocol': (basestring, True), - 'SourceSecurityGroupId': (basestring, False), - 'SourceSecurityGroupName': (basestring, False), - 'SourceSecurityGroupOwnerId': (basestring, False), - 'ToPort': (network_port, False), - 'DestinationSecurityGroupId': (basestring, False), - } - - -class SecurityGroup(AWSObject): - resource_type = "AWS::EC2::SecurityGroup" - - props = { - 'GroupName': (basestring, False), - 'GroupDescription': (basestring, True), - 'SecurityGroupEgress': (list, False), - 'SecurityGroupIngress': (list, False), - 'VpcId': (basestring, False), - 'Tags': ((Tags, list), False), - } - - -class Subnet(AWSObject): - resource_type = "AWS::EC2::Subnet" - - props = { - 'AssignIpv6AddressOnCreation': (boolean, False), - 'AvailabilityZone': (basestring, False), - 'CidrBlock': (basestring, True), - 'Ipv6CidrBlock': (basestring, False), - 'MapPublicIpOnLaunch': (boolean, False), - 'Tags': ((Tags, list), False), - 'VpcId': (basestring, True), - } - - def validate(self): - if 'Ipv6CidrBlock' in self.properties: - if not self.properties.get('AssignIpv6AddressOnCreation'): - raise ValueError( - "If Ipv6CidrBlock is present, " - "AssignIpv6AddressOnCreation must be set to True" - ) - - -class 
SubnetNetworkAclAssociation(AWSObject): - resource_type = "AWS::EC2::SubnetNetworkAclAssociation" - - props = { - 'SubnetId': (basestring, True), - 'NetworkAclId': (basestring, True), - } - - -class SubnetRouteTableAssociation(AWSObject): - resource_type = "AWS::EC2::SubnetRouteTableAssociation" - - props = { - 'RouteTableId': (basestring, True), - 'SubnetId': (basestring, True), - } - - -class Volume(AWSObject): - resource_type = "AWS::EC2::Volume" - - props = { - 'AutoEnableIO': (boolean, False), - 'AvailabilityZone': (basestring, True), - 'Encrypted': (boolean, False), - 'Iops': (positive_integer, False), - 'KmsKeyId': (basestring, False), - 'Size': (positive_integer, False), - 'SnapshotId': (basestring, False), - 'Tags': ((Tags, list), False), - 'VolumeType': (basestring, False), - } - - -class VolumeAttachment(AWSObject): - resource_type = "AWS::EC2::VolumeAttachment" - - props = { - 'Device': (basestring, True), - 'InstanceId': (basestring, True), - 'VolumeId': (basestring, True), - } - - -def instance_tenancy(value): - valid = ['default', 'dedicated'] - if value not in valid: - raise ValueError('InstanceTenancy needs to be one of %r' % valid) - return value - - -class VPC(AWSObject): - resource_type = "AWS::EC2::VPC" - - props = { - 'CidrBlock': (basestring, True), - 'EnableDnsSupport': (boolean, False), - 'EnableDnsHostnames': (boolean, False), - 'InstanceTenancy': (instance_tenancy, False), - 'Tags': ((Tags, list), False), - } - - -class VPCDHCPOptionsAssociation(AWSObject): - resource_type = "AWS::EC2::VPCDHCPOptionsAssociation" - - props = { - 'DhcpOptionsId': (basestring, True), - 'VpcId': (basestring, True), - } - - -class VPCEndpoint(AWSObject): - resource_type = "AWS::EC2::VPCEndpoint" - - props = { - 'PolicyDocument': (policytypes, False), - 'PrivateDnsEnabled': (boolean, False), - 'RouteTableIds': ([basestring], False), - 'SecurityGroupIds': ([basestring], False), - 'ServiceName': (basestring, True), - 'SubnetIds': ([basestring], False), - 'VpcEndpointType': (vpc_endpoint_type, False), - 'VpcId': (basestring, True), - } - - -class VPCEndpointConnectionNotification(AWSObject): - resource_type = "AWS::EC2::VPCEndpointConnectionNotification" - - props = { - 'ConnectionEvents': ([basestring], True), - 'ConnectionNotificationArn': (basestring, True), - 'ServiceId': (basestring, False), - 'VPCEndpointId': (basestring, False), - } - - -class VPCEndpointService(AWSObject): - resource_type = "AWS::EC2::VPCEndpointService" - - props = { - 'AcceptanceRequired': (boolean, False), - 'NetworkLoadBalancerArns': ([basestring], True), - } - - -class VPCEndpointServicePermissions(AWSObject): - resource_type = "AWS::EC2::VPCEndpointServicePermissions" - - props = { - 'AllowedPrincipals': ([basestring], False), - 'ServiceId': (basestring, True), - } - - -class VPCGatewayAttachment(AWSObject): - resource_type = "AWS::EC2::VPCGatewayAttachment" - - props = { - 'InternetGatewayId': (basestring, False), - 'VpcId': (basestring, True), - 'VpnGatewayId': (basestring, False), - } - - -class VpnTunnelOptionsSpecification(AWSProperty): - props = { - 'PreSharedKey': (vpn_pre_shared_key, False), - 'TunnelInsideCidr': (vpn_tunnel_inside_cidr, False), - } - - -class VPNConnection(AWSObject): - resource_type = "AWS::EC2::VPNConnection" - - props = { - 'Type': (basestring, True), - 'CustomerGatewayId': (basestring, True), - 'StaticRoutesOnly': (boolean, False), - 'Tags': ((Tags, list), False), - 'VpnGatewayId': (basestring, True), - 'VpnTunnelOptionsSpecifications': ( - [VpnTunnelOptionsSpecification], False 
- ), - } - - -class VPNConnectionRoute(AWSObject): - resource_type = "AWS::EC2::VPNConnectionRoute" - - props = { - 'DestinationCidrBlock': (basestring, True), - 'VpnConnectionId': (basestring, True), - } - - -class VPNGateway(AWSObject): - resource_type = "AWS::EC2::VPNGateway" - - props = { - 'AmazonSideAsn': (positive_integer, False), - 'Type': (basestring, True), - 'Tags': ((Tags, list), False), - } - - -class VPNGatewayRoutePropagation(AWSObject): - resource_type = "AWS::EC2::VPNGatewayRoutePropagation" - - props = { - 'RouteTableIds': ([basestring], True), - 'VpnGatewayId': (basestring, True), - } - - -class VPCPeeringConnection(AWSObject): - resource_type = "AWS::EC2::VPCPeeringConnection" - - props = { - 'PeerVpcId': (basestring, True), - 'VpcId': (basestring, True), - 'Tags': ((Tags, list), False), - 'PeerRegion': (basestring, False), - 'PeerOwnerId': (basestring, False), - 'PeerRoleArn': (basestring, False), - } - - -class Monitoring(AWSProperty): - props = { - 'Enabled': (boolean, False), - } - - -class NetworkInterfaces(AWSProperty): - props = { - 'AssociatePublicIpAddress': (boolean, False), - 'DeleteOnTermination': (boolean, False), - 'Description': (basestring, False), - 'DeviceIndex': (integer, True), - 'Groups': ([basestring], False), - 'Ipv6AddressCount': (integer, False), - 'Ipv6Addresses': ([Ipv6Addresses], False), - 'NetworkInterfaceId': (basestring, False), - 'PrivateIpAddresses': ([PrivateIpAddressSpecification], False), - 'SecondaryPrivateIpAddressCount': (integer, False), - 'SubnetId': (basestring, False), - } - - -class SecurityGroups(AWSProperty): - props = { - 'GroupId': (basestring, False), - } - - -class IamInstanceProfile(AWSProperty): - props = { - 'Arn': (basestring, False), - } - - -class SpotFleetTagSpecification(AWSProperty): - props = { - 'ResourceType': (basestring, True), - 'Tags': ((Tags, list), False), - } - - -class LaunchSpecifications(AWSProperty): - props = { - 'BlockDeviceMappings': ([BlockDeviceMapping], False), - 'EbsOptimized': (boolean, False), - 'IamInstanceProfile': (IamInstanceProfile, False), - 'ImageId': (basestring, True), - 'InstanceType': (basestring, True), - 'KernelId': (basestring, False), - 'KeyName': (basestring, False), - 'Monitoring': (Monitoring, False), - 'NetworkInterfaces': ([NetworkInterfaces], False), - 'Placement': (Placement, False), - 'RamdiskId': (basestring, False), - 'SecurityGroups': ([SecurityGroups], False), - 'SpotPrice': (basestring, False), - 'SubnetId': (basestring, False), - 'TagSpecifications': ([SpotFleetTagSpecification], False), - 'UserData': (basestring, False), - 'WeightedCapacity': (positive_integer, False), - } - - -class LaunchTemplateOverrides(AWSProperty): - props = { - 'AvailabilityZone': (basestring, False), - 'InstanceType': (basestring, False), - 'SpotPrice': (basestring, False), - 'SubnetId': (basestring, False), - 'WeightedCapacity': (double, False) - } - - -class LaunchTemplateConfigs(AWSProperty): - props = { - 'LaunchTemplateSpecification': (LaunchTemplateSpecification, True), - 'Overrides': ([LaunchTemplateOverrides], False) - } - - -class ClassicLoadBalancer(AWSProperty): - props = { - 'Name': (basestring, True) - } - - -class ClassicLoadBalancersConfig(AWSProperty): - props = { - 'ClassicLoadBalancers': ([ClassicLoadBalancer], True) - } - - -class TargetGroup(AWSProperty): - props = { - 'Arn': (basestring, True) - } - - -class LoadBalancersConfig(AWSProperty): - props = { - 'ClassicLoadBalancersConfig': ([ClassicLoadBalancersConfig], False), - 'TargetGroupsConfig': (TargetGroup, 
False) - } - - -class SpotFleetRequestConfigData(AWSProperty): - - props = { - 'AllocationStrategy': (basestring, False), - 'ExcessCapacityTerminationPolicy': (basestring, False), - 'IamFleetRole': (basestring, True), - 'InstanceInterruptionBehavior': (basestring, False), - 'LaunchSpecifications': ([LaunchSpecifications], False), - 'LaunchTemplateConfigs': ([LaunchTemplateConfigs], False), - 'LoadBalancersConfig': (LoadBalancersConfig, False), - 'ReplaceUnhealthyInstances': (boolean, False), - 'SpotPrice': (basestring, False), - 'TargetCapacity': (positive_integer, True), - 'TerminateInstancesWithExpiration': (boolean, False), - 'Type': (basestring, False), - 'ValidFrom': (basestring, False), - 'ValidUntil': (basestring, False), - } - - def validate(self): - conds = [ - 'LaunchSpecifications', - 'LaunchTemplateConfigs' - ] - exactly_one(self.__class__.__name__, self.properties, conds) - - -class SpotFleet(AWSObject): - resource_type = "AWS::EC2::SpotFleet" - - props = { - 'SpotFleetRequestConfigData': (SpotFleetRequestConfigData, True), - } - - -class PlacementGroup(AWSObject): - resource_type = "AWS::EC2::PlacementGroup" - - props = { - 'Strategy': (basestring, True), - } - - -class SubnetCidrBlock(AWSObject): - resource_type = "AWS::EC2::SubnetCidrBlock" - - props = { - 'Ipv6CidrBlock': (basestring, True), - 'SubnetId': (basestring, True), - } - - -class VPCCidrBlock(AWSObject): - resource_type = "AWS::EC2::VPCCidrBlock" - - props = { - 'AmazonProvidedIpv6CidrBlock': (boolean, False), - 'CidrBlock': (basestring, False), - 'VpcId': (basestring, True), - } - - -class TagSpecifications(AWSProperty): - props = { - 'ResourceType': (basestring, False), - 'Tags': ((Tags, list), False) - } - - -class SpotOptions(AWSProperty): - props = { - 'InstanceInterruptionBehavior': (basestring, False), - 'MaxPrice': (basestring, False), - 'SpotInstanceType': (basestring, False) - } - - -class InstanceMarketOptions(AWSProperty): - props = { - 'MarketType': (basestring, False), - 'SpotOptions': (SpotOptions, False) - } - - -class LaunchTemplateCreditSpecification(AWSProperty): - props = { - 'CpuCredits': (basestring, False), - } - - -class LaunchTemplateData(AWSProperty): - props = { - 'BlockDeviceMappings': ([BlockDeviceMapping], False), - 'CreditSpecification': (LaunchTemplateCreditSpecification, False), - 'DisableApiTermination': (boolean, False), - 'EbsOptimized': (boolean, False), - 'ElasticGpuSpecifications': ([ElasticGpuSpecification], False), - 'IamInstanceProfile': (IamInstanceProfile, False), - 'ImageId': (basestring, True), - 'InstanceInitiatedShutdownBehavior': (basestring, False), - 'InstanceMarketOptions': (InstanceMarketOptions, False), - 'InstanceType': (basestring, False), - 'KernelId': (basestring, False), - 'KeyName': (basestring, False), - 'Monitoring': (Monitoring, False), - 'NetworkInterfaces': ([NetworkInterfaces], False), - 'Placement': (Placement, False), - 'RamDiskId': (basestring, False), - 'SecurityGroups': (list, False), - 'SecurityGroupIds': (list, False), - 'TagSpecifications': ([TagSpecifications], False), - 'UserData': (basestring, False) - } - - -class LaunchTemplate(AWSObject): - resource_type = "AWS::EC2::LaunchTemplate" - props = { - 'LaunchTemplateData': (LaunchTemplateData, False), - 'LaunchTemplateName': (basestring, False), - } - - -class TransitGateway(AWSObject): - resource_type = "AWS::EC2::TransitGateway" - props = { - 'AmazonSideAsn': (integer, False), - 'AutoAcceptSharedAttachments': (basestring, False), - 'DefaultRouteTableAssociation': (basestring, False), - 
'DefaultRouteTablePropagation': (basestring, False), - 'DnsSupport': (basestring, False), - 'Tags': ((Tags, list), False), - 'VpnEcmpSupport': (basestring, False), - } - - -class TransitGatewayAttachment(AWSObject): - resource_type = "AWS::EC2::TransitGatewayAttachment" - props = { - 'SubnetIds': ([basestring], True), - 'Tags': ((Tags, list), False), - 'TransitGatewayId': (basestring, True), - 'VpcId': (basestring, True), - } - - -class TransitGatewayRoute(AWSObject): - resource_type = "AWS::EC2::TransitGatewayRoute" - props = { - 'Blackhole': (boolean, False), - 'DestinationCidrBlock': (basestring, False), - 'TransitGatewayAttachmentId': (basestring, False), - 'TransitGatewayRouteTableId': (basestring, True), - } - - -class TransitGatewayRouteTable(AWSObject): - resource_type = "AWS::EC2::TransitGatewayRouteTable" - props = { - 'Tags': ((Tags, list), False), - 'TransitGatewayId': (basestring, True), - } - - -class TransitGatewayRouteTableAssociation(AWSObject): - resource_type = "AWS::EC2::TransitGatewayRouteTableAssociation" - props = { - 'TransitGatewayAttachmentId': (basestring, True), - 'TransitGatewayRouteTableId': (basestring, True), - } - - -class TransitGatewayRouteTablePropagation(AWSObject): - resource_type = "AWS::EC2::TransitGatewayRouteTablePropagation" - props = { - 'TransitGatewayAttachmentId': (basestring, True), - 'TransitGatewayRouteTableId': (basestring, True), - } - - -class FleetLaunchTemplateSpecificationRequest(AWSProperty): - props = { - 'LaunchTemplateId': (basestring, False), - 'LaunchTemplateName': (basestring, False), - 'Version': (basestring, False), - } - - -class FleetLaunchTemplateOverridesRequest(AWSProperty): - props = { - 'AvailabilityZone': (basestring, False), - 'InstanceType': (basestring, False), - 'MaxPrice': (basestring, False), - 'Priority': (double, False), - 'SubnetId': (basestring, False), - 'WeightedCapacity': (double, False), - } - - -class FleetLaunchTemplateConfigRequest(AWSProperty): - props = { - 'LaunchTemplateSpecification': ( - FleetLaunchTemplateSpecificationRequest, - False - ), - 'Overrides': ([FleetLaunchTemplateOverridesRequest], False), - } - - -class OnDemandOptionsRequest(AWSProperty): - props = { - 'AllocationStrategy': (basestring, False), - } - - -class SpotOptionsRequest(AWSProperty): - props = { - 'AllocationStrategy': (basestring, False), - 'InstanceInterruptionBehavior': (basestring, False), - 'InstancePoolsToUseCount': (integer, False), - } - - -class TargetCapacitySpecificationRequest(AWSProperty): - props = { - 'DefaultTargetCapacityType': (basestring, False), - 'OnDemandTargetCapacity': (integer, False), - 'SpotTargetCapacity': (integer, False), - 'TotalTargetCapacity': (integer, False), - } - - -class EC2Fleet(AWSObject): - resource_type = "AWS::EC2::EC2Fleet" - props = { - 'ExcessCapacityTerminationPolicy': (basestring, False), - 'LaunchTemplateConfigs': ([FleetLaunchTemplateConfigRequest], True), - 'OnDemandOptions': (OnDemandOptionsRequest, False), - 'ReplaceUnhealthyInstances': (boolean, False), - 'SpotOptions': (SpotOptionsRequest, False), - 'TagSpecifications': ([TagSpecifications], False), - 'TargetCapacitySpecification': (TargetCapacitySpecificationRequest, - False), - 'TerminateInstancesWithExpiration': (boolean, False), - 'Type': (basestring, False), - 'ValidFrom': (str, False), - 'ValidUntil': (str, False), - } diff --git a/troposphere/ecr.py b/troposphere/ecr.py deleted file mode 100644 index c5bdc1ff5..000000000 --- a/troposphere/ecr.py +++ /dev/null @@ -1,23 +0,0 @@ -from . 
import AWSObject, AWSProperty -try: - from awacs.aws import Policy - policytypes = (dict, Policy) -except ImportError: - policytypes = dict, - - -class LifecyclePolicy(AWSProperty): - props = { - 'LifecyclePolicyText': (basestring, False), - 'RegistryId': (basestring, False), - } - - -class Repository(AWSObject): - resource_type = "AWS::ECR::Repository" - - props = { - 'LifecyclePolicy': (LifecyclePolicy, False), - 'RepositoryName': (basestring, False), - 'RepositoryPolicyText': (policytypes, False), - } diff --git a/troposphere/ecs.py b/troposphere/ecs.py deleted file mode 100644 index d5b7a0518..000000000 --- a/troposphere/ecs.py +++ /dev/null @@ -1,290 +0,0 @@ -from . import AWSObject, AWSProperty -from .validators import boolean, integer, network_port, positive_integer - - -LAUNCH_TYPE_EC2 = 'EC2' -LAUNCH_TYPE_FARGATE = 'FARGATE' - -SCHEDULING_STRATEGY_REPLICA = 'REPLICA' -SCHEDULING_STRATEGY_DAEMON = 'DAEMON' - - -class Cluster(AWSObject): - resource_type = "AWS::ECS::Cluster" - - props = { - 'ClusterName': (basestring, False), - } - - -class LoadBalancer(AWSProperty): - props = { - 'ContainerName': (basestring, False), - 'ContainerPort': (network_port, True), - 'LoadBalancerName': (basestring, False), - 'TargetGroupArn': (basestring, False), - } - - -class DeploymentConfiguration(AWSProperty): - props = { - 'MaximumPercent': (positive_integer, False), - 'MinimumHealthyPercent': (positive_integer, False), - } - - -def placement_strategy_validator(x): - valid_values = ['random', 'spread', 'binpack'] - if x not in valid_values: - raise ValueError("Placement Strategy type must be one of: %s" % - ', '.join(valid_values)) - return x - - -def placement_constraint_validator(x): - valid_values = ['distinctInstance', 'memberOf'] - if x not in valid_values: - raise ValueError("Placement Constraint type must be one of: %s" % - ', '.join(valid_values)) - return x - - -def scope_validator(x): - valid_values = ['shared', 'task'] - if x not in valid_values: - raise ValueError("Scope type must be one of: %s" % - ', '.join(valid_values)) - return x - - -class PlacementConstraint(AWSProperty): - props = { - 'Type': (placement_constraint_validator, True), - 'Expression': (basestring, False), - } - - -class PlacementStrategy(AWSProperty): - props = { - 'Type': (placement_strategy_validator, True), - 'Field': (basestring, False), - } - - -class AwsvpcConfiguration(AWSProperty): - props = { - 'AssignPublicIp': (basestring, False), - 'SecurityGroups': (list, False), - 'Subnets': (list, True), - } - - -class NetworkConfiguration(AWSProperty): - props = { - 'AwsvpcConfiguration': (AwsvpcConfiguration, False), - } - - -def launch_type_validator(x): - valid_values = [LAUNCH_TYPE_EC2, LAUNCH_TYPE_FARGATE] - if x not in valid_values: - raise ValueError("Launch Type must be one of: %s" % - ', '.join(valid_values)) - return x - - -class ServiceRegistry(AWSProperty): - props = { - 'ContainerName': (basestring, False), - 'ContainerPort': (integer, False), - 'Port': (integer, False), - 'RegistryArn': (basestring, False), - } - - -class Service(AWSObject): - resource_type = "AWS::ECS::Service" - - props = { - 'Cluster': (basestring, False), - 'DeploymentConfiguration': (DeploymentConfiguration, False), - 'DesiredCount': (positive_integer, False), - 'HealthCheckGracePeriodSeconds': (positive_integer, False), - 'LaunchType': (launch_type_validator, False), - 'LoadBalancers': ([LoadBalancer], False), - 'NetworkConfiguration': (NetworkConfiguration, False), - 'Role': (basestring, False), - 'PlacementConstraints': 
([PlacementConstraint], False), - 'PlacementStrategies': ([PlacementStrategy], False), - 'PlatformVersion': (basestring, False), - 'SchedulingStrategy': (basestring, False), - 'ServiceName': (basestring, False), - 'ServiceRegistries': ([ServiceRegistry], False), - 'TaskDefinition': (basestring, True), - } - - -class Environment(AWSProperty): - props = { - 'Name': (basestring, True), - 'Value': (basestring, True), - } - - -class MountPoint(AWSProperty): - props = { - 'ContainerPath': (basestring, True), - 'SourceVolume': (basestring, True), - 'ReadOnly': (boolean, False), - } - - -class PortMapping(AWSProperty): - props = { - 'ContainerPort': (network_port, True), - 'HostPort': (network_port, False), - 'Protocol': (basestring, False), - } - - -class VolumesFrom(AWSProperty): - props = { - 'SourceContainer': (basestring, True), - 'ReadOnly': (boolean, False), - } - - -class HostEntry(AWSProperty): - props = { - 'Hostname': (basestring, True), - 'IpAddress': (basestring, True), - } - - -class Device(AWSProperty): - props = { - 'ContainerPath': (basestring, False), - 'HostPath': (basestring, False), - 'Permissions': ([basestring], False), - } - - -class HealthCheck(AWSProperty): - props = { - 'Command': ([basestring], True), - 'Interval': (integer, False), - 'Retries': (integer, False), - 'StartPeriod': (integer, False), - 'Timeout': (integer, False), - } - - -class KernelCapabilities(AWSProperty): - props = { - 'Add': ([basestring], False), - 'Drop': ([basestring], False), - } - - -class LinuxParameters(AWSProperty): - props = { - 'Capabilities': (KernelCapabilities, False), - 'Devices': ([Device], False), - 'InitProcessEnabled': (boolean, False), - } - - -class LogConfiguration(AWSProperty): - props = { - 'LogDriver': (basestring, True), - 'Options': (dict, False), - } - - -class RepositoryCredentials(AWSProperty): - props = { - 'CredentialsParameter': (basestring, False) - } - - -class Ulimit(AWSProperty): - props = { - 'HardLimit': (integer, True), - 'Name': (basestring, False), - 'SoftLimit': (integer, True), - } - - -class ContainerDefinition(AWSProperty): - props = { - 'Command': ([basestring], False), - 'Cpu': (positive_integer, False), - 'DisableNetworking': (boolean, False), - 'DnsSearchDomains': ([basestring], False), - 'DnsServers': ([basestring], False), - 'DockerLabels': (dict, False), - 'DockerSecurityOptions': ([basestring], False), - 'EntryPoint': ([basestring], False), - 'Environment': ([Environment], False), - 'Essential': (boolean, False), - 'ExtraHosts': ([HostEntry], False), - 'HealthCheck': (HealthCheck, False), - 'Hostname': (basestring, False), - 'Image': (basestring, True), - 'Links': ([basestring], False), - 'LinuxParameters': (LinuxParameters, False), - 'LogConfiguration': (LogConfiguration, False), - 'Memory': (positive_integer, False), - 'MemoryReservation': (positive_integer, False), - 'MountPoints': ([MountPoint], False), - 'Name': (basestring, True), - 'PortMappings': ([PortMapping], False), - 'Privileged': (boolean, False), - 'ReadonlyRootFilesystem': (boolean, False), - 'RepositoryCredentials': (RepositoryCredentials, False), - 'Ulimits': ([Ulimit], False), - 'User': (basestring, False), - 'VolumesFrom': ([VolumesFrom], False), - 'WorkingDirectory': (basestring, False), - } - - -class Host(AWSProperty): - props = { - 'SourcePath': (basestring, False), - } - - -class DockerVolumeConfiguration(AWSProperty): - props = { - 'Autoprovision': (boolean, False), - 'Driver': (basestring, False), - 'DriverOpts': (dict, False), - 'Labels': (dict, False), - 'Scope': 
(scope_validator, False) - } - - -class Volume(AWSProperty): - props = { - 'DockerVolumeConfiguration': (DockerVolumeConfiguration, False), - 'Name': (basestring, True), - 'Host': (Host, False), - } - - -class TaskDefinition(AWSObject): - resource_type = "AWS::ECS::TaskDefinition" - - props = { - 'ContainerDefinitions': ([ContainerDefinition], True), - 'Cpu': (basestring, False), - 'ExecutionRoleArn': (basestring, False), - 'Family': (basestring, False), - 'Memory': (basestring, False), - 'NetworkMode': (basestring, False), - 'PlacementConstraints': ([PlacementConstraint], False), - 'RequiresCompatibilities': ([basestring], False), - 'TaskRoleArn': (basestring, False), - 'Volumes': ([Volume], False), - } diff --git a/troposphere/efs.py b/troposphere/efs.py deleted file mode 100644 index bbe0ef8ec..000000000 --- a/troposphere/efs.py +++ /dev/null @@ -1,46 +0,0 @@ -from . import AWSObject, Tags -from .validators import boolean - -Bursting = 'bursting' -Provisioned = 'provisioned' - - -def throughput_mode_validator(mode): - valid_modes = [Bursting, Provisioned] - if mode not in valid_modes: - raise ValueError( - 'ThroughputMode must be one of: "%s"' % (', '.join(valid_modes)) - ) - return mode - - -def provisioned_throughput_validator(throughput): - if throughput < 0.0: - raise ValueError( - 'ProvisionedThroughputInMibps must be greater than or equal to 0.0' - ) - return throughput - - -class FileSystem(AWSObject): - resource_type = "AWS::EFS::FileSystem" - - props = { - 'Encrypted': (boolean, False), - 'FileSystemTags': (Tags, False), - 'KmsKeyId': (basestring, False), - 'PerformanceMode': (basestring, False), - 'ProvisionedThroughputInMibps': (float, False), - 'ThroughputMode': (throughput_mode_validator, False), - } - - -class MountTarget(AWSObject): - resource_type = "AWS::EFS::MountTarget" - - props = { - 'FileSystemId': (basestring, True), - 'IpAddress': (basestring, False), - 'SecurityGroups': ([basestring], True), - 'SubnetId': (basestring, True), - } diff --git a/troposphere/eks.py b/troposphere/eks.py deleted file mode 100644 index 7446c95f1..000000000 --- a/troposphere/eks.py +++ /dev/null @@ -1,24 +0,0 @@ -# Copyright (c) 2012-2018, Mark Peek -# All rights reserved. -# -# See LICENSE file for full license. - -from . import AWSObject, AWSProperty - - -class ResourcesVpcConfig(AWSProperty): - props = { - 'SecurityGroupIds': ([basestring], False), - 'SubnetIds': ([basestring], True), - } - - -class Cluster(AWSObject): - resource_type = "AWS::EKS::Cluster" - - props = { - 'Name': (basestring, False), - 'ResourcesVpcConfig': (ResourcesVpcConfig, True), - 'RoleArn': (basestring, True), - 'Version': (basestring, False), - } diff --git a/troposphere/elasticache.py b/troposphere/elasticache.py deleted file mode 100644 index f35d005d2..000000000 --- a/troposphere/elasticache.py +++ /dev/null @@ -1,157 +0,0 @@ -# Copyright (c) 2013, Mark Peek -# All rights reserved. -# -# See LICENSE file for full license. - -import re - -from . 
import AWSObject, AWSProperty, Tags -from .validators import boolean, integer, network_port - - -def validate_node_group_id(node_group_id): - if re.match(r'\d{1,4}', node_group_id): - return node_group_id - raise ValueError("Invalid NodeGroupId: %s" % node_group_id) - - -class CacheCluster(AWSObject): - resource_type = "AWS::ElastiCache::CacheCluster" - - props = { - 'AutoMinorVersionUpgrade': (boolean, False), - 'AZMode': (basestring, False), - 'CacheNodeType': (basestring, True), - 'CacheParameterGroupName': (basestring, False), - 'CacheSecurityGroupNames': ([basestring], False), - 'CacheSubnetGroupName': (basestring, False), - 'ClusterName': (basestring, False), - 'Engine': (basestring, True), - 'EngineVersion': (basestring, False), - 'NotificationTopicArn': (basestring, False), - 'NumCacheNodes': (integer, True), - 'Port': (integer, False), - 'PreferredAvailabilityZone': (basestring, False), - 'PreferredAvailabilityZones': ([basestring], False), - 'PreferredMaintenanceWindow': (basestring, False), - 'SnapshotArns': ([basestring], False), - 'SnapshotName': (basestring, False), - 'SnapshotRetentionLimit': (integer, False), - 'SnapshotWindow': (basestring, False), - 'Tags': (Tags, False), - 'VpcSecurityGroupIds': ([basestring], False), - } - - def validate(self): - # Check that AZMode is "cross-az" if more than one Availability zone - # is specified in PreferredAvailabilityZones - preferred_azs = self.properties.get('PreferredAvailabilityZones') - if preferred_azs is not None and \ - isinstance(preferred_azs, list) and \ - len(preferred_azs) > 1: - if self.properties.get('AZMode') != 'cross-az': - raise ValueError('AZMode must be "cross-az" if more than one a' - 'vailability zone is specified in PreferredAv' - 'ailabilityZones: http://docs.aws.amazon.com/' - 'AWSCloudFormation/latest/UserGuide/aws-prope' - 'rties-elasticache-cache-cluster.html#cfn-ela' - 'sticache-cachecluster-azmode') - - return True - - -class ParameterGroup(AWSObject): - resource_type = "AWS::ElastiCache::ParameterGroup" - - props = { - 'CacheParameterGroupFamily': (basestring, True), - 'Description': (basestring, True), - 'Properties': (dict, True), - } - - -class SecurityGroup(AWSObject): - resource_type = "AWS::ElastiCache::SecurityGroup" - - props = { - 'Description': (basestring, False), - } - - -class SecurityGroupIngress(AWSObject): - resource_type = "AWS::ElastiCache::SecurityGroupIngress" - - props = { - 'CacheSecurityGroupName': (basestring, True), - 'EC2SecurityGroupName': (basestring, True), - 'EC2SecurityGroupOwnerId': (basestring, False), - } - - -class SubnetGroup(AWSObject): - resource_type = "AWS::ElastiCache::SubnetGroup" - - props = { - 'CacheSubnetGroupName': (basestring, False), - 'Description': (basestring, True), - 'SubnetIds': (list, True), - } - - -class ReplicationGroup(AWSObject): - resource_type = "AWS::ElastiCache::ReplicationGroup" - - props = { - 'AtRestEncryptionEnabled': (boolean, False), - 'AuthToken': (basestring, False), - 'AutoMinorVersionUpgrade': (boolean, False), - 'AutomaticFailoverEnabled': (boolean, False), - 'CacheNodeType': (basestring, True), - 'CacheParameterGroupName': (basestring, False), - 'CacheSecurityGroupNames': ([basestring], False), - 'CacheSubnetGroupName': (basestring, False), - 'Engine': (basestring, True), - 'EngineVersion': (basestring, False), - 'NodeGroupConfiguration': (list, False), - 'NotificationTopicArn': (basestring, False), - 'NumCacheClusters': (integer, False), - 'NumNodeGroups': (integer, False), - 'Port': (network_port, False), - 
'PreferredCacheClusterAZs': ([basestring], False), - 'PreferredMaintenanceWindow': (basestring, False), - 'PrimaryClusterId': (basestring, False), - 'ReplicasPerNodeGroup': (integer, False), - 'ReplicationGroupDescription': (basestring, True), - 'ReplicationGroupId': (basestring, False), - 'SecurityGroupIds': ([basestring], False), - 'SnapshotArns': ([basestring], False), - 'SnapshotName': (basestring, False), - 'SnapshotRetentionLimit': (integer, False), - 'SnapshottingClusterId': (basestring, False), - 'SnapshotWindow': (basestring, False), - 'Tags': (Tags, False), - 'TransitEncryptionEnabled': (boolean, False), - } - - def validate(self): - if 'NumCacheClusters' not in self.properties and \ - 'NumNodeGroups' not in self.properties and \ - 'ReplicasPerNodeGroup' not in self.properties and \ - 'PrimaryClusterId' not in self.properties: - raise ValueError( - 'One of PrimaryClusterId, NumCacheClusters, ' - 'NumNodeGroups or ReplicasPerNodeGroup are required' - 'in type AWS::ElastiCache::ReplicationGroup' - ) - - return True - - -class NodeGroupConfiguration(AWSProperty): - props = { - 'NodeGroupId': (validate_node_group_id, False), - 'PrimaryAvailabilityZone': (basestring, False), - 'ReplicaAvailabilityZones': (basestring, False), - 'ReplicaCount': (integer, False), - 'Slots': (basestring, False), - } diff --git a/troposphere/elasticbeanstalk.py b/troposphere/elasticbeanstalk.py deleted file mode 100644 index 5049bd2cc..000000000 --- a/troposphere/elasticbeanstalk.py +++ /dev/null @@ -1,139 +0,0 @@ -# Copyright (c) 2013, Mark Peek -# All rights reserved. -# -# See LICENSE file for full license. - -from . import AWSObject, AWSProperty, Tags -from .validators import boolean, integer - -WebServer = "WebServer" -Worker = "Worker" -WebServerType = "Standard" -WorkerType = "SQS/HTTP" - - -class MaxAgeRule(AWSProperty): - props = { - 'DeleteSourceFromS3': (boolean, False), - 'Enabled': (boolean, False), - 'MaxAgeInDays': (integer, False), - } - - -class MaxCountRule(AWSProperty): - props = { - 'DeleteSourceFromS3': (boolean, False), - 'Enabled': (boolean, False), - 'MaxCount': (integer, False), - } - - -class ApplicationVersionLifecycleConfig(AWSProperty): - props = { - 'MaxAgeRule': (MaxAgeRule, False), - 'MaxCountRule': (MaxCountRule, False), - } - - -class SourceBundle(AWSProperty): - props = { - 'S3Bucket': (basestring, True), - 'S3Key': (basestring, True), - } - - -class SourceConfiguration(AWSProperty): - props = { - 'ApplicationName': (basestring, True), - 'TemplateName': (basestring, True), - } - - -class ApplicationResourceLifecycleConfig(AWSProperty): - props = { - 'ServiceRole': (basestring, False), - 'VersionLifecycleConfig': (ApplicationVersionLifecycleConfig, False), - } - - -class OptionSettings(AWSProperty): - props = { - 'Namespace': (basestring, True), - 'OptionName': (basestring, True), - 'ResourceName': (basestring, False), - 'Value': (basestring, True), - } - - -class Application(AWSObject): - resource_type = "AWS::ElasticBeanstalk::Application" - - props = { - 'ApplicationName': (basestring, False), - 'Description': (basestring, False), - 'ResourceLifecycleConfig': (ApplicationResourceLifecycleConfig, False), - } - - -class ApplicationVersion(AWSObject): - resource_type = "AWS::ElasticBeanstalk::ApplicationVersion" - - props = { - 'ApplicationName': (basestring, True), - 'Description': (basestring, False), - 'SourceBundle': (SourceBundle, False), - } - - -class ConfigurationTemplate(AWSObject): - resource_type = "AWS::ElasticBeanstalk::ConfigurationTemplate" - - props = { - 
'ApplicationName': (basestring, True), - 'Description': (basestring, False), - 'EnvironmentId': (basestring, False), - 'OptionSettings': ([OptionSettings], False), - 'PlatformArn': (basestring, False), - 'SolutionStackName': (basestring, False), - 'SourceConfiguration': (SourceConfiguration, False), - } - - -def validate_tier_name(name): - valid_names = [WebServer, Worker] - if name not in valid_names: - raise ValueError('Tier name needs to be one of %r' % valid_names) - return name - - -def validate_tier_type(tier_type): - valid_types = [WebServerType, WorkerType] - if tier_type not in valid_types: - raise ValueError('Tier type needs to be one of %r' % valid_types) - return tier_type - - -class Tier(AWSProperty): - props = { - 'Name': (validate_tier_name, False), - 'Type': (validate_tier_type, False), - 'Version': (basestring, False), - } - - -class Environment(AWSObject): - resource_type = "AWS::ElasticBeanstalk::Environment" - - props = { - 'ApplicationName': (basestring, True), - 'CNAMEPrefix': (basestring, False), - 'Description': (basestring, False), - 'EnvironmentName': (basestring, False), - 'OptionSettings': ([OptionSettings], False), - 'PlatformArn': (basestring, False), - 'SolutionStackName': (basestring, False), - 'Tags': (Tags, False), - 'TemplateName': (basestring, False), - 'Tier': (Tier, False), - 'VersionLabel': (basestring, False), - } diff --git a/troposphere/elasticloadbalancing.py b/troposphere/elasticloadbalancing.py deleted file mode 100644 index ac2925256..000000000 --- a/troposphere/elasticloadbalancing.py +++ /dev/null @@ -1,98 +0,0 @@ -# Copyright (c) 2012-2013, Mark Peek -# All rights reserved. -# -# See LICENSE file for full license. - -from . import AWSObject, AWSProperty, Tags -from .validators import ( - boolean, elb_name, integer_range, positive_integer, network_port, integer) - - -class AppCookieStickinessPolicy(AWSProperty): - props = { - 'CookieName': (basestring, True), - 'PolicyName': (basestring, True), - } - - -class HealthCheck(AWSProperty): - props = { - 'HealthyThreshold': (integer_range(2, 10), True), - 'Interval': (positive_integer, True), - 'Target': (basestring, True), - 'Timeout': (positive_integer, True), - 'UnhealthyThreshold': (integer_range(2, 10), True), - } - - -class LBCookieStickinessPolicy(AWSProperty): - props = { - 'CookieExpirationPeriod': (basestring, False), - 'PolicyName': (basestring, False), - } - - -class Listener(AWSProperty): - props = { - 'InstancePort': (network_port, True), - 'InstanceProtocol': (basestring, False), - 'LoadBalancerPort': (network_port, True), - 'PolicyNames': (list, False), - 'Protocol': (basestring, True), - 'SSLCertificateId': (basestring, False), - } - - -class Policy(AWSProperty): - props = { - 'Attributes': ([dict], False), - 'InstancePorts': (list, False), - 'LoadBalancerPorts': (list, False), - 'PolicyName': (basestring, True), - 'PolicyType': (basestring, True), - } - - -class ConnectionDrainingPolicy(AWSProperty): - props = { - 'Enabled': (bool, True), - 'Timeout': (integer, False) - } - - -class ConnectionSettings(AWSProperty): - props = { - 'IdleTimeout': (integer, True), - } - - -class AccessLoggingPolicy(AWSProperty): - props = { - 'EmitInterval': (integer, False), - 'Enabled': (bool, True), - 'S3BucketName': (basestring, False), - 'S3BucketPrefix': (basestring, False), - } - - -class LoadBalancer(AWSObject): - resource_type = "AWS::ElasticLoadBalancing::LoadBalancer" - - props = { - 'AccessLoggingPolicy': (AccessLoggingPolicy, False), - 'AppCookieStickinessPolicy': (list, False), - 
'AvailabilityZones': (list, False), - 'ConnectionDrainingPolicy': (ConnectionDrainingPolicy, False), - 'ConnectionSettings': (ConnectionSettings, False), - 'CrossZone': (boolean, False), - 'HealthCheck': (HealthCheck, False), - 'Instances': (list, False), - 'LBCookieStickinessPolicy': (list, False), - 'LoadBalancerName': (elb_name, False), - 'Listeners': (list, True), - 'Policies': (list, False), - 'Scheme': (basestring, False), - 'SecurityGroups': (list, False), - 'Subnets': (list, False), - 'Tags': ((Tags, list), False), - } diff --git a/troposphere/elasticloadbalancingv2.py b/troposphere/elasticloadbalancingv2.py deleted file mode 100644 index e501e0fe7..000000000 --- a/troposphere/elasticloadbalancingv2.py +++ /dev/null @@ -1,224 +0,0 @@ -# Copyright (c) 2012-2013, Mark Peek -# All rights reserved. -# -# See LICENSE file for full license. - -from . import AWSObject, AWSProperty, If, Tags -from .validators import ( - elb_name, exactly_one, network_port, - tg_healthcheck_port, integer, - one_of -) - - -class LoadBalancerAttributes(AWSProperty): - props = { - 'Key': (basestring, False), - 'Value': (basestring, False) - } - - -class Certificate(AWSProperty): - props = { - 'CertificateArn': (basestring, False) - } - - -class RedirectConfig(AWSProperty): - # https://docs.aws.amazon.com/ - # AWSCloudFormation/latest/UserGuide/ - # aws-properties-elasticloadbalancingv2-listener-redirectconfig.html - props = { - 'Host': (basestring, False), - 'Path': (basestring, False), - 'Port': (basestring, False), - 'Protocol': (basestring, False), - 'Query': (basestring, False), - 'StatusCode': (basestring, True), - } - - def validate(self): - one_of(self.__class__.__name__, - self.properties, - 'StatusCode', - ['HTTP_301', 'HTTP_302']) - - -class FixedResponseConfig(AWSProperty): - props = { - 'ContentType': (basestring, False), - 'MessageBody': (basestring, False), - 'StatusCode': (basestring, False), - } - - def validate(self): - one_of(self.__class__.__name__, - self.properties, - 'ContentType', - ['text/plain', 'text/css', 'text/html', - 'application/javascript', 'application/json']) - - -class Action(AWSProperty): - props = { - 'Type': (basestring, True), - 'TargetGroupArn': (basestring, False), - 'RedirectConfig': (RedirectConfig, False), - 'FixedResponseConfig': (FixedResponseConfig, False) - } - - def validate(self): - one_of(self.__class__.__name__, - self.properties, - 'Type', - ['forward', 'redirect', 'fixed-response']) - - def requires(action_type, prop): - if self.properties.get('Type') == action_type and \ - prop not in self.properties: - raise ValueError( - 'Type "%s" requires definition of "%s"' % ( - action_type, prop - ) - ) - - if prop in self.properties and \ - self.properties.get('Type') != action_type: - raise ValueError( - 'Definition of "%s" allowed only with ' - 'type "%s", was: "%s"' % ( - prop, action_type, self.properties.get('Type') - ) - ) - - requires('forward', 'TargetGroupArn') - requires('redirect', 'RedirectConfig') - requires('fixed-response', 'FixedResponseConfig') - - -class Condition(AWSProperty): - props = { - 'Field': (basestring, True), - 'Values': ([basestring], True) - } - - -class Matcher(AWSProperty): - props = { - 'HttpCode': (basestring, False) - } - - -class SubnetMapping(AWSProperty): - props = { - 'AllocationId': (basestring, True), - 'SubnetId': (basestring, True) - } - - -class TargetGroupAttribute(AWSProperty): - props = { - 'Key': (basestring, False), - 'Value': (basestring, False) - } - - -class TargetDescription(AWSProperty): - props = { - 
'AvailabilityZone': (basestring, False), - 'Id': (basestring, True), - 'Port': (network_port, False) - } - - -class Listener(AWSObject): - resource_type = "AWS::ElasticLoadBalancingV2::Listener" - - props = { - 'Certificates': ([Certificate], False), - 'DefaultActions': ([Action], True), - 'LoadBalancerArn': (basestring, True), - 'Port': (network_port, True), - 'Protocol': (basestring, True), - 'SslPolicy': (basestring, False) - } - - -class ListenerCertificate(AWSObject): - resource_type = "AWS::ElasticLoadBalancingV2::ListenerCertificate" - - props = { - 'Certificates': ([Certificate], True), - 'ListenerArn': (basestring, True), - } - - -class ListenerRule(AWSObject): - resource_type = "AWS::ElasticLoadBalancingV2::ListenerRule" - - props = { - 'Actions': ([Action], True), - 'Conditions': ([Condition], True), - 'ListenerArn': (basestring, True), - 'Priority': (integer, True) - } - - -TARGET_TYPE_INSTANCE = 'instance' -TARGET_TYPE_IP = 'ip' - - -class TargetGroup(AWSObject): - resource_type = "AWS::ElasticLoadBalancingV2::TargetGroup" - - props = { - 'HealthCheckIntervalSeconds': (integer, False), - 'HealthCheckPath': (basestring, False), - 'HealthCheckPort': (tg_healthcheck_port, False), - 'HealthCheckProtocol': (basestring, False), - 'HealthCheckTimeoutSeconds': (integer, False), - 'HealthyThresholdCount': (integer, False), - 'Matcher': (Matcher, False), - 'Name': (basestring, False), - 'Port': (network_port, True), - 'Protocol': (basestring, True), - 'Tags': ((Tags, list), False), - 'TargetGroupAttributes': ([TargetGroupAttribute], False), - 'Targets': ([TargetDescription], False), - 'TargetType': (basestring, False), - 'UnhealthyThresholdCount': (integer, False), - 'VpcId': (basestring, True), - } - - -class LoadBalancer(AWSObject): - resource_type = "AWS::ElasticLoadBalancingV2::LoadBalancer" - - props = { - 'LoadBalancerAttributes': ([LoadBalancerAttributes], False), - 'Name': (elb_name, False), - 'Scheme': (basestring, False), - 'IpAddressType': (basestring, False), - 'SecurityGroups': (list, False), - 'SubnetMappings': ([SubnetMapping], False), - 'Subnets': (list, False), - 'Tags': ((Tags, list), False), - 'Type': (basestring, False), - } - - def validate(self): - conds = [ - 'SubnetMappings', - 'Subnets', - ] - - def check_if(names, props): - validated = [] - for name in names: - validated.append(name in props and isinstance(props[name], If)) - return all(validated) - - if check_if(conds, self.properties): - return - - exactly_one(self.__class__.__name__, self.properties, conds) diff --git a/troposphere/elasticsearch.py b/troposphere/elasticsearch.py deleted file mode 100644 index e9800705a..000000000 --- a/troposphere/elasticsearch.py +++ /dev/null @@ -1,90 +0,0 @@ -# Copyright (c) 2012-2015, Mark Peek -# All rights reserved. -# -# See LICENSE file for full license. - -from . 
import AWSProperty, AWSObject, Tags -from .validators import boolean, integer, integer_range, positive_integer - -VALID_VOLUME_TYPES = ('standard', 'gp2', 'io1') - -try: - from awacs.aws import Policy - policytypes = (dict, Policy) -except ImportError: - policytypes = dict, - - -def validate_volume_type(volume_type): - """Validate VolumeType for ElasticsearchDomain""" - if volume_type not in VALID_VOLUME_TYPES: - raise ValueError("Elasticsearch Domain VolumeType must be one of: %s" % - ", ".join(VALID_VOLUME_TYPES)) - return volume_type - - -class EBSOptions(AWSProperty): - props = { - 'EBSEnabled': (boolean, False), - 'Iops': (positive_integer, False), - 'VolumeSize': (integer, False), - 'VolumeType': (validate_volume_type, False) - } - - def validate(self): - volume_type = self.properties.get('VolumeType') - iops = self.properties.get('Iops') - if volume_type == 'io1' and not iops: - raise ValueError("Must specify Iops if VolumeType is 'io1'.") - - -class ElasticsearchClusterConfig(AWSProperty): - props = { - 'DedicatedMasterCount': (integer, False), - 'DedicatedMasterEnabled': (boolean, False), - 'DedicatedMasterType': (basestring, False), - 'InstanceCount': (integer, False), - 'InstanceType': (basestring, False), - 'ZoneAwarenessEnabled': (boolean, False) - } - - -class EncryptionAtRestOptions(AWSProperty): - props = { - 'Enabled': (boolean, False), - 'KmsKeyId': (basestring, False), - } - - -class SnapshotOptions(AWSProperty): - props = { - 'AutomatedSnapshotStartHour': (integer_range(0, 23), False) - } - - -class VPCOptions(AWSProperty): - props = { - "SecurityGroupIds": ([basestring], False), - "SubnetIds": ([basestring], False) - } - - -class Domain(AWSObject): - resource_type = "AWS::Elasticsearch::Domain" - - props = { - 'AccessPolicies': (policytypes, False), - 'AdvancedOptions': (dict, False), - 'DomainName': (basestring, False), - 'EBSOptions': (EBSOptions, False), - 'ElasticsearchClusterConfig': (ElasticsearchClusterConfig, False), - 'ElasticsearchVersion': (basestring, False), - 'EncryptionAtRestOptions': (EncryptionAtRestOptions, False), - 'SnapshotOptions': (SnapshotOptions, False), - 'Tags': ((Tags, list), False), - 'VPCOptions': (VPCOptions, False) - } - - -# Backward compatibility -ElasticsearchDomain = Domain diff --git a/troposphere/emr.py b/troposphere/emr.py deleted file mode 100644 index 63ff447da..000000000 --- a/troposphere/emr.py +++ /dev/null @@ -1,426 +0,0 @@ -# Copyright (c) 2012-2013, Antonio Alonso Dominguez -# All rights reserved. -# -# See LICENSE file for full license. - -from . 
import AWSObject, AWSProperty, AWSHelperFn, Tags -from .validators import ( - boolean, integer, positive_integer, double, defer -) - - -CHANGE_IN_CAPACITY = 'CHANGE_IN_CAPACITY' -PERCENT_CHANGE_IN_CAPACITY = 'PERCENT_CHANGE_IN_CAPACITY' -EXACT_CAPACITY = 'EXACT_CAPACITY' -ACTIONS_ON_FAILURE = ('TERMINATE_CLUSTER', 'CANCEL_AND_WAIT', - 'TERMINATE_JOB_FLOW') - - -def validate_action_on_failure(action_on_failure): - """Validate action on failure for EMR StepConfig """ - - if action_on_failure not in ACTIONS_ON_FAILURE: - raise ValueError("StepConfig ActionOnFailure must be one of: %s" % - ", ".join(ACTIONS_ON_FAILURE)) - return action_on_failure - - -class KeyValue(AWSProperty): - props = { - 'Key': (basestring, True), - 'Value': (basestring, True) - } - - def __init__(self, key=None, value=None, **kwargs): - # provided for backward compatibility - if key is not None: - kwargs['Key'] = key - if value is not None: - kwargs['Value'] = value - super(KeyValue, self).__init__(**kwargs) - - -MetricDimension = KeyValue - - -def additional_info_validator(xs): - if not isinstance(xs, dict): - raise ValueError("AdditionalInfo must be a dict of " - "string to string pairs") - for k, v in xs.iteritems(): - if not isinstance(k, basestring): - raise ValueError('AdditionalInfo keys must be strings') - if not isinstance(v, basestring): - raise ValueError('AdditionalInfo values must be strings') - - return xs - - -class SecurityConfiguration(AWSObject): - resource_type = "AWS::EMR::SecurityConfiguration" - - props = { - 'Name': (basestring, False), - 'SecurityConfiguration': (dict, True) - } - - -class Application(AWSProperty): - props = { - 'AdditionalInfo': (additional_info_validator, False), - 'Args': ([basestring], False), - 'Name': (basestring, False), - 'Version': (basestring, False) - } - - -class ScriptBootstrapActionConfig(AWSProperty): - props = { - 'Args': ([basestring], False), - 'Path': (basestring, True) - } - - -class BootstrapActionConfig(AWSProperty): - props = { - 'Name': (basestring, True), - 'ScriptBootstrapAction': (ScriptBootstrapActionConfig, True) - } - - -def properties_validator(xs): - if not isinstance(xs, dict): - raise ValueError("ConfigurationProperties must be a dict of " - "string to string pairs") - for k, v in xs.iteritems(): - if not isinstance(k, basestring): - raise ValueError('ConfigurationProperties keys must be strings') - if not isinstance(v, basestring) and not isinstance(v, AWSHelperFn): - raise ValueError('ConfigurationProperties values must be strings' - ' or helper functions') - - return xs - - -class Configuration(AWSProperty): - props = { - 'Classification': (basestring, False), - 'ConfigurationProperties': (properties_validator, False) - } - - -# we must define this one afterwards since Configuration does not exist -# before Configuration is done initializing -Configuration.props['Configurations'] = ([Configuration], False) - - -def market_validator(x): - valid_values = ['ON_DEMAND', 'SPOT'] - if x not in valid_values: - raise ValueError("Market must be one of: %s" % - ', '.join(valid_values)) - return x - - -def volume_type_validator(x): - valid_values = ['standard', 'io1', 'gp2'] - if x not in valid_values: - raise ValueError("VolumeType must be one of: %s" % - ', '.join(valid_values)) - return x - - -class VolumeSpecification(AWSProperty): - props = { - 'Iops': (integer, False), - 'SizeInGB': (integer, True), - 'VolumeType': (volume_type_validator, True) - } - - -class EbsBlockDeviceConfigs(AWSProperty): - props = { - 'VolumeSpecification': 
(VolumeSpecification, True), - 'VolumesPerInstance': (integer, False) - } - - -class EbsConfiguration(AWSProperty): - props = { - 'EbsBlockDeviceConfigs': ([EbsBlockDeviceConfigs], False), - 'EbsOptimized': (boolean, False) - } - - -class ScalingConstraints(AWSProperty): - props = { - 'MinCapacity': (integer, True), - 'MaxCapacity': (integer, True) - } - - -class CloudWatchAlarmDefinition(AWSProperty): - props = { - 'ComparisonOperator': (basestring, True), - 'Dimensions': ([MetricDimension], False), - 'EvaluationPeriods': (positive_integer, False), - 'MetricName': (basestring, True), - 'Namespace': (basestring, False), - 'Period': (positive_integer, True), - 'Statistic': (basestring, False), - 'Threshold': (positive_integer, True), - 'Unit': (basestring, False), - } - - -class ScalingTrigger(AWSProperty): - props = { - 'CloudWatchAlarmDefinition': (CloudWatchAlarmDefinition, True), - } - - -class SimpleScalingPolicyConfiguration(AWSProperty): - props = { - 'AdjustmentType': (basestring, False), - 'CoolDown': (positive_integer, False), - 'ScalingAdjustment': (defer, True), - } - - def validate(self): - if 'AdjustmentType' in self.properties and \ - 'ScalingAdjustment' in self.properties: - - valid_values = [ - CHANGE_IN_CAPACITY, - PERCENT_CHANGE_IN_CAPACITY, - EXACT_CAPACITY, - ] - - adjustment_type = self.properties.get('AdjustmentType', None) - scaling_adjustment = self.properties.get('ScalingAdjustment', None) - - if adjustment_type not in valid_values: - raise ValueError( - 'Only CHANGE_IN_CAPACITY, PERCENT_CHANGE_IN_CAPACITY, or' - ' EXACT_CAPACITY are valid AdjustmentTypes' - ) - - if adjustment_type == CHANGE_IN_CAPACITY: - integer(scaling_adjustment) - elif adjustment_type == PERCENT_CHANGE_IN_CAPACITY: - double(scaling_adjustment) - f = float(scaling_adjustment) - if f < 0.0 or f > 1.0: - raise ValueError( - 'ScalingAdjustment value must be between 0.0 and 1.0' - ' value was %0.2f' % f - ) - elif adjustment_type == EXACT_CAPACITY: - positive_integer(scaling_adjustment) - else: - raise ValueError('ScalingAdjustment value must be' - ' an integer or a float') - - -class ScalingAction(AWSProperty): - props = { - 'Market': (market_validator, False), - 'SimpleScalingPolicyConfiguration': ( - SimpleScalingPolicyConfiguration, True - ) - } - - -class ScalingRule(AWSProperty): - props = { - 'Action': (ScalingAction, True), - 'Description': (basestring, False), - 'Name': (basestring, True), - 'Trigger': (ScalingTrigger, True), - } - - -class AutoScalingPolicy(AWSProperty): - props = { - 'Constraints': (ScalingConstraints, True), - 'Rules': ([ScalingRule], False), - } - - -class InstanceGroupConfigProperty(AWSProperty): - props = { - 'AutoScalingPolicy': (AutoScalingPolicy, False), - 'BidPrice': (basestring, False), - 'Configurations': ([Configuration], False), - 'EbsConfiguration': (EbsConfiguration, False), - 'InstanceCount': (positive_integer, True), - 'InstanceType': (basestring, True), - 'Market': (market_validator, False), - 'Name': (basestring, False), - } - - -class SpotProvisioningSpecification(AWSProperty): - props = { - 'BlockDurationMinutes': (positive_integer, False), - 'TimeoutAction': (basestring, True), - 'TimeoutDurationMinutes': (positive_integer, True), - } - - -class InstanceFleetProvisioningSpecifications(AWSProperty): - props = { - 'SpotSpecification': (SpotProvisioningSpecification, True), - } - - -class InstanceTypeConfig(AWSProperty): - props = { - 'BidPrice': (basestring, False), - 'BidPriceAsPercentageOfOnDemandPrice': (basestring, False), - 'Configurations': 
([Configuration], False), - 'EbsConfiguration': (EbsConfiguration, False), - 'InstanceType': (basestring, True), - 'WeightedCapacity': (positive_integer, False), - } - - -class InstanceFleetConfigProperty(AWSProperty): - props = { - 'InstanceTypeConfigs': ([InstanceTypeConfig], False), - 'LaunchSpecifications': - (InstanceFleetProvisioningSpecifications, False), - 'Name': (basestring, False), - 'TargetOnDemandCapacity': (positive_integer, False), - 'TargetSpotCapacity': (positive_integer, False), - } - - -class PlacementType(AWSProperty): - props = { - 'AvailabilityZone': (basestring, True) - } - - -class JobFlowInstancesConfig(AWSProperty): - props = { - 'AdditionalMasterSecurityGroups': ([basestring], False), - 'AdditionalSlaveSecurityGroups': ([basestring], False), - 'CoreInstanceFleet': (InstanceFleetConfigProperty, False), - 'CoreInstanceGroup': (InstanceGroupConfigProperty, False), - 'Ec2KeyName': (basestring, False), - 'Ec2SubnetId': (basestring, False), - 'EmrManagedMasterSecurityGroup': (basestring, False), - 'EmrManagedSlaveSecurityGroup': (basestring, False), - 'HadoopVersion': (basestring, False), - 'KeepJobFlowAliveWhenNoSteps': (boolean, False), - 'MasterInstanceFleet': (InstanceFleetConfigProperty, False), - 'MasterInstanceGroup': (InstanceGroupConfigProperty, False), - 'Placement': (PlacementType, False), - 'ServiceAccessSecurityGroup': (basestring, False), - 'TerminationProtected': (boolean, False) - } - - -class KerberosAttributes(AWSProperty): - props = { - 'ADDomainJoinPassword': (basestring, False), - 'ADDomainJoinUser': (basestring, False), - 'CrossRealmTrustPrincipalPassword': (basestring, False), - 'KdcAdminPassword': (basestring, True), - 'Realm': (basestring, True), - } - - -class HadoopJarStepConfig(AWSProperty): - props = { - 'Args': ([basestring], False), - 'Jar': (basestring, True), - 'MainClass': (basestring, False), - 'StepProperties': ([KeyValue], False) - } - - -class StepConfig(AWSProperty): - props = { - 'ActionOnFailure': (validate_action_on_failure, False), - 'HadoopJarStep': (HadoopJarStepConfig, True), - 'Name': (basestring, True), - } - - -class Cluster(AWSObject): - resource_type = "AWS::EMR::Cluster" - - props = { - 'AdditionalInfo': (dict, False), - 'Applications': ([Application], False), - 'AutoScalingRole': (basestring, False), - 'BootstrapActions': ([BootstrapActionConfig], False), - 'Configurations': ([Configuration], False), - 'CustomAmiId': (basestring, False), - 'EbsRootVolumeSize': (positive_integer, False), - 'Instances': (JobFlowInstancesConfig, True), - 'JobFlowRole': (basestring, True), - 'KerberosAttributes': (KerberosAttributes, False), - 'LogUri': (basestring, False), - 'Name': (basestring, True), - 'ReleaseLabel': (basestring, False), - 'ScaleDownBehavior': (basestring, False), - 'SecurityConfiguration': (basestring, False), - 'ServiceRole': (basestring, True), - 'Steps': ([StepConfig], False), - 'Tags': ((Tags, list), False), - 'VisibleToAllUsers': (boolean, False) - } - - -class InstanceFleetConfig(AWSObject): - resource_type = "AWS::EMR::InstanceFleetConfig" - - props = { - 'ClusterId': (basestring, True), - 'InstanceFleetType': (basestring, True), - 'InstanceTypeConfigs': ([InstanceTypeConfig], False), - 'LaunchSpecifications': - (InstanceFleetProvisioningSpecifications, False), - 'Name': (basestring, False), - 'TargetOnDemandCapacity': (positive_integer, False), - 'TargetSpotCapacity': (positive_integer, False), - } - - -class InstanceGroupConfig(AWSObject): - resource_type = "AWS::EMR::InstanceGroupConfig" - - props = { - 
'AutoScalingPolicy': (AutoScalingPolicy, False), - 'BidPrice': (basestring, False), - 'Configurations': ([Configuration], False), - 'EbsConfiguration': (EbsConfiguration, False), - 'InstanceCount': (integer, True), - 'InstanceRole': (basestring, True), - 'InstanceType': (basestring, True), - 'JobFlowId': (basestring, True), - 'Market': (market_validator, False), - 'Name': (basestring, False) - } - - -def action_on_failure_validator(x): - valid_values = ['CONTINUE', 'CANCEL_AND_WAIT'] - if x not in valid_values: - raise ValueError("ActionOnFailure must be one of: %s" % - ', '.join(valid_values)) - return x - - -class Step(AWSObject): - resource_type = "AWS::EMR::Step" - - props = { - 'ActionOnFailure': (action_on_failure_validator, True), - 'HadoopJarStep': (HadoopJarStepConfig, True), - 'JobFlowId': (basestring, True), - 'Name': (basestring, True) - } diff --git a/troposphere/events.py b/troposphere/events.py deleted file mode 100644 index 7bb70cf69..000000000 --- a/troposphere/events.py +++ /dev/null @@ -1,67 +0,0 @@ -# Copyright (c) 2013, Mark Peek -# All rights reserved. -# -# See LICENSE file for full license. - -from . import AWSObject, AWSProperty - - -class EcsParameters(AWSProperty): - props = { - "TaskCount": (int, False), - "TaskDefinitionArn": (basestring, True), - } - - -class InputTransformer(AWSProperty): - props = { - 'InputPathsMap': (dict, False), - 'InputTemplate': (basestring, True), - } - - -class KinesisParameters(AWSProperty): - props = { - 'PartitionKeyPath': (basestring, True), - } - - -class RunCommandTarget(AWSProperty): - props = { - 'Key': (basestring, True), - 'Values': ([basestring], True), - } - - -class RunCommandParameters(AWSProperty): - props = { - 'RunCommandTargets': ([RunCommandTarget], True), - } - - -class Target(AWSProperty): - props = { - 'Arn': (basestring, True), - "EcsParameters": (EcsParameters, False), - 'Id': (basestring, True), - 'Input': (basestring, False), - 'InputPath': (basestring, False), - 'InputTransformer': (InputTransformer, False), - 'KinesisParameters': (KinesisParameters, False), - 'RoleArn': (basestring, False), - 'RunCommandParameters': (RunCommandParameters, False), - } - - -class Rule(AWSObject): - resource_type = "AWS::Events::Rule" - - props = { - - 'Description': (basestring, False), - 'EventPattern': (dict, False), - 'Name': (basestring, False), - 'ScheduleExpression': (basestring, False), - 'State': (basestring, False), - 'Targets': ([Target], False), - } diff --git a/troposphere/firehose.py b/troposphere/firehose.py deleted file mode 100644 index 436bee178..000000000 --- a/troposphere/firehose.py +++ /dev/null @@ -1,221 +0,0 @@ -# Copyright (c) 2016-2017, troposphere project -# All rights reserved. -# -# See LICENSE file for full license. - -from . 
import AWSObject, AWSProperty -from .validators import boolean, positive_integer - - -def processor_type_validator(x): - valid_types = ["Lambda"] - if x not in valid_types: - raise ValueError("Type must be one of: %s" % - ", ".join(valid_types)) - return x - - -def delivery_stream_type_validator(x): - valid_types = ["DirectPut", "KinesisStreamAsSource"] - if x not in valid_types: - raise ValueError("DeliveryStreamType must be one of: %s" % - ", ".join(valid_types)) - return x - - -def index_rotation_period_validator(x): - valid_types = ["NoRotation", "OneHour", "OneDay", "OneWeek", "OneMonth"] - if x not in valid_types: - raise ValueError("IndexRotationPeriod must be one of: %s" % - ", ".join(valid_types)) - return x - - -def s3_backup_mode_elastic_search_validator(x): - valid_types = ["FailedDocumentsOnly", "AllDocuments"] - if x not in valid_types: - raise ValueError("S3BackupMode must be one of: %s" % - ", ".join(valid_types)) - return x - - -def s3_backup_mode_extended_s3_validator(x): - valid_types = ["Disabled", "Enabled"] - if x not in valid_types: - raise ValueError("S3BackupMode must be one of: %s" % - ", ".join(valid_types)) - return x - - -class BufferingHints(AWSProperty): - props = { - 'IntervalInSeconds': (positive_integer, True), - 'SizeInMBs': (positive_integer, True) - } - - -class CloudWatchLoggingOptions(AWSProperty): - props = { - 'Enabled': (boolean, False), - 'LogGroupName': (basestring, False), # Conditional - 'LogStreamName': (basestring, False), # Conditional - } - - -class RetryOptions(AWSProperty): - props = { - 'DurationInSeconds': (positive_integer, True), - } - - -class KMSEncryptionConfig(AWSProperty): - props = { - 'AWSKMSKeyARN': (basestring, True), - } - - -class EncryptionConfiguration(AWSProperty): - props = { - 'KMSEncryptionConfig': (KMSEncryptionConfig, False), - 'NoEncryptionConfig': (basestring, False), - } - - -class S3Configuration(AWSProperty): - props = { - 'BucketARN': (basestring, True), - 'BufferingHints': (BufferingHints, True), - 'CloudWatchLoggingOptions': (CloudWatchLoggingOptions, False), - 'CompressionFormat': (basestring, True), - 'EncryptionConfiguration': (EncryptionConfiguration, False), - 'Prefix': (basestring, False), - 'RoleARN': (basestring, True) - } - - -class CopyCommand(AWSProperty): - props = { - 'CopyOptions': (basestring, False), - 'DataTableColumns': (basestring, False), - 'DataTableName': (basestring, True), - } - - -class ProcessorParameter(AWSProperty): - props = { - 'ParameterName': (basestring, True), - 'ParameterValue': (basestring, True), - } - - -class Processor(AWSProperty): - props = { - 'Parameters': ([ProcessorParameter], True), - 'Type': (processor_type_validator, True), - } - - -class ProcessingConfiguration(AWSProperty): - props = { - 'Enabled': (boolean, True), - 'Processors': ([Processor], True), - } - - -class ElasticsearchDestinationConfiguration(AWSProperty): - props = { - 'BufferingHints': (BufferingHints, True), - 'CloudWatchLoggingOptions': (CloudWatchLoggingOptions, False), - 'DomainARN': (basestring, True), - 'IndexName': (basestring, True), - 'IndexRotationPeriod': (index_rotation_period_validator, True), - 'ProcessingConfiguration': (ProcessingConfiguration, False), - 'RetryOptions': (RetryOptions, False), - 'RoleARN': (basestring, True), - 'S3BackupMode': (s3_backup_mode_elastic_search_validator, True), - 'S3Configuration': (S3Configuration, False), - 'TypeName': (basestring, True), - } - - -class RedshiftDestinationConfiguration(AWSProperty): - props = { - 'CloudWatchLoggingOptions': 
(CloudWatchLoggingOptions, False), - 'ClusterJDBCURL': (basestring, True), - 'CopyCommand': (CopyCommand, True), - 'Password': (basestring, True), - 'ProcessingConfiguration': (ProcessingConfiguration, False), - 'RoleARN': (basestring, True), - 'S3Configuration': (S3Configuration, True), - 'Username': (basestring, True), - } - - -class S3DestinationConfiguration(AWSProperty): - props = { - 'BucketARN': (basestring, True), - 'BufferingHints': (BufferingHints, True), - 'CloudWatchLoggingOptions': (CloudWatchLoggingOptions, False), - 'CompressionFormat': (basestring, True), - 'EncryptionConfiguration': (EncryptionConfiguration, False), - 'Prefix': (basestring, False), - 'RoleARN': (basestring, True), - } - - -class ExtendedS3DestinationConfiguration(AWSProperty): - props = { - 'BucketARN': (basestring, True), - 'BufferingHints': (BufferingHints, True), - 'CloudWatchLoggingOptions': (CloudWatchLoggingOptions, False), - 'CompressionFormat': (basestring, True), - 'EncryptionConfiguration': (EncryptionConfiguration, False), - 'Prefix': (basestring, True), - 'ProcessingConfiguration': (ProcessingConfiguration, False), - 'RoleARN': (basestring, True), - 'S3BackupConfiguration': (S3DestinationConfiguration, False), - 'S3BackupMode': (s3_backup_mode_extended_s3_validator, False), - } - - -class KinesisStreamSourceConfiguration(AWSProperty): - props = { - 'KinesisStreamARN': (basestring, True), - 'RoleARN': (basestring, True) - } - - -class SplunkRetryOptions(AWSProperty): - props = { - 'DurationInSeconds': (positive_integer, True), - } - - -class SplunkDestinationConfiguration(AWSProperty): - props = { - 'CloudWatchLoggingOptions': (CloudWatchLoggingOptions, False), - 'HECAcknowledgmentTimeoutInSeconds': (positive_integer, False), - 'HECEndpoint': (basestring, True), - 'HECEndpointType': (basestring, True), - 'HECToken': (basestring, True), - 'ProcessingConfiguration': (ProcessingConfiguration, False), - 'RetryOptions': (SplunkRetryOptions, False), - 'S3BackupMode': (basestring, False), - 'S3Configuration': (S3DestinationConfiguration, True), - } - - -class DeliveryStream(AWSObject): - resource_type = "AWS::KinesisFirehose::DeliveryStream" - - props = { - 'DeliveryStreamName': (basestring, False), - 'DeliveryStreamType': (delivery_stream_type_validator, False), - 'ElasticsearchDestinationConfiguration': (ElasticsearchDestinationConfiguration, False), # noqa - 'ExtendedS3DestinationConfiguration': (ExtendedS3DestinationConfiguration, False), # noqa - 'KinesisStreamSourceConfiguration': (KinesisStreamSourceConfiguration, False), # noqa - 'RedshiftDestinationConfiguration': (RedshiftDestinationConfiguration, False), # noqa - 'S3DestinationConfiguration': (S3DestinationConfiguration, False), - 'SplunkDestinationConfiguration': - (SplunkDestinationConfiguration, False), - } diff --git a/troposphere/glue.py b/troposphere/glue.py deleted file mode 100644 index 381ba4005..000000000 --- a/troposphere/glue.py +++ /dev/null @@ -1,370 +0,0 @@ -# Copyright (c) 2012-2017, Mark Peek -# All rights reserved. -# -# See LICENSE file for full license. - -from . 
import AWSObject, AWSProperty -from .validators import boolean, double, integer_range, positive_integer - - -class GrokClassifier(AWSProperty): - props = { - 'Classification': (basestring, True), - 'CustomPatterns': (basestring, False), - 'GrokPattern': (basestring, True), - 'Name': (basestring, False), - } - - -class JsonClassifier(AWSProperty): - props = { - 'JsonPath': (basestring, True), - 'Name': (basestring, False), - } - - -class XMLClassifier(AWSProperty): - props = { - 'Classification': (basestring, True), - 'Name': (basestring, False), - 'RowTag': (basestring, True), - } - - -class Classifier(AWSObject): - resource_type = "AWS::Glue::Classifier" - - props = { - 'GrokClassifier': (GrokClassifier, False), - 'JsonClassifier': (JsonClassifier, False), - 'XMLClassifier': (XMLClassifier, False), - } - - -class PhysicalConnectionRequirements(AWSProperty): - props = { - 'AvailabilityZone': (basestring, True), - 'SecurityGroupIdList': ([basestring], True), - 'SubnetId': (basestring, True), - } - - -def connection_type_validator(type): - valid_types = [ - 'JDBC', - 'SFTP', - ] - if type not in valid_types: - raise ValueError('% is not a valid value for ConnectionType' % type) - return type - - -class ConnectionInput(AWSProperty): - props = { - 'ConnectionProperties': (dict, True), - 'ConnectionType': (connection_type_validator, True), - 'Description': (basestring, False), - 'MatchCriteria': ([basestring], True), - 'Name': (basestring, False), - 'PhysicalConnectionRequirements': - (PhysicalConnectionRequirements, True), - } - - -class Connection(AWSObject): - resource_type = "AWS::Glue::Connection" - - props = { - 'CatalogId': (basestring, True), - 'ConnectionInput': (ConnectionInput, True), - } - - -class Schedule(AWSProperty): - props = { - 'ScheduleExpression': (basestring, False), - } - - -def delete_behavior_validator(value): - valid_values = [ - 'LOG', - 'DELETE_FROM_DATABASE', - 'DEPRECATE_IN_DATABASE', - ] - if value not in valid_values: - raise ValueError('% is not a valid value for DeleteBehavior' % value) - return value - - -def update_behavior_validator(value): - valid_values = [ - 'LOG', - 'UPDATE_IN_DATABASE', - ] - if value not in valid_values: - raise ValueError('% is not a valid value for UpdateBehavior' % value) - return value - - -class SchemaChangePolicy(AWSProperty): - props = { - 'DeleteBehavior': (delete_behavior_validator, False), - 'UpdateBehavior': (update_behavior_validator, False), - } - - -class JdbcTarget(AWSProperty): - props = { - 'ConnectionName': (basestring, False), - 'Exclusions': ([basestring], False), - 'Path': (basestring, False), - } - - -class S3Target(AWSProperty): - props = { - 'Exclusions': ([basestring], False), - 'Path': (basestring, False), - } - - -class Targets(AWSProperty): - props = { - 'JdbcTargets': ([JdbcTarget], False), - 'S3Targets': ([S3Target], False), - } - - -class Crawler(AWSObject): - resource_type = "AWS::Glue::Crawler" - - props = { - 'Classifiers': ([basestring], False), - 'Configuration': (basestring, False), - 'DatabaseName': (basestring, True), - 'Description': (basestring, False), - 'Name': (basestring, False), - 'Role': (basestring, True), - 'Schedule': (Schedule, False), - 'SchemaChangePolicy': (SchemaChangePolicy, False), - 'TablePrefix': (basestring, False), - 'Targets': (Targets, True), - } - - -class DatabaseInput(AWSProperty): - props = { - 'Description': (basestring, False), - 'LocationUri': (basestring, False), - 'Name': (basestring, False), - 'Parameters': (dict, False), - } - - -class Database(AWSObject): - 
resource_type = "AWS::Glue::Database" - - props = { - 'CatalogId': (basestring, True), - 'DatabaseInput': (DatabaseInput, True), - } - - -class DevEndpoint(AWSObject): - resource_type = "AWS::Glue::DevEndpoint" - - props = { - 'EndpointName': (basestring, False), - 'ExtraJarsS3Path': (basestring, False), - 'ExtraPythonLibsS3Path': (basestring, False), - 'NumberOfNodes': (positive_integer, False), - 'PublicKey': (basestring, True), - 'RoleArn': (basestring, True), - 'SecurityGroupIds': ([basestring], False), - 'SubnetId': (basestring, False), - } - - -class ConnectionsList(AWSProperty): - props = { - 'Connections': ([basestring], False), - } - - -class ExecutionProperty(AWSProperty): - props = { - 'MaxConcurrentRuns': (positive_integer, False), - } - - -class JobCommand(AWSProperty): - props = { - 'Name': (basestring, False), - 'ScriptLocation': (basestring, False), - } - - -class Job(AWSObject): - resource_type = "AWS::Glue::Job" - - props = { - 'AllocatedCapacity': (double, False), - 'Command': (JobCommand, True), - 'Connections': (ConnectionsList, False), - 'DefaultArguments': (dict, False), - 'Description': (basestring, False), - 'ExecutionProperty': (ExecutionProperty, False), - 'LogUri': (basestring, False), - 'MaxRetries': (double, False), - 'Name': (basestring, False), - 'Role': (basestring, True), - } - - -class Column(AWSProperty): - props = { - 'Comment': (basestring, False), - 'Name': (basestring, True), - 'Type': (basestring, False), - } - - -class Order(AWSProperty): - props = { - 'Column': (basestring, True), - 'SortOrder': (integer_range(0, 1), False), - } - - -class SerdeInfo(AWSProperty): - props = { - 'Name': (basestring, False), - 'Parameters': (dict, False), - 'SerializationLibrary': (basestring, False), - } - - -class SkewedInfo(AWSProperty): - props = { - 'SkewedColumnNames': ([basestring], False), - 'SkewedColumnValues': ([basestring], False), - 'SkewedColumnValueLocationMaps': (dict, False), - } - - -class StorageDescriptor(AWSProperty): - props = { - 'BucketColumns': ([basestring], False), - 'Columns': ([Column], False), - 'Compressed': (boolean, False), - 'InputFormat': (basestring, False), - 'Location': (basestring, False), - 'NumberofBuckets': (positive_integer, False), - 'OutputFormat': (basestring, False), - 'Parameters': (dict, False), - 'SerdeInfo': (SerdeInfo, False), - 'SkewedInfo': (SkewedInfo, False), - 'SortColumns': ([Order], False), - 'StoredAsSubDirectories': (boolean, False), - } - - -class PartitionInput(AWSProperty): - props = { - 'Parameters': (dict, False), - 'StorageDescriptor': (StorageDescriptor, False), - 'Values': ([basestring], True), - } - - -class Partition(AWSObject): - resource_type = "AWS::Glue::Partition" - - props = { - 'CatalogId': (basestring, True), - 'DatabaseName': (basestring, True), - 'PartitionInput': (PartitionInput, True), - 'TableName': (basestring, True), - } - - -def table_type_validator(type): - valid_types = [ - 'EXTERNAL_TABLE', - 'VIRTUAL_VIEW', - ] - if type not in valid_types: - raise ValueError('% is not a valid value for TableType' % type) - return type - - -class TableInput(AWSProperty): - props = { - 'Description': (basestring, False), - 'Name': (basestring, True), - 'Owner': (basestring, False), - 'Parameters': (dict, False), - 'PartitionKeys': ([Column], False), - 'Retention': (positive_integer, False), - 'StorageDescriptor': (StorageDescriptor, False), - 'TableType': (table_type_validator, False), - 'ViewExpandedText': (basestring, False), - 'ViewOriginalText': (basestring, False), - } - - -class 
Table(AWSObject): - resource_type = "AWS::Glue::Table" - - props = { - 'CatalogId': (basestring, True), - 'DatabaseName': (basestring, True), - 'TableInput': (TableInput, True), - } - - -class Action(AWSProperty): - props = { - 'Arguments': (dict, False), - 'JobName': (basestring, False), - } - - -class Condition(AWSProperty): - props = { - 'JobName': (basestring, False), - 'LogicalOperator': (basestring, False), - 'State': (basestring, False), - } - - -class Predicate(AWSProperty): - props = { - 'Conditions': ([Condition], False), - 'Logical': (basestring, False), - } - - -def trigger_type_validator(type): - valid_types = [ - 'SCHEDULED', - 'CONDITIONAL', - 'ON_DEMAND', - ] - if type not in valid_types: - raise ValueError('% is not a valid value for Type' % type) - return type - - -class Trigger(AWSObject): - resource_type = "AWS::Glue::Trigger" - - props = { - 'Actions': ([Action], True), - 'Description': (basestring, False), - 'Name': (basestring, False), - 'Predicate': (Predicate, False), - 'Schedule': (basestring, False), - 'Type': (trigger_type_validator, True), - } diff --git a/troposphere/guardduty.py b/troposphere/guardduty.py deleted file mode 100644 index 2926d1d26..000000000 --- a/troposphere/guardduty.py +++ /dev/null @@ -1,92 +0,0 @@ -# Copyright (c) 2012-2017, Mark Peek -# All rights reserved. -# -# See LICENSE file for full license. - -from . import AWSObject, AWSProperty -from .validators import boolean, integer - - -class Detector(AWSObject): - resource_type = 'AWS::GuardDuty::Detector' - - props = { - 'Enable': (boolean, True), - } - - -class Condition(AWSProperty): - props = { - 'Eq': ([basestring], False), - 'Gte': (integer, False), - 'Lt': (integer, False), - 'Lte': (integer, False), - 'Neq': ([basestring], False), - } - - -class FindingCriteria(AWSProperty): - props = { - 'Criterion': (dict, False), - 'ItemType': (Condition, False), - } - - -class Filter(AWSObject): - resource_type = "AWS::GuardDuty::Filter" - - props = { - 'Action': (basestring, True), - 'Description': (basestring, True), - 'DetectorId': (basestring, True), - 'FindingCriteria': (FindingCriteria, True), - 'Name': (basestring, False), - 'Rank': (integer, True), - } - - -class IPSet(AWSObject): - resource_type = 'AWS::GuardDuty::IPSet' - - props = { - 'Activate': (boolean, True), - 'DetectorId': (basestring, True), - 'Format': (basestring, True), - 'Location': (basestring, True), - 'Name': (basestring, False), - } - - -class Master(AWSObject): - resource_type = "AWS::GuardDuty::Master" - - props = { - 'DetectorId': (basestring, True), - 'InvitationId': (basestring, False), - 'MasterId': (basestring, True), - } - - -class Member(AWSObject): - resource_type = "AWS::GuardDuty::Member" - - props = { - 'DetectorId': (basestring, True), - 'Email': (basestring, True), - 'MemberId': (basestring, True), - 'Message': (basestring, False), - 'Status': (basestring, False), - 'DisableEmailNotification': (bool, False), - } - - -class ThreatIntelSet(AWSObject): - resource_type = 'AWS::GuardDuty::ThreatIntelSet' - - props = { - 'Activate': (boolean, True), - 'DetectorId': (basestring, True), - 'Format': (basestring, True), - 'Location': (basestring, True), - 'Name': (basestring, False), - } diff --git a/troposphere/helpers/userdata.py b/troposphere/helpers/userdata.py deleted file mode 100644 index 5d25d1366..000000000 --- a/troposphere/helpers/userdata.py +++ /dev/null @@ -1,38 +0,0 @@ -#!/usr/bin/python - -from troposphere import Base64, Join - - -def from_file(filepath, delimiter='', blanklines=False): - """ - 
Imports userdata from a file. - - :type filepath: string - :param filepath - The absolute path to the file. - - :type delimiter: string - :param: delimiter - Delimiter to use with the troposphere.Join(). - - :type blanklines: boolean - :param blanklines - If blank lines shoud be ignored - - rtype: troposphere.Base64 - :return The base64 representation of the file. - """ - - data = [] - - try: - with open(filepath, 'r') as f: - for line in f: - if blanklines and line.strip('\n\r ') == '': - continue - - data.append(line) - except IOError: - raise IOError('Error opening or reading file: {}'.format(filepath)) - - return Base64(Join(delimiter, data)) diff --git a/troposphere/iam.py b/troposphere/iam.py deleted file mode 100644 index 3a2504ab6..000000000 --- a/troposphere/iam.py +++ /dev/null @@ -1,139 +0,0 @@ -# Copyright (c) 2012-2013, Mark Peek -# All rights reserved. -# -# See LICENSE file for full license. - -from . import AWSObject, AWSProperty -from .validators import integer, boolean, status -from .validators import iam_path, iam_role_name, iam_group_name, iam_user_name - -try: - from awacs.aws import Policy - policytypes = (dict, Policy) -except ImportError: - policytypes = dict, - - -Active = "Active" -Inactive = "Inactive" - - -class AccessKey(AWSObject): - resource_type = "AWS::IAM::AccessKey" - - props = { - 'Serial': (integer, False), - 'Status': (status, False), - 'UserName': (basestring, True), - } - - -class PolicyType(AWSObject): - resource_type = "AWS::IAM::Policy" - - props = { - 'Groups': ([basestring], False), - 'PolicyDocument': (policytypes, True), - 'PolicyName': (basestring, True), - 'Roles': ([basestring], False), - 'Users': ([basestring], False), - } - - -class Policy(AWSProperty): - props = { - 'PolicyDocument': (policytypes, True), - 'PolicyName': (basestring, True), - } - - -PolicyProperty = Policy - - -class Group(AWSObject): - resource_type = "AWS::IAM::Group" - - props = { - 'GroupName': (iam_group_name, False), - 'ManagedPolicyArns': ([basestring], False), - 'Path': (iam_path, False), - 'Policies': ([Policy], False), - } - - -class InstanceProfile(AWSObject): - resource_type = "AWS::IAM::InstanceProfile" - - props = { - 'Path': (iam_path, False), - 'Roles': (list, True), - 'InstanceProfileName': (basestring, False), - } - - -class Role(AWSObject): - resource_type = "AWS::IAM::Role" - - props = { - 'AssumeRolePolicyDocument': (policytypes, True), - 'ManagedPolicyArns': ([basestring], False), - 'MaxSessionDuration': (integer, False), - 'Path': (iam_path, False), - 'PermissionsBoundary': (basestring, False), - 'Policies': ([Policy], False), - 'RoleName': (iam_role_name, False), - } - - -class ServiceLinkedRole(AWSObject): - resource_type = "AWS::IAM::ServiceLinkedRole" - - props = { - 'AWSServiceName': (basestring, True), - 'CustomSuffix': (basestring, False), - 'Description': (basestring, False), - } - - -class LoginProfile(AWSProperty): - props = { - 'Password': (basestring, True), - 'PasswordResetRequired': (boolean, False), - } - - -class User(AWSObject): - resource_type = "AWS::IAM::User" - - props = { - 'Groups': ([basestring], False), - 'LoginProfile': (LoginProfile, False), - 'ManagedPolicyArns': ([basestring], False), - 'Path': (iam_path, False), - 'PermissionsBoundary': (basestring, False), - 'Policies': ([Policy], False), - 'UserName': (iam_user_name, False), - } - - -class UserToGroupAddition(AWSObject): - resource_type = "AWS::IAM::UserToGroupAddition" - - props = { - 'GroupName': (basestring, True), - 'Users': (list, True), - } - - -class 
ManagedPolicy(AWSObject): - resource_type = "AWS::IAM::ManagedPolicy" - - props = { - 'Description': (basestring, False), - 'Groups': ([basestring], False), - 'ManagedPolicyName': (basestring, False), - 'Path': (iam_path, False), - 'PolicyDocument': (policytypes, True), - 'Roles': ([basestring], False), - 'Users': ([basestring], False), - } diff --git a/troposphere/inspector.py b/troposphere/inspector.py deleted file mode 100644 index 42c32ea0b..000000000 --- a/troposphere/inspector.py +++ /dev/null @@ -1,36 +0,0 @@ -# Copyright (c) 2012-2017, Mark Peek -# All rights reserved. -# -# See LICENSE file for full license. - -from . import AWSObject, Tags -from .validators import integer - - -class AssessmentTarget(AWSObject): - resource_type = "AWS::Inspector::AssessmentTarget" - - props = { - 'AssessmentTargetName': (basestring, False), - 'ResourceGroupArn': (basestring, False), - } - - -class AssessmentTemplate(AWSObject): - resource_type = "AWS::Inspector::AssessmentTemplate" - - props = { - 'AssessmentTargetArn': (basestring, True), - 'AssessmentTemplateName': (basestring, False), - 'DurationInSeconds': (integer, True), - 'RulesPackageArns': ([basestring], True), - 'UserAttributesForFindings': (Tags, False), - } - - -class ResourceGroup(AWSObject): - resource_type = "AWS::Inspector::ResourceGroup" - - props = { - 'ResourceGroupTags': (Tags, True), - } diff --git a/troposphere/iot.py b/troposphere/iot.py deleted file mode 100644 index feccdd30e..000000000 --- a/troposphere/iot.py +++ /dev/null @@ -1,198 +0,0 @@ -from . import AWSObject, AWSProperty -from .validators import boolean -try: - from awacs.aws import Policy - policytypes = (dict, Policy) -except ImportError: - policytypes = dict, - - -class CloudwatchAlarmAction(AWSProperty): - props = { - 'AlarmName': (basestring, True), - 'RoleArn': (basestring, True), - 'StateReason': (basestring, True), - 'StateValue': (basestring, True), - } - - -class CloudwatchMetricAction(AWSProperty): - props = { - 'MetricName': (basestring, True), - 'MetricNamespace': (basestring, True), - 'MetricTimestamp': (basestring, False), - 'MetricUnit': (basestring, True), - 'MetricValue': (basestring, True), - 'RoleArn': (basestring, True), - } - - -class DynamoDBAction(AWSProperty): - props = { - 'HashKeyField': (basestring, True), - 'HashKeyType': (basestring, False), - 'HashKeyValue': (basestring, True), - 'PayloadField': (basestring, False), - 'RangeKeyField': (basestring, False), - 'RangeKeyType': (basestring, False), - 'RangeKeyValue': (basestring, False), - 'RoleArn': (basestring, True), - 'TableName': (basestring, True), - } - - -class PutItemInput(AWSProperty): - props = { - 'TableName': (basestring, True), - } - - -class DynamoDBv2Action(AWSProperty): - props = { - 'PutItem': (PutItemInput, False), - 'RoleArn': (basestring, False), - } - - -class ElasticsearchAction(AWSProperty): - props = { - 'Endpoint': (basestring, True), - 'Id': (basestring, True), - 'Index': (basestring, True), - 'RoleArn': (basestring, True), - 'Type': (basestring, True), - } - - -class FirehoseAction(AWSProperty): - props = { - 'DeliveryStreamName': (basestring, True), - 'RoleArn': (basestring, True), - 'Separator': (basestring, False), - } - - -class KinesisAction(AWSProperty): - props = { - 'PartitionKey': (basestring, False), - 'RoleArn': (basestring, True), - 'StreamName': (basestring, True), - } - - -class LambdaAction(AWSProperty): - props = { - 'FunctionArn': (basestring, True), - } - - -class RepublishAction(AWSProperty): - props = { - 'RoleArn': (basestring, True), - 
'Topic': (basestring, True), - } - - -class S3Action(AWSProperty): - props = { - 'BucketName': (basestring, True), - 'Key': (basestring, True), - 'RoleArn': (basestring, True), - } - - -class SnsAction(AWSProperty): - props = { - 'MessageFormat': (basestring, False), - 'RoleArn': (basestring, True), - 'TargetArn': (basestring, True), - } - - -class SqsAction(AWSProperty): - props = { - 'QueueUrl': (basestring, True), - 'RoleArn': (basestring, True), - 'UseBase64': (basestring, False), - } - - -class Action(AWSProperty): - props = { - 'CloudwatchAlarm': (CloudwatchAlarmAction, False), - 'CloudwatchMetric': (CloudwatchMetricAction, False), - 'DynamoDB': (DynamoDBAction, False), - 'DynamoDBv2': (DynamoDBv2Action, False), - 'Elasticsearch': (ElasticsearchAction, False), - 'Firehose': (FirehoseAction, False), - 'Kinesis': (KinesisAction, False), - 'Lambda': (LambdaAction, False), - 'Republish': (RepublishAction, False), - 'S3': (S3Action, False), - 'Sns': (SnsAction, False), - 'Sqs': (SqsAction, False), - } - - -class TopicRulePayload(AWSProperty): - props = { - 'Actions': ([Action], True), - 'AwsIotSqlVersion': (basestring, False), - 'Description': (basestring, False), - 'RuleDisabled': (boolean, True), - 'Sql': (basestring, True), - } - - -class TopicRule(AWSObject): - resource_type = "AWS::IoT::TopicRule" - - props = { - 'RuleName': (basestring, False), - 'TopicRulePayload': (TopicRulePayload, True), - } - - -class ThingPrincipalAttachment(AWSObject): - resource_type = "AWS::IoT::ThingPrincipalAttachment" - - props = { - 'Principal': (basestring, True), - 'ThingName': (basestring, True), - } - - -class Thing(AWSObject): - resource_type = "AWS::IoT::Thing" - - props = { - 'AttributePayload': (dict, False), - 'ThingName': (basestring, False), - } - - -class PolicyPrincipalAttachment(AWSObject): - resource_type = "AWS::IoT::PolicyPrincipalAttachment" - - props = { - 'PolicyName': (basestring, True), - 'Principal': (basestring, True), - } - - -class Policy(AWSObject): - resource_type = "AWS::IoT::Policy" - - props = { - 'PolicyDocument': (policytypes, True), - 'PolicyName': (basestring, False), - } - - -class Certificate(AWSObject): - resource_type = "AWS::IoT::Certificate" - - props = { - 'CertificateSigningRequest': (basestring, True), - 'Status': (basestring, True), - } diff --git a/troposphere/iot1click.py b/troposphere/iot1click.py deleted file mode 100644 index 0befae5de..000000000 --- a/troposphere/iot1click.py +++ /dev/null @@ -1,39 +0,0 @@ -from . 
import AWSObject, AWSProperty -from .validators import boolean, json_checker - - -class Device(AWSObject): - resource_type = "AWS::IoT1Click::Device", - - props = { - 'DeviceId': (basestring, True), - 'Enabled': (boolean, True), - } - - -class Placement(AWSObject): - resource_type = "AWS::IoT1Click::Placement" - - props = { - 'AssociatedDevices': (json_checker, False), - 'Attributes': (json_checker, False), - 'PlacementName': (basestring, False), - 'ProjectName': (basestring, True), - } - - -class PlacementTemplate(AWSProperty): - props = { - 'DefaultAttributes': (json_checker, False), - 'DeviceTemplates': (json_checker, False), - } - - -class Project(AWSObject): - resource_type = "AWS::IoT1Click::Project" - - props = { - 'Description': (basestring, False), - 'PlacementTemplate': (PlacementTemplate, True), - 'ProjectName': (basestring, False), - } diff --git a/troposphere/iotanalytics.py b/troposphere/iotanalytics.py deleted file mode 100644 index 8863c82ef..000000000 --- a/troposphere/iotanalytics.py +++ /dev/null @@ -1,253 +0,0 @@ -# Copyright (c) 2012-2018, Mark Peek -# All rights reserved. -# -# See LICENSE file for full license. - -from . import AWSObject, AWSProperty, Tags -from .validators import boolean, integer, json_checker, double -try: - from awacs.aws import Policy - policytypes = (dict, Policy) -except ImportError: - policytypes = dict, - - -class RetentionPeriod(AWSProperty): - props = { - 'NumberOfDays': (integer, False), - 'Unlimited': (boolean, False), - } - - -class Channel(AWSObject): - resource_type = "AWS::IoTAnalytics::Channel" - - props = { - 'ChannelName': (basestring, False), - 'RetentionPeriod': (RetentionPeriod, False), - 'Tags': ((Tags, list), False), - } - - -class AddAttributes(AWSProperty): - props = { - 'Attributes': (json_checker, False), - 'Name': (basestring, False), - 'Next': (basestring, False), - } - - -class ActivityChannel(AWSProperty): - props = { - 'ChannelName': (basestring, False), - 'Name': (basestring, False), - 'Next': (basestring, False), - } - - -class Datastore(AWSProperty): - props = { - 'DatastoreName': (basestring, False), - 'Name': (basestring, False), - } - - -class DeviceRegistryEnrich(AWSProperty): - props = { - 'Attribute': (basestring, False), - 'Name': (basestring, False), - 'Next': (basestring, False), - 'RoleArn': (basestring, False), - 'ThingName': (basestring, False), - } - - -class DeviceShadowEnrich(AWSProperty): - props = { - 'Attribute': (basestring, False), - 'Name': (basestring, False), - 'Next': (basestring, False), - 'RoleArn': (basestring, False), - 'ThingName': (basestring, False), - } - - -class Filter(AWSProperty): - props = { - 'Filter': (basestring, False), - 'Name': (basestring, False), - 'Next': (basestring, False), - } - - -class Lambda(AWSProperty): - props = { - 'BatchSize': (integer, False), - 'LambdaName': (basestring, False), - 'Name': (basestring, False), - 'Next': (basestring, False), - } - - -class Math(AWSProperty): - props = { - 'Attribute': (basestring, False), - 'Math': (basestring, False), - 'Name': (basestring, False), - 'Next': (basestring, False), - } - - -class RemoveAttributes(AWSProperty): - props = { - 'Attributes': ([basestring], False), - 'Name': (basestring, False), - 'Next': (basestring, False), - } - - -class SelectAttributes(AWSProperty): - props = { - 'Attributes': ([basestring], False), - 'Name': (basestring, False), - 'Next': (basestring, False), - } - - -class Activity(AWSProperty): - props = { - 'AddAttributes': (AddAttributes, False), - 'Channel': (ActivityChannel, False), - 
'Datastore': (Datastore, False), - 'DeviceRegistryEnrich': (DeviceRegistryEnrich, False), - 'DeviceShadowEnrich': (DeviceShadowEnrich, False), - 'Filter': (Filter, False), - 'Lambda': (Lambda, False), - 'Math': (Math, False), - 'RemoveAttributes': (RemoveAttributes, False), - 'SelectAttributes': (SelectAttributes, False), - } - - -class Pipeline(AWSObject): - resource_type = "AWS::IoTAnalytics::Pipeline" - - props = { - 'PipelineActivities': ([Activity], True), - 'PipelineName': (basestring, False), - 'Tags': ((Tags, list), False), - } - - -class RetentionPeriod(AWSProperty): - props = { - 'NumberOfDays': (integer, False), - 'Unlimited': (boolean, False), - } - - -class Datastore(AWSObject): - resource_type = "AWS::IoTAnalytics::Datastore" - - props = { - 'DatastoreName': (basestring, False), - 'RetentionPeriod': (RetentionPeriod, False), - 'Tags': ((Tags, list), False), - } - - -class ResourceConfiguration(AWSProperty): - props = { - 'ComputeType': (basestring, True), - 'VolumeSizeInGB': (integer, True), - } - - -class DatasetContentVersionValue(AWSProperty): - props = { - 'DatasetName': (basestring, False), - } - - -class OutputFileUriValue(AWSProperty): - props = { - 'FileName': (basestring, False), - } - - -class Variable(AWSProperty): - props = { - 'DatasetContentVersionValue': (DatasetContentVersionValue, False), - 'DoubleValue': (double, False), - 'OutputFileUriValue': (OutputFileUriValue, False), - 'StringValue': (basestring, False), - 'VariableName': (basestring, False) - } - - -class ContainerAction(AWSProperty): - props = { - 'ExecutionRoleArn': (basestring, True), - 'Image': (basestring, True), - 'ResourceConfiguration': (ResourceConfiguration, False), - 'Variables': ([Variable], False), - } - - -class DeltaTime(AWSProperty): - props = { - 'TimeExpression': (basestring, True), - 'OffsetSeconds': (integer, True), - } - - -class QueryActionFilter(AWSProperty): - props = { - 'DeltaTime': (DeltaTime, False), - } - - -class QueryAction(AWSProperty): - props = { - 'Filters': ([QueryActionFilter], False), - 'SqlQuery': (basestring, False), - } - - -class Action(AWSProperty): - props = { - 'ActionName': (basestring, True), - 'ContainerAction': (ContainerAction, False), - 'QueryAction': (QueryAction, False) - } - - -class Schedule(AWSProperty): - props = { - 'ScheduleExpression': (basestring, True), - } - - -class TriggeringDataset(AWSProperty): - props = { - 'DatasetName': (basestring, True), - } - - -class Trigger(AWSProperty): - props = { - 'Schedule': (Schedule, False), - 'TriggeringDataset': (TriggeringDataset, False), - } - - -class Dataset(AWSObject): - resource_type = "AWS::IoTAnalytics::Dataset" - - props = { - 'Actions': ([Action], True), - 'DatasetName': (basestring, False), - 'RetentionPeriod': (RetentionPeriod, False), - 'Tags': ((Tags, list), False), - 'Triggers': ([Trigger], False), - } diff --git a/troposphere/kinesis.py b/troposphere/kinesis.py deleted file mode 100644 index a2f4ae641..000000000 --- a/troposphere/kinesis.py +++ /dev/null @@ -1,35 +0,0 @@ -# Copyright (c) 2014, Guillem Anguera -# All rights reserved. -# -# See LICENSE file for full license. - -from . 
import AWSObject, AWSProperty, Tags -from .validators import integer - - -class StreamEncryption(AWSProperty): - props = { - 'EncryptionType': (basestring, True), - 'KeyId': (basestring, True), - } - - -class Stream(AWSObject): - resource_type = "AWS::Kinesis::Stream" - - props = { - 'Name': (basestring, False), - 'RetentionPeriodHours': (integer, False), - 'ShardCount': (integer, False), - 'StreamEncryption': (StreamEncryption, False), - 'Tags': ((Tags, list), False), - } - - -class StreamConsumer(AWSObject): - resource_type = "AWS::Kinesis::StreamConsumer" - - props = { - 'ConsumerName': (basestring, True), - 'StreamARN': (basestring, True), - } diff --git a/troposphere/kms.py b/troposphere/kms.py deleted file mode 100644 index 211503b42..000000000 --- a/troposphere/kms.py +++ /dev/null @@ -1,35 +0,0 @@ -# Copyright (c) 2012-2013, Mark Peek -# All rights reserved. -# -# See LICENSE file for full license. - -from . import AWSObject, Tags -from .validators import boolean, integer_range, key_usage_type -try: - from awacs.aws import Policy - policytypes = (dict, Policy) -except ImportError: - policytypes = dict, - - -class Alias(AWSObject): - resource_type = "AWS::KMS::Alias" - - props = { - 'AliasName': (basestring, True), - 'TargetKeyId': (basestring, True) - } - - -class Key(AWSObject): - resource_type = "AWS::KMS::Key" - - props = { - 'Description': (basestring, False), - 'Enabled': (boolean, False), - 'EnableKeyRotation': (boolean, False), - 'KeyPolicy': (policytypes, True), - 'KeyUsage': (key_usage_type, False), - 'PendingWindowInDays': (integer_range(7, 30), False), - 'Tags': ((Tags, list), False) - } diff --git a/troposphere/logs.py b/troposphere/logs.py deleted file mode 100644 index 2abd5b7b7..000000000 --- a/troposphere/logs.py +++ /dev/null @@ -1,62 +0,0 @@ -from . import AWSObject, AWSProperty -from .validators import integer_list_item -from .constants import LOGS_ALLOWED_RETENTION_DAYS as RETENTION_DAYS - - -class Destination(AWSObject): - resource_type = "AWS::Logs::Destination" - - props = { - 'DestinationName': (basestring, True), - 'DestinationPolicy': (basestring, True), - 'RoleArn': (basestring, True), - 'TargetArn': (basestring, True), - } - - -class LogGroup(AWSObject): - resource_type = "AWS::Logs::LogGroup" - - props = { - 'LogGroupName': (basestring, False), - 'RetentionInDays': (integer_list_item(RETENTION_DAYS), False), - } - - -class LogStream(AWSObject): - resource_type = "AWS::Logs::LogStream" - - props = { - 'LogGroupName': (basestring, True), - 'LogStreamName': (basestring, False) - } - - -class MetricTransformation(AWSProperty): - props = { - 'DefaultValue': (float, False), - 'MetricName': (basestring, True), - 'MetricNamespace': (basestring, True), - 'MetricValue': (basestring, True), - } - - -class MetricFilter(AWSObject): - resource_type = "AWS::Logs::MetricFilter" - - props = { - 'FilterPattern': (basestring, True), - 'LogGroupName': (basestring, True), - 'MetricTransformations': ([MetricTransformation], True), - } - - -class SubscriptionFilter(AWSObject): - resource_type = "AWS::Logs::SubscriptionFilter" - - props = { - 'DestinationArn': (basestring, True), - 'FilterPattern': (basestring, True), - 'LogGroupName': (basestring, True), - 'RoleArn': (basestring, False), - } diff --git a/troposphere/neptune.py b/troposphere/neptune.py deleted file mode 100644 index 98885e997..000000000 --- a/troposphere/neptune.py +++ /dev/null @@ -1,82 +0,0 @@ -# Copyright (c) 2012-2018, Mark Peek -# All rights reserved. -# -# See LICENSE file for full license. - -from . 
import AWSObject -from .validators import boolean, integer -from troposphere import Tags - - -class DBClusterParameterGroup(AWSObject): - resource_type = "AWS::Neptune::DBClusterParameterGroup" - - props = { - 'Description': (basestring, True), - 'Family': (basestring, True), - 'Name': (basestring, False), - 'Parameters': (dict, True), - 'Tags': (Tags, False), - } - - -class DBCluster(AWSObject): - resource_type = "AWS::Neptune::DBCluster" - - props = { - 'AvailabilityZones': ([basestring], False), - 'BackupRetentionPeriod': (integer, False), - 'DBClusterIdentifier': (basestring, False), - 'DBClusterParameterGroupName': (basestring, False), - 'DBSubnetGroupName': (basestring, False), - 'IamAuthEnabled': (boolean, False), - 'KmsKeyId': (basestring, False), - 'Port': (integer, False), - 'PreferredBackupWindow': (basestring, False), - 'PreferredMaintenanceWindow': (basestring, False), - 'SnapshotIdentifier': (basestring, False), - 'StorageEncrypted': (boolean, False), - 'Tags': (Tags, False), - 'VpcSecurityGroupIds': ([basestring], False), - } - - -class DBInstance(AWSObject): - resource_type = "AWS::Neptune::DBInstance" - - props = { - 'AllowMajorVersionUpgrade': (boolean, False), - 'AutoMinorVersionUpgrade': (boolean, False), - 'AvailabilityZone': (basestring, False), - 'DBClusterIdentifier': (basestring, False), - 'DBInstanceClass': (basestring, True), - 'DBInstanceIdentifier': (basestring, False), - 'DBParameterGroupName': (basestring, False), - 'DBSnapshotIdentifier': (basestring, False), - 'DBSubnetGroupName': (basestring, False), - 'PreferredMaintenanceWindow': (basestring, False), - 'Tags': (Tags, False), - } - - -class DBParameterGroup(AWSObject): - resource_type = "AWS::Neptune::DBParameterGroup" - - props = { - 'Description': (basestring, True), - 'Family': (basestring, True), - 'Name': (basestring, False), - 'Parameters': (dict, True), - 'Tags': (Tags, False), - } - - -class DBSubnetGroup(AWSObject): - resource_type = "AWS::Neptune::DBSubnetGroup" - - props = { - 'DBSubnetGroupDescription': (basestring, True), - 'DBSubnetGroupName': (basestring, False), - 'SubnetIds': ([basestring], True), - 'Tags': (Tags, False), - } diff --git a/troposphere/openstack/heat.py b/troposphere/openstack/heat.py deleted file mode 100644 index 37b0c78bc..000000000 --- a/troposphere/openstack/heat.py +++ /dev/null @@ -1,30 +0,0 @@ -# Copyright (c) 2012-2013, Mark Peek -# Copyright (c) 2014, Andy Botting -# All rights reserved. -# -# See LICENSE file for full license. - -from troposphere import AWSObject -from troposphere.validators import integer - - -# Due to the strange nature of the OpenStack compatability layer, some values -# that should be integers fail to validate and need to be represented as -# strings. For this reason, we duplicate the AWS::AutoScaling::AutoScalingGroup -# and change these types. 
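# --- Editorial illustration (not part of the removed heat.py shown in this diff) ---
# A minimal usage sketch of the Heat-specific class defined just below, assuming the
# pre-removal import path troposphere.openstack.heat and the standard troposphere
# Template API; the resource titles and the "nova" availability zone are hypothetical.
# It shows the point of the comment above: MinSize/MaxSize/DesiredCapacity are passed
# as strings, which is why this copy of AWS::AutoScaling::AutoScalingGroup exists.
from troposphere import Template
from troposphere.openstack.heat import AWSAutoScalingGroup

t = Template()
t.add_resource(AWSAutoScalingGroup(
    "WebServerGroup",
    AvailabilityZones=["nova"],              # illustrative Heat availability zone
    LaunchConfigurationName="WebServerConfig",  # hypothetical launch configuration
    MinSize="1",                             # strings, not ints, per the props below
    MaxSize="3",
    DesiredCapacity="2",
))
print(t.to_json())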
-class AWSAutoScalingGroup(AWSObject): - resource_type = "AWS::AutoScaling::AutoScalingGroup" - - props = { - 'AvailabilityZones': (list, True), - 'Cooldown': (integer, False), - 'DesiredCapacity': (basestring, False), - 'HealthCheckGracePeriod': (integer, False), - 'HealthCheckType': (basestring, False), - 'LaunchConfigurationName': (basestring, True), - 'LoadBalancerNames': (list, False), - 'MaxSize': (basestring, True), - 'MinSize': (basestring, True), - 'Tags': (list, False), - 'VPCZoneIdentifier': (list, False), - } diff --git a/troposphere/openstack/neutron.py b/troposphere/openstack/neutron.py deleted file mode 100644 index de5ffff09..000000000 --- a/troposphere/openstack/neutron.py +++ /dev/null @@ -1,309 +0,0 @@ -# Copyright (c) 2012-2013, Mark Peek -# Copyright (c) 2014, Andy Botting -# All rights reserved. -# -# See LICENSE file for full license. - - -from troposphere import AWSObject, AWSProperty -from troposphere.validators import boolean, integer, integer_range -from troposphere.validators import network_port, positive_integer - - -class Firewall(AWSObject): - resource_type = "OS::Neutron::Firewall" - - props = { - 'admin_state_up': (boolean, False), - 'description': (basestring, False), - 'firewall_policy_id': (basestring, True), - 'name': (basestring, False), - } - - -class FirewallPolicy(AWSObject): - resource_type = "OS::Neutron::FirewallPolicy" - - props = { - 'audited': (boolean, False), - 'description': (basestring, False), - 'firewall_rules': (list, True), - 'name': (basestring, False), - 'shared': (boolean, False), - } - - -class FirewallRule(AWSObject): - resource_type = "OS::Neutron::FirewallRule" - - props = { - 'action': (basestring, False), - 'description': (basestring, False), - 'destination_ip_address': (basestring, False), - 'destination_port': (network_port, False), - 'enabled': (boolean, False), - 'ip_version': (basestring, False), - 'name': (basestring, False), - 'protocol': (basestring, False), - 'shared': (boolean, False), - 'source_ip_address': (basestring, False), - 'source_port': (network_port, False), - } - - def validate(self): - if 'action' in self.resource: - action = self.resource['action'] - if action not in ['allow', 'deny']: - raise ValueError( - "The action attribute must be " - "either allow or deny") - - if 'ip_version' in self.resource: - ip_version = self.resource['ip_version'] - if ip_version not in ['4', '6']: - raise ValueError( - "The ip_version attribute must be " - "either 4 or 6") - - if 'protocol' in self.resource: - protocol = self.resource['protocol'] - if protocol not in ['tcp', 'udp', 'icmp', None]: - raise ValueError( - "The protocol attribute must be " - "either tcp, udp, icmp or None") - - return True - - -class FloatingIP(AWSObject): - resource_type = "OS::Neutron::FloatingIP" - - props = { - 'fixed_ip_address': (basestring, False), - 'floating_network_id': (basestring, True), - 'port_id': (basestring, False), - 'value_specs': (dict, False), - } - - -class FloatingIPAssociation(AWSObject): - resource_type = "OS::Neutron::FloatingIPAssociation" - - props = { - 'fixed_ip_address': (basestring, False), - 'floatingip_id': (basestring, True), - 'port_id': (basestring, False), - } - - -class HealthMonitor(AWSObject): - resource_type = "OS::Neutron::HealthMonitor" - - props = { - 'admin_state_up': (boolean, False), - 'delay': (positive_integer, True), - 'expected_codes': (basestring, False), - 'http_method': (basestring, False), - 'max_retries': (integer, True), - 'timeout': (integer, True), - 'type': (basestring, True), - 
'url_path': (basestring, False), - } - - def validate(self): - - if 'type' in self.resource: - mon_type = self.resource['type'] - if mon_type not in ['PING', 'TCP', 'HTTP', 'HTTPS']: - raise ValueError( - "The type attribute must be " - "either PING, TCP, HTTP or HTTPS") - - return True - - -class SessionPersistence(AWSProperty): - props = { - 'cookie_name': (basestring, False), - 'type': (basestring, False), - } - - def validate(self): - if 'type' in self.resource: - if 'cookie_name' not in self.resource: - raise ValueError( - "The cookie_name attribute must be " - "given if session type is APP_COOKIE") - - session_type = self.resource['type'] - if session_type not in ['SOURCE_IP', 'HTTP_COOKIE', 'APP_COOKIE']: - raise ValueError( - "The type attribute must be " - "either SOURCE_IP, HTTP_COOKIE or APP_COOKIE") - - return True - - -class VIP(AWSProperty): - props = { - 'address': (basestring, False), - 'admin_state_up': (boolean, False), - 'connection_limit': (integer, True), - 'description': (basestring, False), - 'name': (basestring, False), - 'protocol_port': (network_port, True), - 'session_persistence': (SessionPersistence, False), - } - - -class Pool(AWSObject): - resource_type = "OS::Neutron::Pool" - - props = { - 'admin_state_up': (boolean, False), - 'description': (basestring, False), - 'lb_method': (basestring, True), - 'monitors': (list, False), - 'name': (basestring, False), - 'protocol': (basestring, True), - 'subnet_id': (basestring, True), - 'vip': (VIP, False), - } - - def validate(self): - - if 'lb_method' in self.resource: - lb_method = self.resource['lb_method'] - if lb_method not in ['ROUND_ROBIN', 'LEAST_CONNECTIONS', - 'SOURCE_IP']: - raise ValueError( - "The lb_method attribute must be " - "either ROUND_ROBIN, LEAST_CONNECTIONS " - "or SOURCE_IP") - - if 'protocol' in self.resource: - protocol = self.resource['protocol'] - if protocol not in ['TCP', 'HTTP', 'HTTPS']: - raise ValueError( - "The type attribute must be " - "either TCP, HTTP or HTTPS") - - return True - - -class LoadBalancer(AWSObject): - resource_type = "OS::Neutron::LoadBalancer" - - props = { - 'members': (list, False), - 'pool_id': (Pool, True), - 'protocol_port': (network_port, True), - } - - -class Net(AWSObject): - resource_type = "OS::Neutron::Net" - - props = { - 'admin_state_up': (boolean, False), - 'name': (basestring, False), - 'shared': (boolean, False), - 'tenant_id': (basestring, False), - 'value_specs': (dict, False), - } - - -class PoolMember(AWSObject): - resource_type = "OS::Neutron::PoolMember" - - props = { - 'address': (basestring, True), - 'admin_state_up': (boolean, False), - 'pool_id': (Pool, True), - 'protocol_port': (network_port, True), - 'weight': (integer_range(0, 256), False), - } - - -class AddressPair(AWSProperty): - props = { - 'ip_address': (basestring, True), - 'mac_address': (basestring, False), - } - - -class FixedIP(AWSProperty): - props = { - 'ip_address': (basestring, False), - 'subnet_id': (basestring, False), - } - - -class Port(AWSObject): - resource_type = "OS::Neutron::Port" - - props = { - 'admin_state_up': (boolean, False), - 'allowed_address_pairs': (list, False), - 'device_id': (basestring, False), - 'fixed_ips': (list, False), - 'mac_address': (basestring, False), - 'name': (basestring, False), - 'network_id': (basestring, True), - 'security_groups': (list, False), - 'value_specs': (dict, False), - } - - -class SecurityGroup(AWSObject): - resource_type = "OS::Neutron::SecurityGroup" - - props = { - 'description': (basestring, True), - 'name': 
(basestring, False), - 'rules': (list, False), - } - - -class SecurityGroupRule(AWSProperty): - props = { - 'direction': (basestring, False), - 'ethertype': (basestring, False), - 'port_range_max': (network_port, False), - 'port_range_min': (network_port, False), - 'protocol': (basestring, False), - 'remote_group_id': (basestring, False), - 'remote_ip_prefix': (basestring, False), - 'remote_mode': (basestring, False), - } - - def validate(self): - if 'direction' in self.resource: - direction = self.resource['direction'] - if direction not in ['ingress', 'egress']: - raise ValueError( - "The direction attribute must be " - "either ingress or egress") - - if 'ethertype' in self.resource: - ethertype = self.resource['ethertype'] - if ethertype not in ['IPv4', 'IPv6']: - raise ValueError( - "The ethertype attribute must be " - "either IPv4 or IPv6") - - if 'protocol' in self.resource: - protocol = self.resource['protocol'] - if protocol not in ['tcp', 'udp', 'icmp']: - raise ValueError( - "The protocol attribute must be " - "either tcp, udp or icmp") - - if 'remote_mode' in self.resource: - remote_mode = self.resource['remote_mode'] - if remote_mode not in ['remote_ip_prefix', 'remote_group_id']: - raise ValueError( - "The remote_mode attribute must be " - "either remote_ip_prefix or remote_group_id") - - return True diff --git a/troposphere/openstack/nova.py b/troposphere/openstack/nova.py deleted file mode 100644 index 200f630c4..000000000 --- a/troposphere/openstack/nova.py +++ /dev/null @@ -1,162 +0,0 @@ -# Copyright (c) 2012-2013, Mark Peek -# Copyright (c) 2014, Andy Botting -# All rights reserved. -# -# See LICENSE file for full license. - - -from troposphere import AWSObject, AWSProperty -from troposphere.validators import boolean, integer, network_port - - -class BlockDeviceMapping(AWSProperty): - props = { - 'delete_on_termination': (boolean, False), - 'device_name': (basestring, True), - 'snapshot_id': (basestring, False), - 'volume_id': (basestring, False), - 'volume_size': (integer, False), - } - - -class BlockDeviceMappingV2(AWSProperty): - props = { - 'boot_index': (integer, False), - 'delete_on_termination': (boolean, False), - 'device_name': (basestring, False), - 'device_type': (basestring, False), - 'disk_bus': (basestring, False), - 'ephemeral_format': (basestring, False), - 'ephemeral_size': (integer, False), - 'image_id': (basestring, False), - 'snapshot_id': (basestring, False), - 'swap_size': (integer, False), - 'volume_id': (basestring, False), - 'volume_size': (integer, False), - } - - def validate(self): - if 'device_type' in self.resource: - device_type = self.resource['device_type'] - if device_type not in ['cdrom', 'disk']: - raise ValueError( - "The device_type attribute " - "must be either cdrom or disk") - - if 'disk_bus' in self.resource: - disk_bus = self.resource['disk_bus'] - if disk_bus not in ['ide', 'lame_bus', 'scsi', 'usb', 'virtio']: - raise ValueError( - "The device_bus attribute " - "must be one of ide, lame_bus, scsi, usb or virtio") - - if 'ephemeral_format' in self.resource: - ephemeral_format = self.resource['ephemeral_format'] - if ephemeral_format not in ['ext2', 'ext3', 'ext4', 'xfs', 'ntfs']: - raise ValueError( - "The device_type attribute " - "must be one of ext2, ext3, ext4, xfs, ntfs") - - -class Network(AWSProperty): - props = { - 'fixed_ip': (basestring, False), - 'network': (basestring, False), - 'port': (network_port, False), - } - - -class FloatingIP(AWSObject): - resource_type = "OS::Nova::FloatingIP" - - props = { - 'pool': 
(basestring, False), - } - - -class FloatingIPAssociation(AWSObject): - resource_type = "OS::Nova::FloatingIPAssociation" - - props = { - 'floating_ip': (basestring, True), - 'server_ip': (basestring, True), - } - - -class KeyPair(AWSObject): - resource_type = "OS::Nova::KeyPair" - - props = { - 'name': (basestring, True), - 'public_key': (basestring, False), - 'save_private_key': (boolean, False), - } - - -class Server(AWSObject): - resource_type = "OS::Nova::Server" - - props = { - 'admin_pass': (basestring, False), - 'admin_user': (basestring, False), - 'availability_zone': (basestring, False), - 'block_device_mapping': (list, False), - 'block_device_mapping_v2': (list, False), - 'config_drive': (basestring, False), - 'diskConfig': (basestring, False), - 'flavor': (basestring, False), - 'flavor_update_policy': (basestring, False), - 'image': (basestring, True), - 'image_update_policy': (basestring, False), - 'key_name': (basestring, False), - 'metadata': (dict, False), - 'name': (basestring, False), - 'personality': (dict, False), - 'networks': (list, True), - 'reservation_id': (basestring, False), - 'scheduler_hints': (dict, False), - 'security_groups': (list, False), - 'software_config_transport': (basestring, False), - 'user_data': (basestring, False), - 'user_data_format': (basestring, False), - } - - def validate(self): - if 'diskConfig' in self.resource: - diskConfig = self.resource['diskConfig'] - if diskConfig not in ['AUTO', 'MANUAL']: - raise ValueError( - "The diskConfig attribute " - "must be either AUTO or MANUAL") - - if 'flavor_update_policy' in self.resource: - flavor_update_policy = self.resource['flavor_update_policy'] - if flavor_update_policy not in ['RESIZE', 'REPLACE']: - raise ValueError( - "The flavor_update_policy attribute " - "must be either RESIZE or REPLACE") - - if 'image_update_policy' in self.resource: - image_update_policy = self.resource['flavor_update_policy'] - if image_update_policy not in ['REBUILD', 'REPLACE', - 'REBUILD_PRESERVE_EPHEMERAL']: - raise ValueError( - "The image_update_policy attribute " - "must be either REBUILD, REPLACE or " - "REBUILD_PRESERVE_EPHEMERAL") - - if 'software_config_transport' in self.resource: - sct = self.resource['software_config_transport'] - if sct not in ['POLL_SERVER_CFN', 'POLL_SERVER_HEAT']: - raise ValueError( - "The software_config_transport attribute " - "must be either POLL_SERVER_CFN or POLL_SERVER_HEAT") - - if 'user_data_format' in self.resource: - user_data_format = self.resource['user_data_format'] - if user_data_format not in ['HEAT_CFNTOOLS', 'RAW']: - raise ValueError( - "The user_data_format attribute " - "must be either HEAT_CFNTOOLS or RAW") - - return True diff --git a/troposphere/opsworks.py b/troposphere/opsworks.py deleted file mode 100644 index ff30e83dd..000000000 --- a/troposphere/opsworks.py +++ /dev/null @@ -1,333 +0,0 @@ -# Copyright (c) 2014, Yuta Okamoto -# All rights reserved. -# -# See LICENSE file for full license. - -from . 
import AWSObject, AWSProperty, Tags -from .validators import boolean, integer, mutually_exclusive - - -class Source(AWSProperty): - props = { - 'Password': (basestring, False), - 'Revision': (basestring, False), - 'SshKey': (basestring, False), - 'Type': (basestring, False), - 'Url': (basestring, False), - 'Username': (basestring, False), - } - - -class SslConfiguration(AWSProperty): - props = { - 'Certificate': (basestring, True), - 'Chain': (basestring, False), - 'PrivateKey': (basestring, True), - } - - -class ChefConfiguration(AWSProperty): - props = { - 'BerkshelfVersion': (basestring, False), - 'ManageBerkshelf': (boolean, False), - } - - -class Recipes(AWSProperty): - props = { - 'Configure': ([basestring], False), - 'Deploy': ([basestring], False), - 'Setup': ([basestring], False), - 'Shutdown': ([basestring], False), - 'Undeploy': ([basestring], False), - } - - -def validate_volume_type(volume_type): - volume_types = ('standard', 'io1', 'gp2') - if volume_type not in volume_types: - raise ValueError("VolumeType (given: %s) must be one of: %s" % ( - volume_type, ', '.join(volume_types))) - return volume_type - - -class VolumeConfiguration(AWSProperty): - props = { - 'Encrypted': (boolean, False), - 'Iops': (integer, False), - 'MountPoint': (basestring, True), - 'NumberOfDisks': (integer, True), - 'RaidLevel': (integer, False), - 'Size': (integer, True), - 'VolumeType': (validate_volume_type, False) - } - - def validate(self): - volume_type = self.properties.get('VolumeType') - iops = self.properties.get('Iops') - if volume_type == 'io1' and not iops: - raise ValueError("Must specify Iops if VolumeType is 'io1'.") - if volume_type != 'io1' and iops: - raise ValueError("Cannot specify Iops if VolumeType is not 'io1'.") - - -class StackConfigurationManager(AWSProperty): - props = { - 'Name': (basestring, False), - 'Version': (basestring, False), - } - - -class TimeBasedAutoScaling(AWSProperty): - props = { - 'Monday': (dict, False), - 'Tuesday': (dict, False), - 'Wednesday': (dict, False), - 'Thursday': (dict, False), - 'Friday': (dict, False), - 'Saturday': (dict, False), - 'Sunday': (dict, False), - } - - -class AutoScalingThresholds(AWSProperty): - props = { - 'CpuThreshold': (float, False), - 'IgnoreMetricsTime': (integer, False), - 'InstanceCount': (integer, False), - 'LoadThreshold': (float, False), - 'MemoryThreshold': (float, False), - 'ThresholdsWaitTime': (integer, False), - } - - -class Environment(AWSProperty): - props = { - 'Key': (basestring, True), - 'Secure': (bool, False), - 'Value': (basestring, True), - } - - -class LoadBasedAutoScaling(AWSProperty): - props = { - 'DownScaling': (AutoScalingThresholds, False), - 'Enable': (bool, False), - 'UpScaling': (AutoScalingThresholds, False), - } - - -def validate_data_source_type(data_source_type): - data_source_types = ( - 'AutoSelectOpsworksMysqlInstance', - 'OpsworksMysqlInstance', - 'RdsDbInstance' - ) - if data_source_type not in data_source_types: - raise ValueError("Type (given: %s) must be one of: %s" % ( - data_source_type, ', '.join(data_source_types))) - return data_source_type - - -class DataSource(AWSProperty): - props = { - 'Arn': (basestring, False), - 'DatabaseName': (basestring, False), - 'Type': (validate_data_source_type, False) - } - - -class App(AWSObject): - resource_type = "AWS::OpsWorks::App" - - props = { - 'AppSource': (Source, False), - 'Attributes': (dict, False), - 'DataSources': ([DataSource], False), - 'Description': (basestring, False), - 'Domains': ([basestring], False), - 'EnableSsl': 
(boolean, False), - 'Environment': ([Environment], False), - 'Name': (basestring, True), - 'Shortname': (basestring, False), - 'SslConfiguration': (SslConfiguration, False), - 'StackId': (basestring, True), - 'Type': (basestring, True), - } - - -class ElasticLoadBalancerAttachment(AWSObject): - resource_type = "AWS::OpsWorks::ElasticLoadBalancerAttachment" - - props = { - 'ElasticLoadBalancerName': (basestring, True), - 'LayerId': (basestring, True), - 'Tags': ((Tags, list), False), - } - - -class EbsBlockDevice(AWSProperty): - props = { - 'DeleteOnTermination': (boolean, False), - 'Iops': (integer, False), - 'SnapshotId': (basestring, False), - 'VolumeSize': (integer, False), - 'VolumeType': (basestring, False), - } - - -class BlockDeviceMapping(AWSProperty): - props = { - 'DeviceName': (basestring, False), - 'Ebs': (EbsBlockDevice, False), - 'NoDevice': (basestring, False), - 'VirtualName': (basestring, False), - } - - def validate(self): - conds = [ - 'Ebs', - 'VirtualName', - ] - mutually_exclusive(self.__class__.__name__, self.properties, conds) - - -class Instance(AWSObject): - resource_type = "AWS::OpsWorks::Instance" - - props = { - 'AgentVersion': (basestring, False), - 'AmiId': (basestring, False), - 'Architecture': (basestring, False), - 'AutoScalingType': (basestring, False), - 'AvailabilityZone': (basestring, False), - 'BlockDeviceMappings': ([BlockDeviceMapping], False), - 'EbsOptimized': (boolean, False), - 'ElasticIps': ([basestring], False), - 'Hostname': (basestring, False), - 'InstallUpdatesOnBoot': (boolean, False), - 'InstanceType': (basestring, True), - 'LayerIds': ([basestring], True), - 'Os': (basestring, False), - 'RootDeviceType': (basestring, False), - 'SshKeyName': (basestring, False), - 'StackId': (basestring, True), - 'SubnetId': (basestring, False), - 'Tenancy': (basestring, False), - 'TimeBasedAutoScaling': (TimeBasedAutoScaling, False), - 'VirtualizationType': (basestring, False), - 'Volumes': ([basestring], False), - } - - -class ShutdownEventConfiguration(AWSProperty): - props = { - 'DelayUntilElbConnectionsDrained': (boolean, False), - 'ExecutionTimeout': (integer, False), - } - - -class LifeCycleConfiguration(AWSProperty): - props = { - 'ShutdownEventConfiguration': (ShutdownEventConfiguration, False), - } - - -class Layer(AWSObject): - resource_type = "AWS::OpsWorks::Layer" - - props = { - 'Attributes': (dict, False), - 'AutoAssignElasticIps': (boolean, True), - 'AutoAssignPublicIps': (boolean, True), - 'CustomInstanceProfileArn': (basestring, False), - 'CustomJson': ((basestring, dict), False), - 'CustomRecipes': (Recipes, False), - 'CustomSecurityGroupIds': ([basestring], False), - 'EnableAutoHealing': (boolean, True), - 'InstallUpdatesOnBoot': (boolean, False), - 'LifecycleEventConfiguration': (LifeCycleConfiguration, False), - 'LoadBasedAutoScaling': (LoadBasedAutoScaling, False), - 'Name': (basestring, True), - 'Packages': ([basestring], False), - 'Shortname': (basestring, True), - 'StackId': (basestring, True), - 'Type': (basestring, True), - 'VolumeConfigurations': ([VolumeConfiguration], False), - } - - -class RdsDbInstance(AWSProperty): - props = { - 'DbPassword': (basestring, True), - 'DbUser': (basestring, True), - 'RdsDbInstanceArn': (basestring, True) - } - - -class ElasticIp(AWSProperty): - props = { - 'Ip': (basestring, True), - 'Name': (basestring, False), - } - - -class Stack(AWSObject): - resource_type = "AWS::OpsWorks::Stack" - - props = { - 'AgentVersion': (basestring, False), - 'Attributes': (dict, False), - 'ChefConfiguration': 
(ChefConfiguration, False), - 'CloneAppIds': ([basestring], False), - 'ClonePermissions': (boolean, False), - 'ConfigurationManager': (StackConfigurationManager, False), - 'CustomCookbooksSource': (Source, False), - 'CustomJson': ((basestring, dict), False), - 'DefaultAvailabilityZone': (basestring, False), - 'DefaultInstanceProfileArn': (basestring, True), - 'DefaultOs': (basestring, False), - 'DefaultRootDeviceType': (basestring, False), - 'DefaultSshKeyName': (basestring, False), - 'DefaultSubnetId': (basestring, False), - 'EcsClusterArn': (basestring, False), - 'ElasticIps': ([ElasticIp], False), - 'HostnameTheme': (basestring, False), - 'Name': (basestring, True), - 'RdsDbInstances': ([RdsDbInstance], False), - 'ServiceRoleArn': (basestring, True), - 'SourceStackId': (basestring, False), - 'Tags': ((Tags, list), False), - 'UseCustomCookbooks': (boolean, False), - 'UseOpsworksSecurityGroups': (boolean, False), - 'VpcId': (basestring, False), - } - - def validate(self): - if 'VpcId' in self.properties and \ - 'DefaultSubnetId' not in self.properties: - raise ValueError('Using VpcId requires DefaultSubnetId to be' - 'specified') - return True - - -class UserProfile(AWSObject): - resource_type = "AWS::OpsWorks::UserProfile" - - props = { - 'AllowSelfManagement': (boolean, False), - 'IamUserArn': (basestring, True), - 'SshPublicKey': (basestring, False), - 'SshUsername': (basestring, False), - } - - -class Volume(AWSObject): - resource_type = "AWS::OpsWorks::Volume" - - props = { - 'Ec2VolumeId': (basestring, True), - 'MountPoint': (basestring, False), - 'Name': (basestring, False), - 'StackId': (basestring, True), - } diff --git a/troposphere/policies.py b/troposphere/policies.py deleted file mode 100644 index 1d0dcf512..000000000 --- a/troposphere/policies.py +++ /dev/null @@ -1,64 +0,0 @@ -from . 
import AWSProperty, AWSAttribute, validate_pausetime
-from .validators import positive_integer, integer, boolean
-
-
-class AutoScalingRollingUpdate(AWSProperty):
-    props = {
-        'MaxBatchSize': (positive_integer, False),
-        'MinInstancesInService': (integer, False),
-        'MinSuccessfulInstancesPercent': (integer, False),
-        'PauseTime': (validate_pausetime, False),
-        'SuspendProcesses': ([basestring], False),
-        'WaitOnResourceSignals': (boolean, False),
-    }
-
-
-class AutoScalingScheduledAction(AWSProperty):
-    props = {
-        'IgnoreUnmodifiedGroupSizeProperties': (boolean, False),
-    }
-
-
-class AutoScalingReplacingUpdate(AWSProperty):
-    props = {
-        'WillReplace': (boolean, False),
-    }
-
-
-class CodeDeployLambdaAliasUpdate(AWSProperty):
-    props = {
-        'AfterAllowTrafficHook': (basestring, False),
-        'ApplicationName': (boolean, True),
-        'BeforeAllowTrafficHook': (basestring, False),
-        'DeploymentGroupName': (boolean, True),
-    }
-
-
-class UpdatePolicy(AWSAttribute):
-    props = {
-        'AutoScalingRollingUpdate': (AutoScalingRollingUpdate, False),
-        'AutoScalingScheduledAction': (AutoScalingScheduledAction, False),
-        'AutoScalingReplacingUpdate': (AutoScalingReplacingUpdate, False),
-        'CodeDeployLambdaAliasUpdate': (CodeDeployLambdaAliasUpdate, False),
-        'UseOnlineResharding': (boolean, False),
-    }
-
-
-class ResourceSignal(AWSProperty):
-    props = {
-        'Count': (positive_integer, False),
-        'Timeout': (validate_pausetime, False),
-    }
-
-
-class AutoScalingCreationPolicy(AWSProperty):
-    props = {
-        'MinSuccessfulInstancesPercent': (integer, False),
-    }
-
-
-class CreationPolicy(AWSAttribute):
-    props = {
-        'AutoScalingCreationPolicy': (AutoScalingCreationPolicy, False),
-        'ResourceSignal': (ResourceSignal, True),
-    }
diff --git a/troposphere/rds.py b/troposphere/rds.py
deleted file mode 100644
index 6379a0093..000000000
--- a/troposphere/rds.py
+++ /dev/null
@@ -1,429 +0,0 @@
-# Copyright (c) 2013, Mark Peek
-# All rights reserved.
-#
-# See LICENSE file for full license.
-
-import re
-
-from . 
import AWSHelperFn, AWSObject, AWSProperty, Tags -from .validators import (boolean, network_port, integer, positive_integer, - integer_range) - -# Taken from: -# http://docs.aws.amazon.com/AmazonRDS/latest/APIReference/API_CreateDBInstance.html - -VALID_STORAGE_TYPES = ('standard', 'gp2', 'io1') -VALID_DB_ENGINES = ('MySQL', 'mysql', 'oracle-se1', 'oracle-se2', 'oracle-se', - 'oracle-ee', 'sqlserver-ee', 'sqlserver-se', - 'sqlserver-ex', 'sqlserver-web', 'postgres', 'aurora', - 'aurora-mysql', 'aurora-postgresql', 'mariadb') -VALID_DB_ENGINE_MODES = ('provisioned', 'serverless') -VALID_LICENSE_MODELS = ('license-included', 'bring-your-own-license', - 'general-public-license', 'postgresql-license') -VALID_SCALING_CONFIGURATION_CAPACITIES = (2, 4, 8, 16, 32, 64, 128, 256) - - -def validate_iops(iops): - """DBInstance Iops validation rules.""" - - iops = integer(iops) - if int(iops) == 0: - return iops - if int(iops) < 1000: - raise ValueError("DBInstance Iops, if set, must be greater than 1000.") - return iops - - -def validate_storage_type(storage_type): - """Validate StorageType for DBInstance""" - - if storage_type not in VALID_STORAGE_TYPES: - raise ValueError("DBInstance StorageType must be one of: %s" % - ", ".join(VALID_STORAGE_TYPES)) - return storage_type - - -def validate_engine(engine): - """Validate database Engine for DBInstance """ - - if engine not in VALID_DB_ENGINES: - raise ValueError("DBInstance Engine must be one of: %s" % - ", ".join(VALID_DB_ENGINES)) - return engine - - -def validate_engine_mode(engine_mode): - """Validate database EngineMode for DBCluster""" - - if engine_mode not in VALID_DB_ENGINE_MODES: - raise ValueError("DBCluster EngineMode must be one of: %s" % - ", ".join(VALID_DB_ENGINE_MODES)) - return engine_mode - - -def validate_license_model(license_model): - """Validate LicenseModel for DBInstance""" - - if license_model not in VALID_LICENSE_MODELS: - raise ValueError("DBInstance LicenseModel must be one of: %s" % - ", ".join(VALID_LICENSE_MODELS)) - return license_model - - -def validate_backup_window(window): - """Validate PreferredBackupWindow for DBInstance""" - - hour = r'[01]?[0-9]|2[0-3]' - minute = r'[0-5][0-9]' - r = ("(?P%s):(?P%s)-" - "(?P%s):(?P%s)") % (hour, minute, hour, minute) - range_regex = re.compile(r) - m = range_regex.match(window) - if not m: - raise ValueError("DBInstance PreferredBackupWindow must be in the " - "format: hh24:mi-hh24:mi") - start_ts = (int(m.group('start_hour')) * 60) + int(m.group('start_minute')) - end_ts = (int(m.group('end_hour')) * 60) + int(m.group('end_minute')) - if abs(end_ts - start_ts) < 30: - raise ValueError("DBInstance PreferredBackupWindow must be at least " - "30 minutes long.") - return window - - -def validate_maintenance_window(window): - """Validate PreferredMaintenanceWindow for DBInstance""" - - days = ("Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun") - day_re = r'[A-Z]{1}[a-z]{2}' - hour = r'[01]?[0-9]|2[0-3]' - minute = r'[0-5][0-9]' - r = ("(?P%s):(?P%s):(?P%s)-" - "(?P%s):(?P%s):(?P%s)") % (day_re, - hour, - minute, - day_re, - hour, - minute) - range_regex = re.compile(r) - m = range_regex.match(window) - if not m: - raise ValueError("DBInstance PreferredMaintenanceWindow must be in " - "the format: ddd:hh24:mi-ddd:hh24:mi") - if m.group('start_day') not in days or m.group('end_day') not in days: - raise ValueError("DBInstance PreferredMaintenanceWindow day part of " - "ranges must be one of: %s" % ", ".join(days)) - start_ts = (days.index(m.group('start_day')) * 24 * 60) + \ - 
(int(m.group('start_hour')) * 60) + int(m.group('start_minute')) - end_ts = (days.index(m.group('end_day')) * 24 * 60) + \ - (int(m.group('end_hour')) * 60) + int(m.group('end_minute')) - if abs(end_ts - start_ts) < 30: - raise ValueError("DBInstance PreferredMaintenanceWindow must be at " - "least 30 minutes long.") - return window - - -def validate_backup_retention_period(days): - """Validate BackupRetentionPeriod for DBInstance""" - - days = positive_integer(days) - if int(days) > 35: - raise ValueError("DBInstance BackupRetentionPeriod cannot be larger " - "than 35 days.") - return days - - -def validate_capacity(capacity): - """Validate ScalingConfiguration capacity for serverless DBCluster""" - - if capacity not in VALID_SCALING_CONFIGURATION_CAPACITIES: - raise ValueError( - "ScalingConfiguration capacity must be one of: {}".format( - ", ".join(map( - str, - VALID_SCALING_CONFIGURATION_CAPACITIES - )) - ) - ) - return capacity - - -class ProcessorFeature(AWSProperty): - props = { - 'Name': (basestring, False), - 'Value': (basestring, False), - } - - -class DBInstance(AWSObject): - resource_type = "AWS::RDS::DBInstance" - - props = { - 'AllocatedStorage': (positive_integer, False), - 'AllowMajorVersionUpgrade': (boolean, False), - 'AutoMinorVersionUpgrade': (boolean, False), - 'AvailabilityZone': (basestring, False), - 'BackupRetentionPeriod': (validate_backup_retention_period, False), - 'CharacterSetName': (basestring, False), - 'CopyTagsToSnapshot': (boolean, False), - 'DBClusterIdentifier': (basestring, False), - 'DBInstanceClass': (basestring, True), - 'DBInstanceIdentifier': (basestring, False), - 'DBName': (basestring, False), - 'DBParameterGroupName': (basestring, False), - 'DBSecurityGroups': (list, False), - 'DBSnapshotIdentifier': (basestring, False), - 'DBSubnetGroupName': (basestring, False), - 'DeleteAutomatedBackups': (boolean, False), - 'DeletionProtection': (boolean, False), - 'Domain': (basestring, False), - 'DomainIAMRoleName': (basestring, False), - 'EnableCloudwatchLogsExports': ([basestring], False), - 'EnableIAMDatabaseAuthentication': (boolean, False), - 'EnablePerformanceInsights': (boolean, False), - 'Engine': (validate_engine, False), - 'EngineVersion': (basestring, False), - 'Iops': (validate_iops, False), - 'KmsKeyId': (basestring, False), - 'LicenseModel': (validate_license_model, False), - 'MasterUsername': (basestring, False), - 'MasterUserPassword': (basestring, False), - 'MonitoringInterval': (positive_integer, False), - 'MonitoringRoleArn': (basestring, False), - 'MultiAZ': (boolean, False), - 'OptionGroupName': (basestring, False), - 'PerformanceInsightsKMSKeyId': (basestring, False), - 'PerformanceInsightsRetentionPeriod': (positive_integer, False), - 'Port': (network_port, False), - 'PreferredBackupWindow': (validate_backup_window, False), - 'PreferredMaintenanceWindow': (basestring, False), - 'ProcessorFeatures': ([ProcessorFeature], False), - 'PromotionTier': (positive_integer, False), - 'PubliclyAccessible': (boolean, False), - 'SourceDBInstanceIdentifier': (basestring, False), - 'SourceRegion': (basestring, False), - 'StorageEncrypted': (boolean, False), - 'StorageType': (basestring, False), - 'Tags': ((Tags, list), False), - 'Timezone': (basestring, False), - 'VPCSecurityGroups': ([basestring], False), - } - - def validate(self): - if 'DBSnapshotIdentifier' not in self.properties: - if 'Engine' not in self.properties: - raise ValueError( - 'Resource Engine is required in type %s' - % self.resource_type) - - if 'SourceDBInstanceIdentifier' in 
self.properties: - - invalid_replica_properties = ( - 'BackupRetentionPeriod', 'DBName', 'MasterUsername', - 'MasterUserPassword', 'PreferredBackupWindow', 'MultiAZ', - 'DBSnapshotIdentifier', - ) - - invalid_properties = [s for s in self.properties.keys() if - s in invalid_replica_properties] - - if invalid_properties: - raise ValueError( - ('{0} properties can\'t be provided when ' - 'SourceDBInstanceIdentifier is present ' - 'AWS::RDS::DBInstance.' - ).format(', '.join(sorted(invalid_properties)))) - - if ('DBSnapshotIdentifier' not in self.properties and - 'SourceDBInstanceIdentifier' not in self.properties) and \ - ('MasterUsername' not in self.properties or - 'MasterUserPassword' not in self.properties) and \ - ('DBClusterIdentifier' not in self.properties): - raise ValueError( - r"Either (MasterUsername and MasterUserPassword) or" - r" DBSnapshotIdentifier are required in type " - r"AWS::RDS::DBInstance." - ) - - if 'KmsKeyId' in self.properties and \ - 'StorageEncrypted' not in self.properties: - raise ValueError( - 'If KmsKeyId is provided, StorageEncrypted is required ' - 'AWS::RDS::DBInstance.' - ) - - nonetype = type(None) - avail_zone = self.properties.get('AvailabilityZone', None) - multi_az = self.properties.get('MultiAZ', None) - if not (isinstance(avail_zone, (AWSHelperFn, nonetype)) and - isinstance(multi_az, (AWSHelperFn, nonetype))): - if avail_zone and multi_az in [True, 1, '1', 'true', 'True']: - raise ValueError("AvailabiltyZone cannot be set on " - "DBInstance if MultiAZ is set to true.") - - storage_type = self.properties.get('StorageType', None) - if storage_type and storage_type == 'io1' and \ - 'Iops' not in self.properties: - raise ValueError("Must specify Iops if using StorageType io1") - - allocated_storage = self.properties.get('AllocatedStorage') - iops = self.properties.get('Iops', None) - if iops and not isinstance(iops, AWSHelperFn): - if not isinstance(allocated_storage, AWSHelperFn) and \ - int(allocated_storage) < 100: - raise ValueError("AllocatedStorage must be at least 100 when " - "Iops is set.") - if not isinstance(allocated_storage, AWSHelperFn) and not \ - isinstance(iops, AWSHelperFn) and \ - float(iops) / float(allocated_storage) > 50.0: - raise ValueError("AllocatedStorage must be no less than " - "1/50th the provisioned Iops") - - return True - - -class DBParameterGroup(AWSObject): - resource_type = "AWS::RDS::DBParameterGroup" - - props = { - 'Description': (basestring, False), - 'Family': (basestring, False), - 'Parameters': (dict, False), - 'Tags': ((Tags, list), False), - } - - -class DBSubnetGroup(AWSObject): - resource_type = "AWS::RDS::DBSubnetGroup" - - props = { - 'DBSubnetGroupDescription': (basestring, True), - 'DBSubnetGroupName': (basestring, False), - 'SubnetIds': (list, True), - 'Tags': ((Tags, list), False), - } - - -class RDSSecurityGroup(AWSProperty): - props = { - 'CIDRIP': (basestring, False), - 'EC2SecurityGroupId': (basestring, False), - 'EC2SecurityGroupName': (basestring, False), - 'EC2SecurityGroupOwnerId': (basestring, False), - } - - -class DBSecurityGroup(AWSObject): - resource_type = "AWS::RDS::DBSecurityGroup" - - props = { - 'EC2VpcId': (basestring, False), - 'DBSecurityGroupIngress': (list, True), - 'GroupDescription': (basestring, True), - 'Tags': ((Tags, list), False), - } - - -class DBSecurityGroupIngress(AWSObject): - resource_type = "AWS::RDS::DBSecurityGroupIngress" - - props = { - 'CIDRIP': (basestring, False), - 'DBSecurityGroupName': (basestring, True), - 'EC2SecurityGroupId': (basestring, False), - 
'EC2SecurityGroupName': (basestring, False), - 'EC2SecurityGroupOwnerId': (basestring, False), - } - - -class EventSubscription(AWSObject): - resource_type = "AWS::RDS::EventSubscription" - - props = { - 'Enabled': (boolean, False), - 'EventCategories': ([basestring], False), - 'SnsTopicArn': (basestring, True), - 'SourceIds': ([basestring], False), - 'SourceType': (basestring, False), - } - - -class OptionSetting(AWSProperty): - props = { - 'Name': (basestring, False), - 'Value': (basestring, False), - } - - -class OptionConfiguration(AWSProperty): - props = { - 'DBSecurityGroupMemberships': ([basestring], False), - 'OptionName': (basestring, True), - 'OptionSettings': ([OptionSetting], False), - 'OptionVersion': (basestring, False), - 'Port': (network_port, False), - 'VpcSecurityGroupMemberships': ([basestring], False), - } - - -class OptionGroup(AWSObject): - resource_type = "AWS::RDS::OptionGroup" - - props = { - 'EngineName': (basestring, True), - 'MajorEngineVersion': (basestring, True), - 'OptionGroupDescription': (basestring, True), - 'OptionConfigurations': ([OptionConfiguration], True), - 'Tags': ((Tags, list), False), - } - - -class DBClusterParameterGroup(AWSObject): - resource_type = "AWS::RDS::DBClusterParameterGroup" - - props = { - 'Description': (basestring, True), - 'Family': (basestring, True), - 'Parameters': (dict, False), - 'Tags': ((Tags, list), False), - } - - -class ScalingConfiguration(AWSProperty): - props = { - 'AutoPause': (boolean, False), - 'MaxCapacity': (validate_capacity, False), - 'MinCapacity': (validate_capacity, False), - 'SecondsUntilAutoPause': (positive_integer, False), - } - - -class DBCluster(AWSObject): - resource_type = "AWS::RDS::DBCluster" - - props = { - 'AvailabilityZones': ([basestring], False), - 'BacktrackWindow': (integer_range(0, 259200), False), - 'BackupRetentionPeriod': (validate_backup_retention_period, False), - 'DatabaseName': (basestring, False), - 'DBClusterIdentifier': (basestring, False), - 'DBClusterParameterGroupName': (basestring, False), - 'DBSubnetGroupName': (basestring, False), - 'DeletionProtection': (boolean, False), - 'EnableCloudwatchLogsExports': ([basestring], False), - 'EnableIAMDatabaseAuthentication': (boolean, False), - 'Engine': (validate_engine, True), - 'EngineMode': (validate_engine_mode, False), - 'EngineVersion': (basestring, False), - 'KmsKeyId': (basestring, False), - 'MasterUsername': (basestring, False), - 'MasterUserPassword': (basestring, False), - 'Port': (network_port, False), - 'PreferredBackupWindow': (validate_backup_window, False), - 'PreferredMaintenanceWindow': (basestring, False), - 'ReplicationSourceIdentifier': (basestring, False), - 'ScalingConfiguration': (ScalingConfiguration, False), - 'SnapshotIdentifier': (basestring, False), - 'StorageEncrypted': (boolean, False), - 'Tags': ((Tags, list), False), - 'VpcSecurityGroupIds': ([basestring], False), - } diff --git a/troposphere/redshift.py b/troposphere/redshift.py deleted file mode 100644 index beaf0df2e..000000000 --- a/troposphere/redshift.py +++ /dev/null @@ -1,98 +0,0 @@ -# Copyright (c) 2014, Guillem Anguera -# All rights reserved. -# -# See LICENSE file for full license. - -from . 
import AWSObject, AWSProperty, Tags -from .validators import boolean, integer - - -class LoggingProperties(AWSProperty): - props = { - 'BucketName': (basestring, True), - 'S3KeyPrefix': (basestring, False), - } - - -class Cluster(AWSObject): - resource_type = "AWS::Redshift::Cluster" - - props = { - 'AllowVersionUpgrade': (boolean, False), - 'AutomatedSnapshotRetentionPeriod': (integer, False), - 'AvailabilityZone': (basestring, False), - 'ClusterIdentifier': (basestring, False), - 'ClusterParameterGroupName': (basestring, False), - 'ClusterSecurityGroups': (list, False), - 'ClusterSubnetGroupName': (basestring, False), - 'ClusterType': (basestring, True), - 'ClusterVersion': (basestring, False), - 'DBName': (basestring, True), - 'ElasticIp': (basestring, False), - 'Encrypted': (boolean, False), - 'HsmClientCertificateIdentifier': (basestring, False), - 'HsmConfigurationIdentifier': (basestring, False), - 'IamRoles': ([basestring], False), - 'KmsKeyId': (basestring, False), - 'LoggingProperties': (LoggingProperties, False), - 'MasterUsername': (basestring, True), - 'MasterUserPassword': (basestring, True), - 'NodeType': (basestring, True), - 'NumberOfNodes': (integer, False), # Conditional - 'OwnerAccount': (basestring, False), - 'Port': (integer, False), - 'PreferredMaintenanceWindow': (basestring, False), - 'PubliclyAccessible': (boolean, False), - 'SnapshotClusterIdentifier': (basestring, False), - 'SnapshotIdentifier': (basestring, False), - 'Tags': (Tags, False), - 'VpcSecurityGroupIds': (list, False), - } - - -class AmazonRedshiftParameter(AWSProperty): - props = { - 'ParameterName': (basestring, True), - 'ParameterValue': (basestring, True), - } - - -class ClusterParameterGroup(AWSObject): - resource_type = "AWS::Redshift::ClusterParameterGroup" - - props = { - 'Description': (basestring, True), - 'ParameterGroupFamily': (basestring, True), - 'Parameters': ([AmazonRedshiftParameter], False), - 'Tags': (Tags, False), - } - - -class ClusterSecurityGroup(AWSObject): - resource_type = "AWS::Redshift::ClusterSecurityGroup" - - props = { - 'Description': (basestring, True), - 'Tags': (Tags, False), - } - - -class ClusterSecurityGroupIngress(AWSObject): - resource_type = "AWS::Redshift::ClusterSecurityGroupIngress" - - props = { - 'ClusterSecurityGroupName': (basestring, True), - 'CIDRIP': (basestring, False), - 'EC2SecurityGroupName': (basestring, False), - 'EC2SecurityGroupOwnerId': (basestring, False), - } - - -class ClusterSubnetGroup(AWSObject): - resource_type = "AWS::Redshift::ClusterSubnetGroup" - - props = { - 'Description': (basestring, True), - 'SubnetIds': (list, True), - 'Tags': (Tags, False), - } diff --git a/troposphere/route53.py b/troposphere/route53.py deleted file mode 100644 index c6eb727bb..000000000 --- a/troposphere/route53.py +++ /dev/null @@ -1,207 +0,0 @@ -# Copyright (c) 2012-2013, Mark Peek -# All rights reserved. -# -# See LICENSE file for full license. - -from . 
import AWSObject, AWSProperty, Tags -from .validators import integer, positive_integer, network_port, boolean - - -VALID_RULETYPES = ('SYSTEM', 'FORWARD') - - -def validate_ruletype(ruletype): - """Validate RuleType for ResolverRule.""" - - if ruletype not in VALID_RULETYPES: - raise ValueError("Rule type must be one of: %s" % - ", ".join(VALID_RULETYPES)) - return ruletype - - -class AliasTarget(AWSProperty): - props = { - 'HostedZoneId': (basestring, True), - 'DNSName': (basestring, True), - 'EvaluateTargetHealth': (boolean, False) - } - - def __init__(self, - hostedzoneid=None, - dnsname=None, - evaluatetargethealth=None, - **kwargs): - # provided for backward compatibility - if hostedzoneid is not None: - kwargs['HostedZoneId'] = hostedzoneid - if dnsname is not None: - kwargs['DNSName'] = dnsname - if evaluatetargethealth is not None: - kwargs['EvaluateTargetHealth'] = evaluatetargethealth - super(AliasTarget, self).__init__(**kwargs) - - -class GeoLocation(AWSProperty): - props = { - 'ContinentCode': (basestring, False), - 'CountryCode': (basestring, False), - 'SubdivisionCode': (basestring, False), - } - - -class BaseRecordSet(object): - props = { - 'AliasTarget': (AliasTarget, False), - 'Comment': (basestring, False), - 'Failover': (basestring, False), - 'GeoLocation': (GeoLocation, False), - 'HealthCheckId': (basestring, False), - 'HostedZoneId': (basestring, False), - 'HostedZoneName': (basestring, False), - 'MultiValueAnswer': (boolean, False), - 'Name': (basestring, True), - 'Region': (basestring, False), - 'ResourceRecords': (list, False), - 'SetIdentifier': (basestring, False), - 'TTL': (integer, False), - 'Type': (basestring, True), - 'Weight': (integer, False), - } - - -class RecordSetType(AWSObject, BaseRecordSet): - # This is a top-level resource - resource_type = "AWS::Route53::RecordSet" - - -class RecordSet(AWSProperty, BaseRecordSet): - # This is for use in a list with RecordSetGroup (below) - pass - - -class RecordSetGroup(AWSObject): - resource_type = "AWS::Route53::RecordSetGroup" - - props = { - 'HostedZoneId': (basestring, False), - 'HostedZoneName': (basestring, False), - 'RecordSets': (list, False), - 'Comment': (basestring, False), - } - - -class AlarmIdentifier(AWSProperty): - props = { - 'Name': (basestring, True), - 'Region': (basestring, True), - } - - -class HealthCheckConfiguration(AWSProperty): - props = { - 'AlarmIdentifier': (AlarmIdentifier, False), - 'ChildHealthChecks': ([basestring], False), - 'EnableSNI': (boolean, False), - 'FailureThreshold': (positive_integer, False), - 'FullyQualifiedDomainName': (basestring, False), - 'HealthThreshold': (positive_integer, False), - 'InsufficientDataHealthStatus': (basestring, False), - 'Inverted': (boolean, False), - 'IPAddress': (basestring, False), - 'MeasureLatency': (boolean, False), - 'Port': (network_port, False), - 'Regions': ([basestring], False), - 'RequestInterval': (positive_integer, False), - 'ResourcePath': (basestring, False), - 'SearchString': (basestring, False), - 'Type': (basestring, True), - } - - -class HealthCheck(AWSObject): - resource_type = "AWS::Route53::HealthCheck" - - props = { - 'HealthCheckConfig': (HealthCheckConfiguration, True), - 'HealthCheckTags': (Tags, False), - } - - -class HostedZoneConfiguration(AWSProperty): - props = { - 'Comment': (basestring, False), - } - - -class HostedZoneVPCs(AWSProperty): - props = { - 'VPCId': (basestring, True), - 'VPCRegion': (basestring, True), - } - - -class QueryLoggingConfig(AWSProperty): - props = { - 'CloudWatchLogsLogGroupArn': 
(basestring, True), - } - - -class HostedZone(AWSObject): - resource_type = "AWS::Route53::HostedZone" - - props = { - 'HostedZoneConfig': (HostedZoneConfiguration, False), - 'HostedZoneTags': (Tags, False), - 'Name': (basestring, True), - 'QueryLoggingConfig': (QueryLoggingConfig, False), - 'VPCs': ([HostedZoneVPCs], False), - } - - -class IpAddressRequest(AWSProperty): - props = { - 'Ip': (basestring, False), - 'SubnetId': (basestring, True), - } - - -class ResolverEndpoint(AWSObject): - resource_type = "AWS::Route53Resolver::ResolverEndpoint" - - props = { - 'Direction': (basestring, True), - 'IpAddresses': ([IpAddressRequest], True), - 'Name': (basestring, False), - 'SecurityGroupIds': ([basestring], True), - 'Tags': (Tags, False), - } - - -class TargetAddress(AWSProperty): - props = { - 'Ip': (basestring, True), - 'Port': (basestring, True), - } - - -class ResolverRule(AWSObject): - resource_type = "AWS::Route53Resolver::ResolverRule" - - props = { - 'DomainName': (basestring, True), - 'Name': (basestring, False), - 'ResolverEndpointId': (basestring, False), - 'RuleType': (validate_ruletype, True), - 'Tags': (Tags, False), - 'TargetIps': ([TargetAddress], False), - } - - -class ResolverRuleAssociation(AWSObject): - resource_type = "AWS::Route53Resolver::ResolverRuleAssociation" - - props = { - 'Name': (basestring, False), - 'ResolverRuleId': (basestring, True), - 'VPCId': (basestring, True), - } diff --git a/troposphere/s3.py b/troposphere/s3.py deleted file mode 100644 index b852528a1..000000000 --- a/troposphere/s3.py +++ /dev/null @@ -1,423 +0,0 @@ -# Copyright (c) 2013, Bob Van Zant -# All rights reserved. -# -# See LICENSE file for full license. -import warnings - -from . import AWSHelperFn, AWSObject, AWSProperty, Tags -from .validators import boolean, positive_integer, s3_bucket_name -from .validators import s3_transfer_acceleration_status - -try: - from awacs.aws import Policy - - policytypes = (dict, Policy) -except ImportError: - policytypes = dict, - -Private = "Private" -PublicRead = "PublicRead" -PublicReadWrite = "PublicReadWrite" -AuthenticatedRead = "AuthenticatedRead" -BucketOwnerRead = "BucketOwnerRead" -BucketOwnerFullControl = "BucketOwnerFullControl" -LogDeliveryWrite = "LogDeliveryWrite" - - -class CorsRules(AWSProperty): - props = { - 'AllowedHeaders': ([basestring], False), - 'AllowedMethods': ([basestring], True), - 'AllowedOrigins': ([basestring], True), - 'ExposedHeaders': ([basestring], False), - 'Id': (basestring, False), - 'MaxAge': (positive_integer, False), - } - - -class CorsConfiguration(AWSProperty): - props = { - 'CorsRules': ([CorsRules], True), - } - - -class VersioningConfiguration(AWSProperty): - props = { - 'Status': (basestring, False), - } - - -class AccelerateConfiguration(AWSProperty): - props = { - 'AccelerationStatus': (s3_transfer_acceleration_status, True), - } - - -class RedirectAllRequestsTo(AWSProperty): - props = { - 'HostName': (basestring, True), - 'Protocol': (basestring, False), - } - - -class RedirectRule(AWSProperty): - props = { - 'HostName': (basestring, False), - 'HttpRedirectCode': (basestring, False), - 'Protocol': (basestring, False), - 'ReplaceKeyPrefixWith': (basestring, False), - 'ReplaceKeyWith': (basestring, False), - } - - -class RoutingRuleCondition(AWSProperty): - props = { - 'HttpErrorCodeReturnedEquals': (basestring, False), - 'KeyPrefixEquals': (basestring, False), - } - - -class RoutingRule(AWSProperty): - props = { - 'RedirectRule': (RedirectRule, True), - 'RoutingRuleCondition': (RoutingRuleCondition, 
False), - } - - -class WebsiteConfiguration(AWSProperty): - props = { - 'IndexDocument': (basestring, False), - 'ErrorDocument': (basestring, False), - 'RedirectAllRequestsTo': (RedirectAllRequestsTo, False), - 'RoutingRules': ([RoutingRule], False), - } - - -class LifecycleRuleTransition(AWSProperty): - props = { - 'StorageClass': (basestring, True), - 'TransitionDate': (basestring, False), - 'TransitionInDays': (positive_integer, False), - } - - -class AbortIncompleteMultipartUpload(AWSProperty): - props = { - 'DaysAfterInitiation': (positive_integer, True), - } - - -class NoncurrentVersionTransition(AWSProperty): - props = { - 'StorageClass': (basestring, True), - 'TransitionInDays': (positive_integer, True), - } - - -class TagFilter(AWSProperty): - props = { - 'Key': (basestring, True), - 'Value': (basestring, True), - } - - -class LifecycleRule(AWSProperty): - props = { - 'AbortIncompleteMultipartUpload': - (AbortIncompleteMultipartUpload, False), - 'ExpirationDate': (basestring, False), - 'ExpirationInDays': (positive_integer, False), - 'Id': (basestring, False), - 'NoncurrentVersionExpirationInDays': (positive_integer, False), - 'NoncurrentVersionTransition': (NoncurrentVersionTransition, False), - 'NoncurrentVersionTransitions': ([NoncurrentVersionTransition], False), - 'Prefix': (basestring, False), - 'Status': (basestring, True), - 'TagFilters': ([TagFilter], False), - 'Transition': (LifecycleRuleTransition, False), - 'Transitions': ([LifecycleRuleTransition], False) - } - - def validate(self): - if 'Transition' in self.properties: - if 'Transitions' not in self.properties: - # aws moved from a single transition to a list of them - # and deprecated 'Transition', so let's just move it to - # the new property and not annoy the user. - self.properties['Transitions'] = [ - self.properties.pop('Transition')] - else: - raise ValueError( - 'Cannot specify both "Transition" and "Transitions" ' - 'properties on S3 Bucket Lifecycle Rule. Please use ' - '"Transitions" since the former has been deprecated.') - - if 'NoncurrentVersionTransition' in self.properties: - if 'NoncurrentVersionTransitions' not in self.properties: - warnings.warn( - 'NoncurrentVersionTransition has been deprecated in ' - 'favour of NoncurrentVersionTransitions.' - ) - # Translate the old transition format to the new format - self.properties['NoncurrentVersionTransitions'] = [ - self.properties.pop('NoncurrentVersionTransition')] - else: - raise ValueError( - 'Cannot specify both "NoncurrentVersionTransition" and ' - '"NoncurrentVersionTransitions" properties on S3 Bucket ' - 'Lifecycle Rule. 
Please use ' - '"NoncurrentVersionTransitions" since the former has been ' - 'deprecated.') - - if 'ExpirationInDays' in self.properties and 'ExpirationDate' in \ - self.properties: - raise ValueError( - 'Cannot specify both "ExpirationDate" and "ExpirationInDays"' - ) - - -class LifecycleConfiguration(AWSProperty): - props = { - 'Rules': ([LifecycleRule], True), - } - - -class LoggingConfiguration(AWSProperty): - props = { - 'DestinationBucketName': (s3_bucket_name, False), - 'LogFilePrefix': (basestring, False), - } - - -class Rules(AWSProperty): - props = { - 'Name': (basestring, True), - 'Value': (basestring, True) - } - - -class S3Key(AWSProperty): - props = { - 'Rules': ([Rules], True) - } - - -class Filter(AWSProperty): - props = { - 'S3Key': (S3Key, True) - } - - -class LambdaConfigurations(AWSProperty): - props = { - 'Event': (basestring, True), - 'Filter': (Filter, False), - 'Function': (basestring, True), - } - - -class QueueConfigurations(AWSProperty): - props = { - 'Event': (basestring, True), - 'Filter': (Filter, False), - 'Queue': (basestring, True), - } - - -class TopicConfigurations(AWSProperty): - props = { - 'Event': (basestring, True), - 'Filter': (Filter, False), - 'Topic': (basestring, True), - } - - -class MetricsConfiguration(AWSProperty): - props = { - 'Id': (basestring, True), - 'Prefix': (basestring, False), - 'TagFilters': ([TagFilter], False), - } - - -class NotificationConfiguration(AWSProperty): - props = { - 'LambdaConfigurations': ([LambdaConfigurations], False), - 'QueueConfigurations': ([QueueConfigurations], False), - 'TopicConfigurations': ([TopicConfigurations], False), - } - - -class AccessControlTranslation(AWSProperty): - props = { - 'Owner': (basestring, True), - } - - -class EncryptionConfiguration(AWSProperty): - props = { - 'ReplicaKmsKeyID': (basestring, True), - } - - -class ReplicationConfigurationRulesDestination(AWSProperty): - props = { - 'AccessControlTranslation': (AccessControlTranslation, False), - 'Account': (basestring, False), - 'Bucket': (basestring, True), - 'EncryptionConfiguration': (EncryptionConfiguration, False), - 'StorageClass': (basestring, False), - } - - -class SseKmsEncryptedObjects(AWSProperty): - props = { - 'Status': (basestring, True), - } - - -class SourceSelectionCriteria(AWSProperty): - props = { - 'SseKmsEncryptedObjects': (SseKmsEncryptedObjects, True), - } - - -class ReplicationConfigurationRules(AWSProperty): - props = { - 'Destination': (ReplicationConfigurationRulesDestination, True), - 'Id': (basestring, False), - 'Prefix': (basestring, True), - 'SourceSelectionCriteria': (SourceSelectionCriteria, False), - 'Status': (basestring, True) - } - - -class ReplicationConfiguration(AWSProperty): - props = { - 'Role': (basestring, True), - 'Rules': ([ReplicationConfigurationRules], True) - } - - -class Destination(AWSProperty): - props = { - 'BucketAccountId': (basestring, False), - 'BucketArn': (basestring, True), - 'Format': (basestring, True), - 'Prefix': (basestring, False), - } - - -class DataExport(AWSProperty): - props = { - 'Destination': (Destination, True), - 'OutputSchemaVersion': (basestring, True), - } - - -class StorageClassAnalysis(AWSProperty): - props = { - 'DataExport': (DataExport, False), - } - - -class AnalyticsConfiguration(AWSProperty): - props = { - 'Id': (basestring, True), - 'Prefix': (basestring, False), - 'StorageClassAnalysis': (StorageClassAnalysis, True), - 'TagFilters': ([TagFilter], False), - } - - -class ServerSideEncryptionByDefault(AWSProperty): - props = { - 'KMSMasterKeyID': 
(basestring, False), - 'SSEAlgorithm': (basestring, True), - } - - -class ServerSideEncryptionRule(AWSProperty): - props = { - 'ServerSideEncryptionByDefault': - (ServerSideEncryptionByDefault, False), - } - - -class BucketEncryption(AWSProperty): - props = { - 'ServerSideEncryptionConfiguration': - ([ServerSideEncryptionRule], True), - } - - -class InventoryConfiguration(AWSProperty): - props = { - 'Destination': (Destination, True), - 'Enabled': (boolean, True), - 'Id': (basestring, True), - 'IncludedObjectVersions': (basestring, True), - 'OptionalFields': ([basestring], True), - 'Prefix': (basestring, False), - 'ScheduleFrequency': (basestring, True), - } - - -class PublicAccessBlockConfiguration(AWSProperty): - props = { - 'BlockPublicAcls': (boolean, False), - 'BlockPublicPolicy': (boolean, False), - 'IgnorePublicAcls': (boolean, False), - 'RestrictPublicBuckets': (boolean, False), - } - - -class Bucket(AWSObject): - resource_type = "AWS::S3::Bucket" - - props = { - 'AccessControl': (basestring, False), - 'AccelerateConfiguration': (AccelerateConfiguration, False), - 'AnalyticsConfigurations': ([AnalyticsConfiguration], False), - 'BucketEncryption': (BucketEncryption, False), - 'BucketName': (s3_bucket_name, False), - 'CorsConfiguration': (CorsConfiguration, False), - 'InventoryConfigurations': ([InventoryConfiguration], False), - 'LifecycleConfiguration': (LifecycleConfiguration, False), - 'LoggingConfiguration': (LoggingConfiguration, False), - 'MetricsConfigurations': ([MetricsConfiguration], False), - 'NotificationConfiguration': (NotificationConfiguration, False), - 'PublicAccessBlockConfiguration': (PublicAccessBlockConfiguration, - False), - 'ReplicationConfiguration': (ReplicationConfiguration, False), - 'Tags': (Tags, False), - 'WebsiteConfiguration': (WebsiteConfiguration, False), - 'VersioningConfiguration': (VersioningConfiguration, False) - } - - access_control_types = [ - Private, - PublicRead, - PublicReadWrite, - AuthenticatedRead, - BucketOwnerRead, - BucketOwnerFullControl, - LogDeliveryWrite, - ] - - def validate(self): - access_control = self.properties.get('AccessControl') - if access_control is not None and \ - not isinstance(access_control, AWSHelperFn): - if access_control not in self.access_control_types: - raise ValueError('AccessControl must be one of "%s"' % ( - ', '.join(self.access_control_types))) - - -class BucketPolicy(AWSObject): - resource_type = "AWS::S3::BucketPolicy" - - props = { - 'Bucket': (basestring, True), - 'PolicyDocument': (policytypes, True), - } diff --git a/troposphere/sagemaker.py b/troposphere/sagemaker.py deleted file mode 100644 index 6de9320de..000000000 --- a/troposphere/sagemaker.py +++ /dev/null @@ -1,99 +0,0 @@ -# Copyright (c) 2012-2018, Mark Peek -# All rights reserved. -# -# See LICENSE file for full license. - -from . 
import AWSObject, AWSProperty, Tags -from .validators import integer - - -class Endpoint(AWSObject): - resource_type = "AWS::SageMaker::Endpoint" - - props = { - 'EndpointName': (basestring, False), - 'EndpointConfigName': (basestring, True), - 'Tags': (Tags, True) - } - - -class ProductionVariant(AWSProperty): - props = { - 'ModelName': (basestring, True), - 'VariantName': (basestring, True), - 'InitialInstanceCount': (integer, True), - 'InstanceType': (basestring, True), - 'InitialVariantWeight': (float, True) - } - - -class EndpointConfig(AWSObject): - resource_type = "AWS::SageMaker::EndpointConfig" - - props = { - 'EndpointConfigName': (basestring, False), - 'ProductionVariants': ([ProductionVariant], True), - 'KmsKeyId': (basestring, False), - 'Tags': (Tags, True) - } - - -class ContainerDefinition(AWSProperty): - props = { - 'ContainerHostname': (basestring, False), - 'Environment': (dict, False), - 'ModelDataUrl': (basestring, False), - 'Image': (basestring, True) - } - - -class VpcConfig(AWSProperty): - props = { - 'Subnets': ([basestring], True), - 'SecurityGroupIds': ([basestring], True) - } - - -class Model(AWSObject): - resource_type = "AWS::SageMaker::Model" - - props = { - 'ExecutionRoleArn': (basestring, True), - 'PrimaryContainer': (ContainerDefinition, True), - 'Containers': ([ContainerDefinition], False), - 'ModelName': (basestring, False), - 'VpcConfig': (VpcConfig, False), - 'Tags': (Tags, False) - } - - -class NotebookInstanceLifecycleHook(AWSProperty): - props = { - 'Content': (basestring, False) - } - - -class NotebookInstanceLifecycleConfig(AWSObject): - resource_type = "AWS::SageMaker::NotebookInstanceLifecycleConfig" - - props = { - 'NotebookInstanceLifecycleConfigName': (basestring, False), - 'OnCreate': ([NotebookInstanceLifecycleHook], False), - 'OnStart': ([NotebookInstanceLifecycleHook], False) - } - - -class NotebookInstance(AWSObject): - resource_type = "AWS::SageMaker::NotebookInstance" - - props = { - 'KmsKeyId': (basestring, False), - 'DirectInternetAccess': (basestring, False), - 'SubnetId': (basestring, False), - 'NotebookInstanceName': (basestring, False), - 'InstanceType': (basestring, True), - 'LifecycleConfigName': (basestring, False), - 'SecurityGroupIds': ([basestring], False), - 'RoleArn': (basestring, True), - 'Tags': (Tags, False) - } diff --git a/troposphere/sdb.py b/troposphere/sdb.py deleted file mode 100644 index 31eca5054..000000000 --- a/troposphere/sdb.py +++ /dev/null @@ -1,12 +0,0 @@ -# Copyright (c) 2012-2013, Mark Peek -# All rights reserved. -# -# See LICENSE file for full license. - -from . import AWSObject - - -class Domain(AWSObject): - resource_type = "AWS::SDB::Domain" - - props = {} diff --git a/troposphere/secretsmanager.py b/troposphere/secretsmanager.py deleted file mode 100644 index 1dd53da6b..000000000 --- a/troposphere/secretsmanager.py +++ /dev/null @@ -1,88 +0,0 @@ -# Copyright (c) 2015, Mark Peek -# All rights reserved. -# -# See LICENSE file for full license. - -from . 
import AWSObject, AWSProperty, Tags -from .validators import integer, boolean - -try: - from awacs.aws import Policy - policytypes = (dict, Policy) -except ImportError: - policytypes = dict, - - -VALID_TARGET_TYPES = ('AWS::RDS::DBInstance', 'AWS::RDS::DBCluster') - - -def validate_target_types(target_type): - """Target types validation rule.""" - - if target_type not in VALID_TARGET_TYPES: - raise ValueError("Target type must be one of : %s" % - ", ".join(VALID_TARGET_TYPES)) - return target_type - - -class ResourcePolicy(AWSObject): - resource_type = "AWS::SecretsManager::ResourcePolicy" - - props = { - 'SecretId': (basestring, True), - 'ResourcePolicy': (policytypes, True), - } - - -class RotationRules(AWSProperty): - props = { - 'AutomaticallyAfterDays': (integer, False), - } - - -class RotationSchedule(AWSObject): - resource_type = "AWS::SecretsManager::RotationSchedule" - - props = { - 'SecretId': (basestring, True), - 'RotationLambdaARN': (basestring, True), - 'RotationRules': (RotationRules, False) - } - - -class SecretTargetAttachment(AWSObject): - resource_type = "AWS::SecretsManager::SecretTargetAttachment" - - props = { - 'SecretId': (basestring, True), - 'TargetId': (basestring, True), - 'TargetType': (validate_target_types, True), - } - - -class GenerateSecretString(AWSProperty): - props = { - 'ExcludeUppercase': (boolean, False), - 'RequireEachIncludedType': (boolean, False), - 'IncludeSpace': (boolean, False), - 'ExcludeCharacters': (basestring, False), - 'GenerateStringKey': (basestring, False), - 'PasswordLength': (integer, False), - 'ExcludePunctuation': (boolean, False), - 'ExcludeLowercase': (boolean, False), - 'SecretStringTemplate': (basestring, False), - 'ExcludeNumbers': (boolean, False), - } - - -class Secret(AWSObject): - resource_type = "AWS::SecretsManager::Secret" - - props = { - 'Description': (basestring, False), - 'KmsKeyId': (basestring, False), - 'SecretString': (basestring, False), - 'GenerateSecretString': (GenerateSecretString, False), - 'Name': (basestring, False), - 'Tags': ((Tags, list), False), - } diff --git a/troposphere/serverless.py b/troposphere/serverless.py deleted file mode 100644 index 7c85af86f..000000000 --- a/troposphere/serverless.py +++ /dev/null @@ -1,342 +0,0 @@ -# Copyright (c) 2017, Fernando Freire -# All rights reserved. -# -# See LICENSE file for full license. - -import types - -from . 
import AWSObject, AWSProperty -from .apigateway import AccessLogSetting, CanarySetting, MethodSetting -from .awslambda import Environment, VPCConfig, validate_memory_size -from .dynamodb import ProvisionedThroughput, SSESpecification -from .s3 import Filter -from .validators import exactly_one, positive_integer -try: - from awacs.aws import PolicyDocument - policytypes = (dict, list, basestring, PolicyDocument) -except ImportError: - policytypes = (dict, list, basestring) - -assert types # silence pyflakes - - -def primary_key_type_validator(x): - valid_types = ["String", "Number", "Binary"] - if x not in valid_types: - raise ValueError("KeyType must be one of: %s" % ", ".join(valid_types)) - return x - - -class DeadLetterQueue(AWSProperty): - props = { - 'Type': (basestring, False), - 'TargetArn': (basestring, False) - } - - def validate(self): - valid_types = ['SQS', 'SNS'] - if ('Type' in self.properties and - self.properties['Type'] not in valid_types): - raise ValueError('Type must be either SQS or SNS') - - -class S3Location(AWSProperty): - props = { - "Bucket": (basestring, True), - "Key": (basestring, True), - "Version": (basestring, False), - } - - -class Hooks(AWSProperty): - props = { - "PreTraffic": (basestring, False), - "PostTraffic": (basestring, False), - } - - -class DeploymentPreference(AWSProperty): - props = { - "Type": (basestring, True), - "Alarms": (list, False), - "Hooks": (Hooks, False), - "Enabled": (bool, False), - } - - -class Function(AWSObject): - resource_type = "AWS::Serverless::Function" - - props = { - 'Handler': (basestring, True), - 'Runtime': (basestring, True), - 'CodeUri': ((S3Location, basestring), False), - 'InlineCode': (basestring, False), - 'FunctionName': (basestring, False), - 'Description': (basestring, False), - 'MemorySize': (validate_memory_size, False), - 'Timeout': (positive_integer, False), - 'Role': (basestring, False), - 'Policies': (policytypes, False), - 'Environment': (Environment, False), - 'VpcConfig': (VPCConfig, False), - 'Events': (dict, False), - 'Tags': (dict, False), - 'Tracing': (basestring, False), - 'KmsKeyArn': (basestring, False), - 'DeadLetterQueue': (DeadLetterQueue, False), - 'DeploymentPreference': (DeploymentPreference, False), - 'Layers': ([basestring], False), - 'AutoPublishAlias': (basestring, False), - 'ReservedConcurrentExecutions': (positive_integer, False), - } - - def validate(self): - conds = [ - 'CodeUri', - 'InlineCode', - ] - exactly_one(self.__class__.__name__, self.properties, conds) - - -class FunctionForPackaging(Function): - """Render Function without requiring 'CodeUri'. - - This exception to the Function spec is for use with the - `cloudformation/sam package` commands which add CodeUri automatically. 
- """ - - resource_type = Function.resource_type - props = Function.props.copy() - props['CodeUri'] = (props['CodeUri'][0], False) - - def validate(self): - pass - - -class CognitoAuthIdentity(AWSProperty): - props = { - 'Header': (basestring, False), - 'ValidationExpression': (basestring, False), - } - - -class LambdaTokenAuthIdentity(AWSProperty): - props = { - 'Header': (basestring, False), - 'ValidationExpression': (basestring, False), - 'ReauthorizeEvery': (basestring, False), - } - - -class LambdaRequestAuthIdentity(AWSProperty): - props = { - 'Headers': ([basestring], False), - 'QueryStrings': ([basestring], False), - 'StageVariables': ([basestring], False), - 'Context': ([basestring], False), - 'ReauthorizeEvery': (basestring, False), - } - - -class CognitoAuth(AWSProperty): - props = { - 'UserPoolArn': (basestring, False), - 'Identity': (CognitoAuthIdentity, False), - } - - -class LambdaTokenAuth(AWSProperty): - props = { - 'FunctionPayloadType': (basestring, False), - 'FunctionArn': (basestring, False), - 'FunctionInvokeRole': (basestring, False), - 'Identity': (LambdaTokenAuthIdentity, False), - } - - -class LambdaRequestAuth(AWSProperty): - props = { - 'FunctionPayloadType': (basestring, False), - 'FunctionArn': (basestring, False), - 'FunctionInvokeRole': (basestring, False), - 'Identity': (LambdaRequestAuthIdentity, False), - } - - -class Authorizers(AWSProperty): - props = { - 'DefaultAuthorizer': (basestring, False), - 'CognitoAuth': (CognitoAuth, False), - 'LambdaTokenAuth': (LambdaTokenAuth, False), - 'LambdaRequestAuth': (LambdaRequestAuth, False), - } - - -class Auth(AWSProperty): - props = { - 'DefaultAuthorizer': (basestring, False), - 'Authorizers': (Authorizers, False), - } - - -class Cors(AWSProperty): - props = { - 'AllowCredentials': (basestring, False), - 'AllowHeaders': (basestring, False), - 'AllowMethods': (basestring, False), - 'AllowOrigin': (basestring, True), - 'MaxAge': (basestring, False), - } - - -class Api(AWSObject): - resource_type = "AWS::Serverless::Api" - - props = { - 'AccessLogSetting': (AccessLogSetting, False), - 'Auth': (Auth, False), - 'BinaryMediaTypes': ([basestring], False), - 'CacheClusterEnabled': (bool, False), - 'CacheClusterSize': (basestring, False), - 'CanarySetting': (CanarySetting, False), - 'Cors': ((basestring, Cors), False), - 'DefinitionBody': (dict, False), - 'DefinitionUri': (basestring, False), - 'EndpointConfiguration': (basestring, False), - 'MethodSetting': (MethodSetting, False), - 'Name': (basestring, False), - 'StageName': (basestring, True), - "TracingEnabled": (bool, False), - 'Variables': (dict, False), - } - - def validate(self): - conds = [ - 'DefinitionBody', - 'DefinitionUri', - ] - exactly_one(self.__class__.__name__, self.properties, conds) - - -class PrimaryKey(AWSProperty): - props = { - 'Name': (basestring, False), - 'Type': (primary_key_type_validator, False) - } - - -class SimpleTable(AWSObject): - resource_type = "AWS::Serverless::SimpleTable" - - props = { - 'PrimaryKey': (PrimaryKey, False), - 'ProvisionedThroughput': (ProvisionedThroughput, False), - 'SSESpecification': (SSESpecification, False), - 'Tags': (dict, False), - 'TableName': (basestring, False), - } - - -class S3Event(AWSObject): - resource_type = 'S3' - - props = { - 'Bucket': (basestring, True), - 'Events': (list, True), - 'Filter': (Filter, False) - } - - -class SNSEvent(AWSObject): - resource_type = 'SNS' - - props = { - 'Topic': (basestring, True) - } - - -def starting_position_validator(x): - valid_types = ['TRIM_HORIZON', 
'LATEST'] - if x not in valid_types: - raise ValueError( - "StartingPosition must be one of: %s" - % ", ".join(valid_types) - ) - return x - - -class KinesisEvent(AWSObject): - resource_type = 'Kinesis' - - props = { - 'Stream': (basestring, True), - 'StartingPosition': (starting_position_validator, True), - 'BatchSize': (positive_integer, False) - } - - -class DynamoDBEvent(AWSObject): - resource_type = 'DynamoDB' - - props = { - 'Stream': (basestring, True), - 'StartingPosition': (starting_position_validator, True), - 'BatchSize': (positive_integer, False) - } - - -class ApiEvent(AWSObject): - resource_type = 'Api' - - props = { - 'Path': (basestring, True), - 'Method': (basestring, True), - 'RestApiId': (basestring, False) - } - - -class ScheduleEvent(AWSObject): - resource_type = 'Schedule' - - props = { - 'Schedule': (basestring, True), - 'Input': (basestring, False) - } - - -class CloudWatchEvent(AWSObject): - resource_type = 'CloudWatchEvent' - - props = { - 'Pattern': (dict, True), - 'Input': (basestring, False), - 'InputPath': (basestring, False) - } - - -class IoTRuleEvent(AWSObject): - resource_type = 'IoTRule' - - props = { - 'Sql': (basestring, True), - 'AwsIotSqlVersion': (basestring, False) - } - - -class AlexaSkillEvent(AWSObject): - resource_type = 'AlexaSkill' - props = {} - - -class SQSEvent(AWSObject): - resource_type = 'SQS' - - props = { - 'Queue': (basestring, True), - 'BatchSize': (positive_integer, True) - } - - def validate(self): - if (not 1 <= self.properties['BatchSize'] <= 10): - raise ValueError('BatchSize must be between 1 and 10') diff --git a/troposphere/servicecatalog.py b/troposphere/servicecatalog.py deleted file mode 100644 index 90018bd7c..000000000 --- a/troposphere/servicecatalog.py +++ /dev/null @@ -1,165 +0,0 @@ -# Copyright (c) 2012-2018, Mark Peek -# All rights reserved. -# -# See LICENSE file for full license. - -from . 
import AWSObject, AWSProperty, Tags -from .validators import boolean - - -class AcceptedPortfolioShare(AWSObject): - resource_type = "AWS::ServiceCatalog::AcceptedPortfolioShare" - - props = { - 'AcceptLanguage': (basestring, False), - 'PortfolioId': (basestring, True), - } - - -class ProvisioningArtifactProperties(AWSProperty): - props = { - 'Description': (basestring, False), - 'Info': (dict, True), - 'Name': (basestring, False), - } - - -class CloudFormationProduct(AWSObject): - resource_type = "AWS::ServiceCatalog::CloudFormationProduct" - - props = { - 'AcceptLanguage': (basestring, False), - 'Description': (basestring, False), - 'Distributor': (basestring, False), - 'Name': (basestring, True), - 'Owner': (basestring, True), - 'ProvisioningArtifactParameters': - ([ProvisioningArtifactProperties], True), - 'SupportDescription': (basestring, False), - 'SupportEmail': (basestring, False), - 'SupportUrl': (basestring, False), - 'Tags': (Tags, False), - } - - -class ProvisioningParameter(AWSProperty): - props = { - 'Key': (basestring, False), - 'Value': (basestring, False), - } - - -class CloudFormationProvisionedProduct(AWSObject): - resource_type = "AWS::ServiceCatalog::CloudFormationProvisionedProduct" - - props = { - 'AcceptLanguage': (basestring, False), - 'NotificationArns': ([basestring], False), - 'PathId': (basestring, False), - 'ProductId': (basestring, False), - 'ProductName': (basestring, False), - 'ProvisionedProductName': (basestring, False), - 'ProvisioningArtifactId': (basestring, False), - 'ProvisioningArtifactName': (basestring, False), - 'ProvisioningParameters': ([ProvisioningParameter], False), - 'Tags': (Tags, False), - } - - -class LaunchNotificationConstraint(AWSObject): - resource_type = "AWS::ServiceCatalog::LaunchNotificationConstraint" - - props = { - 'AcceptLanguage': (basestring, False), - 'Description': (basestring, False), - 'NotificationArns': ([basestring], True), - 'PortfolioId': (basestring, True), - 'ProductId': (basestring, True), - } - - -class LaunchRoleConstraint(AWSObject): - resource_type = "AWS::ServiceCatalog::LaunchRoleConstraint" - - props = { - 'AcceptLanguage': (basestring, False), - 'Description': (basestring, False), - 'PortfolioId': (basestring, True), - 'ProductId': (basestring, True), - 'RoleArn': (basestring, True), - } - - -class LaunchTemplateConstraint(AWSObject): - resource_type = "AWS::ServiceCatalog::LaunchTemplateConstraint" - - props = { - 'AcceptLanguage': (basestring, False), - 'Description': (basestring, False), - 'PortfolioId': (basestring, True), - 'ProductId': (basestring, True), - 'Rules': (basestring, True), - } - - -class Portfolio(AWSObject): - resource_type = "AWS::ServiceCatalog::Portfolio" - - props = { - 'AcceptLanguage': (basestring, False), - 'Description': (basestring, False), - 'DisplayName': (basestring, True), - 'ProviderName': (basestring, True), - 'Tags': (Tags, False), - } - - -class PortfolioPrincipalAssociation(AWSObject): - resource_type = "AWS::ServiceCatalog::PortfolioPrincipalAssociation" - - props = { - 'AcceptLanguage': (basestring, False), - 'PortfolioId': (basestring, True), - 'PrincipalARN': (basestring, True), - 'PrincipalType': (basestring, True), - } - - -class PortfolioProductAssociation(AWSObject): - resource_type = "AWS::ServiceCatalog::PortfolioProductAssociation" - - props = { - 'AcceptLanguage': (basestring, False), - 'PortfolioId': (basestring, True), - 'ProductId': (basestring, True), - 'SourcePortfolioId': (basestring, False), - } - - -class PortfolioShare(AWSObject): - 
resource_type = "AWS::ServiceCatalog::PortfolioShare" - - props = { - 'AcceptLanguage': (basestring, False), - 'AccountId': (basestring, True), - 'PortfolioId': (basestring, True), - } - - -class TagOption(AWSObject): - resource_type = "AWS::ServiceCatalog::TagOption" - - props = { - 'Active': (boolean, False), - 'Key': (basestring, True), - 'Value': (basestring, True), - } - - -class TagOptionAssociation(AWSObject): - resource_type = "AWS::ServiceCatalog::TagOptionAssociation" - - props = { - 'ResourceId': (basestring, True), - 'TagOptionId': (basestring, True), - } diff --git a/troposphere/servicediscovery.py b/troposphere/servicediscovery.py deleted file mode 100644 index de18faa24..000000000 --- a/troposphere/servicediscovery.py +++ /dev/null @@ -1,86 +0,0 @@ -# Copyright (c) 2012-2017, Mark Peek -# All rights reserved. -# -# See LICENSE file for full license. - -from . import AWSObject, AWSProperty - - -class Instance(AWSObject): - resource_type = "AWS::ServiceDiscovery::Instance" - - props = { - 'InstanceAttributes': (dict, True), - 'InstanceId': (basestring, False), - 'ServiceId': (basestring, True), - } - - -class PrivateDnsNamespace(AWSObject): - resource_type = "AWS::ServiceDiscovery::PrivateDnsNamespace" - - props = { - 'Description': (basestring, False), - 'Name': (basestring, True), - 'Vpc': (basestring, True), - } - - -class PublicDnsNamespace(AWSObject): - resource_type = "AWS::ServiceDiscovery::PublicDnsNamespace" - - props = { - 'Description': (basestring, False), - 'Name': (basestring, True), - } - - -class HealthCheckConfig(AWSProperty): - props = { - 'FailureThreshold': (float, False), - 'ResourcePath': (basestring, False), - 'Type': (basestring, True), - } - - -class HealthCheckCustomConfig(AWSProperty): - props = { - 'FailureThreshold': (float, True) - } - - -class DnsRecord(AWSProperty): - props = { - 'TTL': (basestring, True), - 'Type': (basestring, True), - } - - -class DnsConfig(AWSProperty): - props = { - 'DnsRecords': ([DnsRecord], True), - 'NamespaceId': (basestring, True), - 'RoutingPolicy': (basestring, False), - } - - -class Service(AWSObject): - resource_type = "AWS::ServiceDiscovery::Service" - - props = { - 'Description': (basestring, False), - 'DnsConfig': (DnsConfig, True), - 'HealthCheckConfig': (HealthCheckConfig, False), - 'HealthCheckCustomConfig': (HealthCheckCustomConfig, False), - 'Name': (basestring, False), - 'NamespaceId': (basestring, False), - } - - -class HttpNamespace(AWSObject): - resource_type = "AWS::ServiceDiscovery::HttpNamespace" - - props = { - 'Description': (basestring, False), - 'Name': (basestring, True), - } diff --git a/troposphere/ses.py b/troposphere/ses.py deleted file mode 100644 index 745046fd9..000000000 --- a/troposphere/ses.py +++ /dev/null @@ -1,190 +0,0 @@ -# Copyright (c) 2012-2018, Mark Peek -# All rights reserved. -# -# See LICENSE file for full license. - -from . 
import AWSObject, AWSProperty -from .validators import boolean - - -class DimensionConfiguration(AWSProperty): - props = { - 'DefaultDimensionValue': (basestring, True), - 'DimensionName': (basestring, True), - 'DimensionValueSource': (basestring, True), - } - - -class CloudWatchDestination(AWSProperty): - props = { - 'DimensionConfigurations': ([DimensionConfiguration], False), - } - - -class KinesisFirehoseDestination(AWSProperty): - props = { - 'DeliveryStreamARN': (basestring, True), - 'IAMRoleARN': (basestring, True), - } - - -class EventDestination(AWSProperty): - props = { - 'CloudWatchDestination': (CloudWatchDestination, False), - 'Enabled': (boolean, False), - 'KinesisFirehoseDestination': (KinesisFirehoseDestination, False), - 'MatchingEventTypes': ([basestring], True), - 'Name': (basestring, False), - } - - -class ConfigurationSetEventDestination(AWSObject): - resource_type = "AWS::SES::ConfigurationSetEventDestination" - - props = { - 'ConfigurationSetName': (basestring, True), - 'EventDestination': (EventDestination, True), - } - - -class ConfigurationSet(AWSObject): - resource_type = "AWS::SES::ConfigurationSet" - - props = { - 'Name': (basestring, False), - } - - -class IpFilter(AWSProperty): - props = { - 'Cidr': (basestring, True), - 'Policy': (basestring, True), - } - - -class Filter(AWSProperty): - props = { - 'IpFilter': (IpFilter, True), - 'Name': (basestring, False), - } - - -class ReceiptFilter(AWSObject): - resource_type = "AWS::SES::ReceiptFilter" - - props = { - 'Filter': (Filter, True), - } - - -class ReceiptRuleSet(AWSObject): - resource_type = "AWS::SES::ReceiptRuleSet" - - props = { - 'RuleSetName': (basestring, False), - } - - -class AddHeaderAction(AWSProperty): - props = { - 'HeaderName': (basestring, True), - 'HeaderValue': (basestring, True), - } - - -class BounceAction(AWSProperty): - props = { - 'Message': (basestring, True), - 'Sender': (basestring, True), - 'SmtpReplyCode': (basestring, True), - 'StatusCode': (basestring, False), - 'TopicArn': (basestring, False), - } - - -class LambdaAction(AWSProperty): - props = { - 'FunctionArn': (basestring, True), - 'InvocationType': (basestring, False), - 'TopicArn': (basestring, False), - } - - -class S3Action(AWSProperty): - props = { - 'BucketName': (basestring, True), - 'KmsKeyArn': (basestring, False), - 'ObjectKeyPrefix': (basestring, False), - 'TopicArn': (basestring, False), - } - - -class SNSAction(AWSProperty): - props = { - 'Encoding': (basestring, False), - 'TopicArn': (basestring, False), - } - - -class StopAction(AWSProperty): - props = { - 'Scope': (basestring, True), - 'TopicArn': (basestring, False), - } - - -class WorkmailAction(AWSProperty): - props = { - 'OrganizationArn': (basestring, True), - 'TopicArn': (basestring, False), - } - - -class Action(AWSProperty): - props = { - 'AddHeaderAction': (AddHeaderAction, False), - 'BounceAction': (BounceAction, False), - 'LambdaAction': (LambdaAction, False), - 'S3Action': (S3Action, False), - 'SNSAction': (SNSAction, False), - 'StopAction': (StopAction, False), - 'WorkmailAction': (WorkmailAction, False), - } - - -class Rule(AWSProperty): - props = { - 'Actions': ([Action], False), - 'Enabled': (boolean, False), - 'Name': (basestring, False), - 'Recipients': ([basestring], False), - 'ScanEnabled': (boolean, False), - 'TlsPolicy': (basestring, False), - } - - -class ReceiptRule(AWSObject): - resource_type = "AWS::SES::ReceiptRule" - - props = { - 'After': (basestring, False), - 'Rule': (Rule, True), - 'RuleSetName': (basestring, True), - } - - 
-class EmailTemplate(AWSProperty): - props = { - 'HtmlPart': (basestring, False), - 'SubjectPart': (basestring, False), - 'TemplateName': (basestring, False), - 'TextPart': (basestring, False), - } - - -class Template(AWSObject): - resource_type = "AWS::SES::Template" - - props = { - 'Template': (EmailTemplate, False), - } diff --git a/troposphere/sns.py b/troposphere/sns.py deleted file mode 100644 index 405beb2d7..000000000 --- a/troposphere/sns.py +++ /dev/null @@ -1,53 +0,0 @@ -# Copyright (c) 2012-2013, Mark Peek -# All rights reserved. -# -# See LICENSE file for full license. - -from . import AWSObject, AWSProperty -from .validators import boolean -try: - from awacs.aws import Policy - policytypes = (dict, Policy) -except ImportError: - policytypes = dict, - - -class Subscription(AWSProperty): - props = { - 'Endpoint': (basestring, True), - 'Protocol': (basestring, True), - } - - -class SubscriptionResource(AWSObject): - resource_type = "AWS::SNS::Subscription" - - props = { - 'DeliveryPolicy': (dict, False), - 'Endpoint': (basestring, False), - 'FilterPolicy': (dict, False), - 'Protocol': (basestring, True), - 'RawMessageDelivery': (boolean, False), - 'Region': (basestring, False), - 'TopicArn': (basestring, True), - } - - -class TopicPolicy(AWSObject): - resource_type = "AWS::SNS::TopicPolicy" - - props = { - 'PolicyDocument': (policytypes, True), - 'Topics': (list, True), - } - - -class Topic(AWSObject): - resource_type = "AWS::SNS::Topic" - - props = { - 'DisplayName': (basestring, False), - 'KmsMasterKeyId': (basestring, False), - 'Subscription': ([Subscription], False), - 'TopicName': (basestring, False), - } diff --git a/troposphere/sqs.py b/troposphere/sqs.py deleted file mode 100644 index 4a4eff1fa..000000000 --- a/troposphere/sqs.py +++ /dev/null @@ -1,56 +0,0 @@ -# Copyright (c) 2012-2013, Mark Peek -# All rights reserved. -# -# See LICENSE file for full license. - -from . import AWSHelperFn, AWSObject, AWSProperty, Tags -from .validators import integer -try: - from awacs.aws import Policy - policytypes = (dict, Policy) -except ImportError: - policytypes = dict, - - -class RedrivePolicy(AWSProperty): - props = { - 'deadLetterTargetArn': (basestring, False), - 'maxReceiveCount': (integer, False), - } - - -class Queue(AWSObject): - resource_type = "AWS::SQS::Queue" - - props = { - 'ContentBasedDeduplication': (bool, False), - 'DelaySeconds': (integer, False), - 'FifoQueue': (bool, False), - 'KmsMasterKeyId': (basestring, False), - 'KmsDataKeyReusePeriodSeconds': (integer, False), - 'MaximumMessageSize': (integer, False), - 'MessageRetentionPeriod': (integer, False), - 'QueueName': (basestring, False), - 'ReceiveMessageWaitTimeSeconds': (integer, False), - 'RedrivePolicy': (RedrivePolicy, False), - 'Tags': (Tags, False), - 'VisibilityTimeout': (integer, False), - } - - def validate(self): - if self.properties.get('FifoQueue'): - queuename = self.properties.get('QueueName') - if queuename is None or isinstance(queuename, AWSHelperFn): - pass - elif not queuename.endswith('.fifo'): - raise ValueError("SQS: FIFO queues need to provide a " - "QueueName that ends with '.fifo'") - - -class QueuePolicy(AWSObject): - resource_type = "AWS::SQS::QueuePolicy" - - props = { - 'PolicyDocument': (policytypes, False), - 'Queues': (list, True), - } diff --git a/troposphere/ssm.py b/troposphere/ssm.py deleted file mode 100644 index 051a0cdf3..000000000 --- a/troposphere/ssm.py +++ /dev/null @@ -1,234 +0,0 @@ -# Copyright (c) 2015, Mark Peek -# All rights reserved. 
-# -# See LICENSE file for full license. - -from . import AWSObject, AWSProperty, Tags -from .validators import (integer, boolean, s3_bucket_name, notification_type, - notification_event, json_checker, task_type, - operating_system, compliance_level) - - -class NotificationConfig(AWSProperty): - props = { - 'NotificationArn': (basestring, False), - 'NotificationEvents': (notification_event, False), - 'NotificationType': (notification_type, False), - } - - -class LoggingInfo(AWSProperty): - props = { - 'Region': (basestring, True), - 'S3Bucket': (s3_bucket_name, True), - 'S3Prefix': (basestring, False), - } - - -class MaintenanceWindowAutomationParameters(AWSProperty): - props = { - 'DocumentVersion': (basestring, False), - 'Parameters': (dict, False), - } - - -class MaintenanceWindowLambdaParameters(AWSProperty): - props = { - 'ClientContext': (basestring, False), - 'Payload': (json_checker, False), - 'Qualifier': (basestring, False), - } - - -class MaintenanceWindowRunCommandParameters(AWSProperty): - props = { - 'Comment': (basestring, False), - 'DocumentHash': (basestring, False), - 'DocumentHashType': (basestring, False), - 'NotificationConfig': (NotificationConfig, False), - 'OutputS3BucketName': (s3_bucket_name, False), - 'OutputS3KeyPrefix': (basestring, False), - 'Parameters': (dict, False), - 'ServiceRoleArn': (basestring, False), - 'TimeoutSeconds': (integer, False), - } - - -class MaintenanceWindowStepFunctionsParameters(AWSProperty): - props = { - 'Input': (basestring, False), - 'Name': (basestring, False), - } - - -class PatchFilter(AWSProperty): - props = { - 'Key': (basestring, True), - 'Values': ([basestring], True), - } - - -class PatchFilterGroup(AWSProperty): - props = { - 'PatchFilters': ([PatchFilter], False), - } - - -class Rule(AWSProperty): - props = { - 'ApproveAfterDays': (integer, False), - 'ComplianceLevel': (compliance_level, False), - 'PatchFilterGroup': (PatchFilterGroup, False), - } - - -class RuleGroup(AWSProperty): - props = { - 'PatchRules': ([Rule], False), - } - - -class TaskInvocationParameters(AWSProperty): - props = { - 'MaintenanceWindowAutomationParameters': - (MaintenanceWindowAutomationParameters, False), - 'MaintenanceWindowLambdaParameters': - (MaintenanceWindowLambdaParameters, False), - 'MaintenanceWindowRunCommandParameters': - (MaintenanceWindowRunCommandParameters, False), - 'MaintenanceWindowStepFunctionsParameters': - (MaintenanceWindowStepFunctionsParameters, False), - } - - -class Targets(AWSProperty): - props = { - 'Key': (basestring, True), - 'Values': ([basestring], True), - } - - -class S3OutputLocation(AWSProperty): - props = { - 'OutputS3BucketName': (basestring, False), - 'OutputS3KeyPrefix': (basestring, False), - } - - -class InstanceAssociationOutputLocation(AWSProperty): - props = { - 'S3Location': (S3OutputLocation, False), - } - - -class Association(AWSObject): - resource_type = "AWS::SSM::Association" - - props = { - 'AssociationName': (basestring, False), - 'DocumentVersion': (basestring, False), - 'InstanceId': (basestring, False), - 'Name': (basestring, True), - 'OutputLocation': (InstanceAssociationOutputLocation, False), - 'Parameters': (dict, False), - 'ScheduleExpression': (basestring, False), - 'Targets': ([Targets], False), - } - - -class Document(AWSObject): - resource_type = "AWS::SSM::Document" - - props = { - # Need a better implementation of the SSM Document - 'Content': (dict, True), - 'DocumentType': (basestring, False), - 'Tags': (Tags, False), - } - - -class MaintenanceWindow(AWSObject): - resource_type 
= "AWS::SSM::MaintenanceWindow" - - props = { - 'AllowUnassociatedTargets': (boolean, True), - 'Cutoff': (integer, True), - 'Description': (basestring, False), - 'Duration': (integer, True), - 'Name': (basestring, True), - 'Schedule': (basestring, True), - } - - -class MaintenanceWindowTarget(AWSObject): - resource_type = "AWS::SSM::MaintenanceWindowTarget" - - props = { - 'Description': (basestring, False), - 'Name': (basestring, False), - 'OwnerInformation': (basestring, False), - 'ResourceType': (basestring, True), - 'Targets': ([Targets], True), - 'WindowId': (basestring, True), - } - - -class MaintenanceWindowTask(AWSObject): - resource_type = "AWS::SSM::MaintenanceWindowTask" - - props = { - 'Description': (basestring, False), - 'LoggingInfo': (LoggingInfo, False), - 'MaxConcurrency': (basestring, False), - 'MaxErrors': (basestring, True), - 'Name': (basestring, False), - 'Priority': (integer, True), - 'ServiceRoleArn': (basestring, True), - 'Targets': ([Targets], True), - 'TaskArn': (basestring, True), - 'TaskInvocationParameters': (TaskInvocationParameters, False), - 'TaskParameters': (dict, False), - 'TaskType': (task_type, True), - 'WindowId': (basestring, False), - } - - -class Parameter(AWSObject): - resource_type = "AWS::SSM::Parameter" - - props = { - 'AllowedPattern': (basestring, False), - 'Description': (basestring, False), - 'Name': (basestring, False), - 'Type': (basestring, True), - 'Value': (basestring, True), - } - - -class PatchBaseline(AWSObject): - resource_type = "AWS::SSM::PatchBaseline" - - props = { - 'ApprovalRules': (RuleGroup, False), - 'ApprovedPatches': ([basestring], False), - 'ApprovedPatchesComplianceLevel': (compliance_level, False), - 'Description': (basestring, False), - 'GlobalFilters': (PatchFilterGroup, False), - 'Name': (basestring, True), - 'OperatingSystem': (operating_system, False), - 'PatchGroups': ([basestring], False), - 'RejectedPatches': ([basestring], False), - } - - -class ResourceDataSync(AWSObject): - resource_type = "AWS::SSM::ResourceDataSync" - - props = { - 'BucketName': (basestring, True), - 'BucketPrefix': (basestring, False), - 'BucketRegion': (basestring, True), - 'KMSKeyArn': (basestring, False), - 'SyncFormat': (basestring, True), - 'SyncName': (basestring, True), - } diff --git a/troposphere/stepfunctions.py b/troposphere/stepfunctions.py deleted file mode 100644 index e6a0c0d6d..000000000 --- a/troposphere/stepfunctions.py +++ /dev/null @@ -1,23 +0,0 @@ -# Copyright (c) 2012-2013, Mark Peek -# All rights reserved. -# -# See LICENSE file for full license. - -from . import AWSObject - - -class Activity(AWSObject): - resource_type = "AWS::StepFunctions::Activity" - props = { - 'Name': (basestring, True), - } - - -class StateMachine(AWSObject): - resource_type = "AWS::StepFunctions::StateMachine" - props = { - 'StateMachineName': (basestring, False), - 'DefinitionString': (basestring, True), - 'RoleArn': (basestring, True), - - } diff --git a/troposphere/template_generator.py b/troposphere/template_generator.py deleted file mode 100644 index f218810e7..000000000 --- a/troposphere/template_generator.py +++ /dev/null @@ -1,417 +0,0 @@ -""" -This module makes it possible to instantiate a new Troposphere Template object -from an existing CloudFormation Template. 
- -Usage: - from troposphere.template_generator import TemplateGenerator - import json - - with open("myCloudFormationTemplate.json") as f: - json_template = json.load(f) - - template = TemplateGenerator(json_template) - template.to_json() -""" - -import inspect -import pkgutil -import importlib -import os - -from collections import Sequence, Mapping - -from troposphere import ( - Template, Ref, - Output, Parameter, # AWSDeclarations - AWSObject, # covers resources - AWSHelperFn, GenericHelperFn, # covers ref, fn::, etc - Tags, autoscaling, cloudformation, Export) -from troposphere.policies import UpdatePolicy, CreationPolicy - - -class TemplateGenerator(Template): - DEPRECATED_MODULES = ['troposphere.dynamodb2'] - - _inspect_members = set() - _inspect_resources = {} - _custom_members = set() - _inspect_functions = {} - - def __init__(self, cf_template, **kwargs): - """ - Instantiates a new Troposphere Template based on an existing - Cloudformation Template. - """ - super(TemplateGenerator, self).__init__() - if 'CustomMembers' in kwargs: - self._custom_members = set(kwargs["CustomMembers"]) - - self._reference_map = {} - if 'AWSTemplateFormatVersion' in cf_template: - self.add_version(cf_template['AWSTemplateFormatVersion']) - if 'Transform' in cf_template: - self.add_transform(cf_template['Transform']) - if 'Description' in cf_template: - self.add_description(cf_template['Description']) - if 'Metadata' in cf_template: - self.add_metadata(cf_template['Metadata']) - for k, v in cf_template.get('Parameters', {}).iteritems(): - self.add_parameter(self._create_instance(Parameter, v, k)) - for k, v in cf_template.get('Mappings', {}).iteritems(): - self.add_mapping(k, self._convert_definition(v)) - for k, v in cf_template.get('Conditions', {}).iteritems(): - self.add_condition(k, self._convert_definition(v, k)) - for k, v in cf_template.get('Resources', {}).iteritems(): - self.add_resource(self._convert_definition( - v, k, - self._get_resource_type_cls(k, v) - )) - for k, v in cf_template.get('Outputs', {}).iteritems(): - self.add_output(self._create_instance(Output, v, k)) - - @property - def inspect_members(self): - """ - Returns the list of all troposphere members we are able to - construct - """ - if not self._inspect_members: - TemplateGenerator._inspect_members = \ - self._import_all_troposphere_modules() - return self._inspect_members - - @property - def inspect_resources(self): - """ Returns a map of `ResourceType: ResourceClass` """ - if not self._inspect_resources: - d = {} - for m in self.inspect_members: - if issubclass(m, (AWSObject, cloudformation.AWSCustomObject)) \ - and hasattr(m, 'resource_type'): - d[m.resource_type] = m - - TemplateGenerator._inspect_resources = d - - return self._inspect_resources - - @property - def inspect_functions(self): - """ Returns a map of `FunctionName: FunctionClass` """ - if not self._inspect_functions: - d = {} - for m in self.inspect_members: - if issubclass(m, AWSHelperFn): - d[m.__name__] = m - - TemplateGenerator._inspect_functions = d - - return self._inspect_functions - - def _get_resource_type_cls(self, name, resource): - """Attempts to return troposphere class that represents Type of - provided resource. Attempts to find the troposphere class who's - `resource_type` field is the same as the provided resources `Type` - field. 
- - :param resource: Resource to find troposphere class for - :return: None: If no class found for provided resource - type: Type of provided resource - :raise ResourceTypeNotDefined: - Provided resource does not have a `Type` field - """ - # If provided resource does not have `Type` field - if 'Type' not in resource: - raise ResourceTypeNotDefined(name) - - # Attempt to find troposphere resource with: - # `resource_type` == resource['Type'] - try: - return self.inspect_resources[resource['Type']] - except KeyError: - # is there a custom mapping? - for custom_member in self._custom_members: - if custom_member.resource_type == resource['Type']: - return custom_member - # If no resource with `resource_type` == resource['Type'] found - return None - - def _convert_definition(self, definition, ref=None, cls=None): - """ - Converts any object to its troposphere equivalent, if applicable. - This function will recurse into lists and mappings to create - additional objects as necessary. - - :param {*} definition: Object to convert - :param str ref: Name of key in parent dict that the provided definition - is from, can be None - :param type cls: Troposphere class which represents provided definition - """ - if isinstance(definition, Mapping): - if 'Type' in definition: # this is an AWS Resource - expected_type = None - if cls is not None: - expected_type = cls - else: - # if the user uses the custom way to name custom resources, - # we'll dynamically create a new subclass for this use and - # pass that instead of the typical CustomObject resource - try: - expected_type = self._generate_custom_type( - definition['Type']) - except TypeError: - # If definition['Type'] turns out not to be a custom - # type (aka doesn't start with "Custom::") - if ref is not None: - raise ResourceTypeNotFound(ref, definition['Type']) - else: - # Make sure expected_type is nothing (as - # it always should be) - assert not expected_type - - if expected_type: - args = self._normalize_properties(definition) - return self._create_instance(expected_type, args, ref) - - if len(definition) == 1: # This might be a function? - function_type = self._get_function_type( - definition.keys()[0]) - if function_type: - return self._create_instance( - function_type, definition.values()[0]) - - # nothing special here - return as dict - d = {} - for k, v in definition.iteritems(): - d[k] = self._convert_definition(v) - return d - - elif (isinstance(definition, Sequence) and - not isinstance(definition, basestring)): - return [self._convert_definition(v) for v in definition] - - # anything else is returned as-is - return definition - - def _create_instance(self, cls, args, ref=None): - """ - Returns an instance of `cls` with `args` passed as arguments. - - Recursively inspects `args` to create nested objects and functions as - necessary. - - `cls` will only be considered only if it's an object we track - (i.e.: troposphere objects). - - If `cls` has a `props` attribute, nested properties will be - instanciated as troposphere Property objects as necessary. - - If `cls` is a list and contains a single troposphere type, the - returned value will be a list of instances of that type. 
- """ - if isinstance(cls, Sequence): - if len(cls) == 1: - # a list of 1 type means we must provide a list of such objects - if (isinstance(args, basestring) or - not isinstance(args, Sequence)): - args = [args] - return [self._create_instance(cls[0], v) for v in args] - - if isinstance(cls, Sequence)\ - or cls not in self.inspect_members.union(self._custom_members): - # this object doesn't map to any known object. could be a string - # or int, or a Ref... or a list of types such as - # [basestring, FindInMap, Ref] or maybe a - # validator such as `integer` or `port_range` - return self._convert_definition(args) - - elif issubclass(cls, AWSHelperFn): - # special handling for functions, we want to handle it before - # entering the other conditions. - try: - if issubclass(cls, Tags): - arg_dict = {} - for d in args: - arg_dict[d['Key']] = d['Value'] - return cls(arg_dict) - - if (isinstance(args, Sequence) and - not isinstance(args, basestring)): - return cls(*self._convert_definition(args)) - - if issubclass(cls, autoscaling.Metadata): - return self._generate_autoscaling_metadata(cls, args) - - if issubclass(cls, Export): - return cls(args['Name']) - - args = self._convert_definition(args) - if isinstance(args, Ref) and issubclass(cls, Ref): - # watch out for double-refs... - # this can happen if an object's .props has 'Ref' - # as the expected type (which is wrong and should be - # changed to basestring!) - return args - - return cls(args) - - except TypeError as ex: - if '__init__() takes exactly' not in ex.message: - raise - # special AWSHelperFn typically take lowercased parameters, - # but templates use uppercase. for this reason we cannot - # map to most of them, so we fallback with a generic one. - # this might not work for all types if they do extra - # processing in their init routine - return GenericHelperFn(args) - - elif isinstance(args, Mapping): - # we try to build as many troposphere objects as we can by - # inspecting its type validation metadata - kwargs = {} - kwargs.update(args) - for prop_name in getattr(cls, 'props', []): - if prop_name not in kwargs: - continue # the user did not specify this value; skip it - expected_type = cls.props[prop_name][0] - - if (isinstance(expected_type, Sequence) or - expected_type in self.inspect_members): - kwargs[prop_name] = self._create_instance( - expected_type, kwargs[prop_name], prop_name) - else: - kwargs[prop_name] = self._convert_definition( - kwargs[prop_name], prop_name) - - args = self._convert_definition(kwargs) - if isinstance(args, Ref): - # use the returned ref instead of creating a new object - return args - if isinstance(args, AWSHelperFn): - return self._convert_definition(kwargs) - assert isinstance(args, Mapping) - return cls(title=ref, **args) - - return cls(self._convert_definition(args)) - - def _normalize_properties(self, definition): - """ - Inspects the definition and returns a copy of it that is updated - with any special property such as Condition, UpdatePolicy and the - like. 
- """ - args = definition.get('Properties', {}).copy() - if 'Condition' in definition: - args.update({'Condition': definition['Condition']}) - if 'UpdatePolicy' in definition: - # there's only 1 kind of UpdatePolicy; use it - args.update({'UpdatePolicy': self._create_instance( - UpdatePolicy, definition['UpdatePolicy'])}) - if 'CreationPolicy' in definition: - # there's only 1 kind of CreationPolicy; use it - args.update({'CreationPolicy': self._create_instance( - CreationPolicy, definition['CreationPolicy'])}) - if 'DeletionPolicy' in definition: - # DeletionPolicity is very basic - args.update( - {'DeletionPolicy': self._convert_definition( - definition['DeletionPolicy'])}) - if 'Metadata' in definition: - # there are various kind of metadata; pass it as-is - args.update( - {'Metadata': self._convert_definition( - definition['Metadata'])}) - if 'DependsOn' in definition: - args.update( - {'DependsOn': self._convert_definition( - definition['DependsOn'])}) - return args - - def _generate_custom_type(self, resource_type): - """ - Dynamically allocates a new CustomResource class definition using the - specified Custom::SomeCustomName resource type. This special resource - type is equivalent to the AWS::CloudFormation::CustomResource. - """ - if not resource_type.startswith("Custom::"): - raise TypeError("Custom types must start with Custom::") - custom_type = type( - str(resource_type.replace("::", "")), - (self.inspect_resources['AWS::CloudFormation::CustomResource'],), - {'resource_type': resource_type}) - self.inspect_members.add(custom_type) - self.inspect_resources[resource_type] = custom_type - return custom_type - - def _generate_autoscaling_metadata(self, cls, args): - """ Provides special handling for the autoscaling.Metadata object """ - assert isinstance(args, Mapping) - init_config = self._create_instance( - cloudformation.InitConfig, - args['AWS::CloudFormation::Init']['config']) - init = self._create_instance( - cloudformation.Init, {'config': init_config}) - auth = None - if 'AWS::CloudFormation::Authentication' in args: - auth_blocks = {} - for k in args['AWS::CloudFormation::Authentication']: - auth_blocks[k] = self._create_instance( - cloudformation.AuthenticationBlock, - args['AWS::CloudFormation::Authentication'][k], - k) - auth = self._create_instance( - cloudformation.Authentication, auth_blocks) - - return cls(init, auth) - - def _get_function_type(self, function_name): - """ - Returns the function object that matches the provided name. - Only Fn:: and Ref functions are supported here so that other - functions specific to troposphere are skipped. 
- """ - if (function_name.startswith("Fn::") and - function_name[4:] in self.inspect_functions): - return self.inspect_functions[function_name[4:]] - return (self.inspect_functions['Ref'] if function_name == "Ref" - else None) - - def _import_all_troposphere_modules(self): - """ Imports all troposphere modules and returns them """ - dirname = os.path.join(os.path.dirname(__file__)) - module_names = [ - pkg_name - for importer, pkg_name, is_pkg in - pkgutil.walk_packages([dirname], prefix="troposphere.") - if not is_pkg and pkg_name not in self.DEPRECATED_MODULES] - module_names.append('troposphere') - - modules = [] - for name in module_names: - modules.append(importlib.import_module(name)) - - def members_predicate(m): - return inspect.isclass(m) and not inspect.isbuiltin(m) - - members = [] - for module in modules: - members.extend((m[1] for m in inspect.getmembers( - module, members_predicate))) - - return set(members) - - -class ResourceTypeNotFound(Exception): - - def __init__(self, resource, resource_type): - Exception.__init__(self, - "ResourceType not found for " + - resource_type + " - " + resource) - self.resource_type = resource_type - self.resource = resource - - -class ResourceTypeNotDefined(Exception): - - def __init__(self, resource): - Exception.__init__(self, "ResourceType not defined for " + resource) - self.resource = resource diff --git a/troposphere/utils.py b/troposphere/utils.py deleted file mode 100644 index 670a68022..000000000 --- a/troposphere/utils.py +++ /dev/null @@ -1,41 +0,0 @@ -import time - - -def _tail_print(e): - print("%s %s %s" % (e.resource_status, e.resource_type, e.event_id)) - - -def get_events(conn, stackname): - """Get the events in batches and return in chronological order""" - next = None - event_list = [] - while 1: - events = conn.describe_stack_events(stackname, next) - event_list.append(events) - if events.next_token is None: - break - next = events.next_token - time.sleep(1) - return reversed(sum(event_list, [])) - - -def tail(conn, stack_name, log_func=_tail_print, sleep_time=5, - include_initial=True): - """Show and then tail the event log""" - # First dump the full list of events in chronological order and keep - # track of the events we've seen already - seen = set() - initial_events = get_events(conn, stack_name) - for e in initial_events: - if include_initial: - log_func(e) - seen.add(e.event_id) - - # Now keep looping through and dump the new events - while 1: - events = get_events(conn, stack_name) - for e in events: - if e.event_id not in seen: - log_func(e) - seen.add(e.event_id) - time.sleep(sleep_time) diff --git a/troposphere/validators.py b/troposphere/validators.py deleted file mode 100644 index 1bb2890cb..000000000 --- a/troposphere/validators.py +++ /dev/null @@ -1,472 +0,0 @@ -# Copyright (c) 2012-2013, Mark Peek -# All rights reserved. -# -# See LICENSE file for full license. 
- -import json -from re import compile - - -def boolean(x): - if x in [True, 1, '1', 'true', 'True']: - return "true" - if x in [False, 0, '0', 'false', 'False']: - return "false" - raise ValueError - - -def integer(x): - try: - int(x) - except (ValueError, TypeError): - raise ValueError("%r is not a valid integer" % x) - else: - return x - - -def positive_integer(x): - p = integer(x) - if int(p) < 0: - raise ValueError("%r is not a positive integer" % x) - return x - - -def integer_range(minimum_val, maximum_val): - def integer_range_checker(x): - i = int(x) - if i < minimum_val or i > maximum_val: - raise ValueError('Integer must be between %d and %d' % ( - minimum_val, maximum_val)) - return x - - return integer_range_checker - - -def integer_list_item(allowed_values): - def integer_list_item_checker(x): - i = positive_integer(x) - if i in allowed_values: - return x - raise ValueError('Integer must be one of following: %s' % - ', '.join(str(j) for j in allowed_values)) - - return integer_list_item_checker - - -def double(x): - try: - float(x) - except (ValueError, TypeError): - raise ValueError("%r is not a valid double" % x) - else: - return x - - -def ignore(x): - """Method to indicate bypassing property validation""" - return x - - -def defer(x): - """Method to indicate defering property validation""" - return x - - -def network_port(x): - from . import AWSHelperFn - - # Network ports can be Ref items - if isinstance(x, AWSHelperFn): - return x - - i = integer(x) - if int(i) < -1 or int(i) > 65535: - raise ValueError("network port %r must been between 0 and 65535" % i) - return x - - -def tg_healthcheck_port(x): - if isinstance(x, str) and x == "traffic-port": - return x - return network_port(x) - - -def s3_bucket_name(b): - - # consecutive periods not allowed - - if '..' 
in b: - raise ValueError("%s is not a valid s3 bucket name" % b) - - # IP addresses not allowed - - ip_re = compile(r'^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$') - if ip_re.match(b): - raise ValueError("%s is not a valid s3 bucket name" % b) - - s3_bucket_name_re = compile(r'^[a-z\d][a-z\d\.-]{1,61}[a-z\d]$') - if s3_bucket_name_re.match(b): - return b - else: - raise ValueError("%s is not a valid s3 bucket name" % b) - - -def elb_name(b): - elb_name_re = compile(r'^[a-zA-Z0-9](?:[a-zA-Z0-9\-]{0,30}[a-zA-Z0-9]{1})?$') # noqa - if elb_name_re.match(b): - return b - else: - raise ValueError("%s is not a valid elb name" % b) - - -def encoding(encoding): - valid_encodings = ['plain', 'base64'] - if encoding not in valid_encodings: - raise ValueError('Encoding needs to be one of %r' % valid_encodings) - return encoding - - -def status(status): - valid_statuses = ['Active', 'Inactive'] - if status not in valid_statuses: - raise ValueError('Status needs to be one of %r' % valid_statuses) - return status - - -def s3_transfer_acceleration_status(value): - valid_status = ['Enabled', 'Suspended'] - if value not in valid_status: - raise ValueError( - 'AccelerationStatus must be one of: "%s"' % ( - ', '.join(valid_status) - ) - ) - return value - - -def iam_names(b): - iam_name_re = compile(r'^[a-zA-Z0-9_\.\+\=\@\-\,]+$') - if iam_name_re.match(b): - return b - else: - raise ValueError("%s is not a valid iam name" % b) - - -def iam_user_name(user_name): - if not user_name: - raise ValueError( - "AWS::IAM::User property 'UserName' may not be empty") - - if len(user_name) > 64: - raise ValueError( - "AWS::IAM::User property 'UserName' may not exceed 64 characters") - - iam_user_name_re = compile(r'^[\w+=,.@-]+$') - if iam_user_name_re.match(user_name): - return user_name - else: - raise ValueError( - "%s is not a valid value for AWS::IAM::User property 'UserName'", - user_name) - - -def iam_path(path): - if len(path) > 512: - raise ValueError('IAM path %s may not exceed 512 characters', path) - - iam_path_re = compile(r'^\/.*\/$|^\/$') - if not iam_path_re.match(path): - raise ValueError("%s is not a valid iam path name" % path) - return path - - -def iam_role_name(role_name): - if len(role_name) > 64: - raise ValueError('IAM Role Name may not exceed 64 characters') - iam_names(role_name) - return role_name - - -def iam_group_name(group_name): - if len(group_name) > 128: - raise ValueError('IAM Role Name may not exceed 128 characters') - iam_names(group_name) - return group_name - - -def one_of(class_name, properties, property, conditionals): - if properties.get(property) not in conditionals: - raise ValueError( - '%s.%s must be one of: "%s"' % ( - class_name, property, ', '.join(conditionals) - ) - ) - - -def mutually_exclusive(class_name, properties, conditionals): - from . 
import NoValue - - found_list = [] - for c in conditionals: - if c in properties and not properties[c] == NoValue: - found_list.append(c) - seen = set(found_list) - specified_count = len(seen) - if specified_count > 1: - raise ValueError(('%s: only one of the following' - ' can be specified: %s') % ( - class_name, ', '.join(conditionals))) - return specified_count - - -def exactly_one(class_name, properties, conditionals): - specified_count = mutually_exclusive(class_name, properties, conditionals) - if specified_count != 1: - raise ValueError(('%s: one of the following' - ' must be specified: %s') % ( - class_name, ', '.join(conditionals))) - return specified_count - - -def check_required(class_name, properties, conditionals): - for c in conditionals: - if c not in properties: - raise ValueError("Resource %s required in %s" % c, class_name) - - -def json_checker(prop): - from . import AWSHelperFn - - if isinstance(prop, basestring): - # Verify it is a valid json string - json.loads(prop) - return prop - elif isinstance(prop, dict): - # Convert the dict to a basestring - return json.dumps(prop) - elif isinstance(prop, AWSHelperFn): - return prop - else: - raise ValueError("json object must be a str or dict") - - -def notification_type(notification): - valid_notifications = ['Command', 'Invocation'] - if notification not in valid_notifications: - raise ValueError( - 'NotificationType must be one of: "%s"' % ( - ', '.join(valid_notifications) - ) - ) - return notification - - -def notification_event(events): - valid_events = ['All', 'InProgress', 'Success', 'TimedOut', 'Cancelled', - 'Failed'] - for event in events: - if event not in valid_events: - raise ValueError( - 'NotificationEvents must be at least one of: "%s"' % ( - ', '.join(valid_events) - ) - ) - return events - - -def task_type(task): - valid_tasks = ['RUN_COMMAND', 'AUTOMATION', 'LAMBDA', 'STEP_FUNCTION'] - if task not in valid_tasks: - raise ValueError( - 'TaskType must be one of: "%s"' % ( - ', '.join(valid_tasks) - ) - ) - return task - - -def compliance_level(level): - valid_levels = ['CRITICAL', 'HIGH', 'MEDIUM', 'LOW', 'INFORMATIONAL', - 'UNSPECIFIED'] - if level not in valid_levels: - raise ValueError( - 'ApprovedPatchesComplianceLevel must be one of: "%s"' % ( - ', '.join(valid_levels) - ) - ) - return level - - -def operating_system(os): - valid_os = ['WINDOWS', 'AMAZON_LINUX', 'AMAZON_LINUX_2', 'UBUNTU', - 'REDHAT_ENTERPRISE_LINUX', 'SUSE', 'CENTOS'] - if os not in valid_os: - raise ValueError( - 'OperatingSystem must be one of: "%s"' % ( - ', '.join(valid_os) - ) - ) - return os - - -def vpn_pre_shared_key(key): - pre_shared_key_match_re = compile( - r'^(?!0)([A-Za-z0-9]|\_|\.){8,64}$' - ) - if not pre_shared_key_match_re.match(key): - raise ValueError( - '%s is not a valid key.' - ' Allowed characters are alphanumeric characters and ._. Must' - ' be between 8 and 64 characters in length and cannot' - ' start with zero (0).' % key - ) - return(key) - - -def vpn_tunnel_inside_cidr(cidr): - reserved_cidrs = [ - '169.254.0.0/30', - '169.254.1.0/30', - '169.254.2.0/30', - '169.254.3.0/30', - '169.254.4.0/30', - '169.254.5.0/30', - '169.254.169.252/30' - ] - cidr_match_re = compile( - r"^169\.254\.(?:25[0-5]|2[0-4]\d|[01]?\d\d?)" - r"\.(?:25[0-5]|2[0-4]\d|[01]?\d\d?)\/30$" - ) - if cidr in reserved_cidrs: - raise ValueError( - 'The following CIDR blocks are reserved and cannot be used: "%s"' % - (', '.join(reserved_cidrs)) - ) - elif not cidr_match_re.match(cidr): - raise ValueError( - '%s is not a valid CIDR.' 
- ' A size /30 CIDR block from the 169.254.0.0/16 must be specified.' - % cidr) - return(cidr) - - -def vpc_endpoint_type(endpoint_type): - valid_types = ['Interface', 'Gateway'] - if endpoint_type not in valid_types: - raise ValueError( - 'VpcEndpointType must be one of: "%s"' % ( - ', '.join(valid_types) - ) - ) - return(endpoint_type) - - -def scalable_dimension_type(scalable_dimension): - valid_values = ['autoscaling:autoScalingGroup:DesiredCapacity', - 'ecs:service:DesiredCount', - 'ec2:spot-fleet-request:TargetCapacity', - 'rds:cluster:ReadReplicaCount', - 'dynamodb:table:ReadCapacityUnits', - 'dynamodb:table:WriteCapacityUnits', - 'dynamodb:index:ReadCapacityUnits', - 'dynamodb:index:WriteCapacityUnits' - ] - if scalable_dimension not in valid_values: - raise ValueError( - 'ScalableDimension must be one of: "%s"' % ( - ', '.join(valid_values) - ) - ) - return(scalable_dimension) - - -def service_namespace_type(service_namespace): - valid_values = ['autoscaling', 'ecs', 'ec2', 'rds', 'dynamodb'] - if service_namespace not in valid_values: - raise ValueError( - 'ServiceNamespace must be one of: "%s"' % ( - ', '.join(valid_values) - ) - ) - return(service_namespace) - - -def statistic_type(statistic): - valid_values = ['Average', 'Minimum', 'Maximum', - 'SampleCount', 'Sum' - ] - if statistic not in valid_values: - raise ValueError( - 'Statistic must be one of: "%s"' % ( - ', '.join(valid_values) - ) - ) - return(statistic) - - -def key_usage_type(key): - valid_values = ['ENCRYPT_DECRYPT'] - if key not in valid_values: - raise ValueError( - 'KeyUsage must be one of: "%s"' % ( - ', '.join(valid_values) - ) - ) - return(key) - - -def cloudfront_event_type(event_type): - valid_values = ['viewer-request', 'viewer-response', - 'origin-request', 'origin-response'] - if event_type not in valid_values: - raise ValueError( - 'EventType must be one of: "%s"' % ( - ', '.join(valid_values) - ) - ) - return(event_type) - - -def cloudfront_viewer_protocol_policy(viewer_protocol_policy): - valid_values = ['allow-all', 'redirect-to-https', 'https-only'] - if viewer_protocol_policy not in valid_values: - raise ValueError( - 'ViewerProtocolPolicy must be one of: "%s"' % ( - ', '.join(valid_values) - ) - ) - return(viewer_protocol_policy) - - -def cloudfront_restriction_type(restriction_type): - valid_values = ['none', 'blacklist', 'whitelist'] - if restriction_type not in valid_values: - raise ValueError( - 'RestrictionType must be one of: "%s"' % ( - ', '.join(valid_values) - ) - ) - return(restriction_type) - - -def cloudfront_forward_type(forward): - valid_values = ['none', 'all', 'whitelist'] - if forward not in valid_values: - raise ValueError( - 'Forward must be one of: "%s"' % ( - ', '.join(valid_values) - ) - ) - return(forward) - - -def priceclass_type(price_class): - valid_values = ['PriceClass_100', 'PriceClass_200', - 'PriceClass_All'] - if price_class not in valid_values: - raise ValueError( - 'PriceClass must be one of: "%s"' % ( - ', '.join(valid_values) - ) - ) - return(price_class) diff --git a/troposphere/waf.py b/troposphere/waf.py deleted file mode 100644 index 06ac7bf47..000000000 --- a/troposphere/waf.py +++ /dev/null @@ -1,142 +0,0 @@ -# Copyright (c) 2012-2015, Mark Peek -# All rights reserved. -# -# See LICENSE file for full license. - -from . 
import AWSObject, AWSProperty -from .validators import boolean, integer - - -class Action(AWSProperty): - props = { - 'Type': (basestring, True) - } - - -class FieldToMatch(AWSProperty): - props = { - 'Data': (basestring, False), # Conditional - 'Type': (basestring, True) - } - - -class ByteMatchTuples(AWSProperty): - props = { - 'FieldToMatch': (FieldToMatch, True), - 'PositionalConstraint': (basestring, True), - 'TargetString': (basestring, False), # Conditional - 'TargetStringBase64': (basestring, False), # Conditional - 'TextTransformation': (basestring, True) - } - - -class IPSetDescriptors(AWSProperty): - props = { - 'Type': (basestring, True), - 'Value': (basestring, True) - } - - -class Predicates(AWSProperty): - props = { - 'DataId': (basestring, True), - 'Negated': (boolean, True), - 'Type': (basestring, True) - } - - -class Rules(AWSProperty): - props = { - 'Action': (Action, True), - 'Priority': (integer, True), - 'RuleId': (basestring, True) - } - - -class SqlInjectionMatchTuples(AWSProperty): - props = { - 'FieldToMatch': (FieldToMatch, True), - 'TextTransformation': (basestring, True) - } - - -class ByteMatchSet(AWSObject): - resource_type = "AWS::WAF::ByteMatchSet" - - props = { - 'ByteMatchTuples': ([ByteMatchTuples], False), - 'Name': (basestring, True) - } - - -class IPSet(AWSObject): - resource_type = "AWS::WAF::IPSet" - - props = { - 'IPSetDescriptors': ([IPSetDescriptors], False), - 'Name': (basestring, True) - } - - -class Rule(AWSObject): - resource_type = "AWS::WAF::Rule" - - props = { - 'MetricName': (basestring, True), - 'Name': (basestring, True), - 'Predicates': ([Predicates], False) - } - - -class SqlInjectionMatchSet(AWSObject): - resource_type = "AWS::WAF::SqlInjectionMatchSet" - - props = { - 'Name': (basestring, True), - 'SqlInjectionMatchTuples': ([SqlInjectionMatchTuples], False) - } - - -class WebACL(AWSObject): - resource_type = "AWS::WAF::WebACL" - - props = { - 'DefaultAction': (Action, True), - 'MetricName': (basestring, True), - 'Name': (basestring, True), - 'Rules': ([Rules], False) - } - - -class SizeConstraint(AWSProperty): - props = { - 'ComparisonOperator': (basestring, True), - 'FieldToMatch': (FieldToMatch, True), - 'Size': (integer, True), - 'TextTransformation': (basestring, True), - } - - -class SizeConstraintSet(AWSObject): - resource_type = "AWS::WAF::SizeConstraintSet" - - props = { - 'Name': (basestring, True), - 'SizeConstraints': ([SizeConstraint], False), - } - - -class XssMatchTuple(AWSProperty): - props = { - 'FieldToMatch': (FieldToMatch, True), - 'TextTransformation': (basestring, True), - } - - -class XssMatchSet(AWSObject): - resource_type = "AWS::WAF::XssMatchSet" - - props = { - 'Name': (basestring, True), - 'XssMatchTuples': ([XssMatchTuple], False), - } diff --git a/troposphere/wafregional.py b/troposphere/wafregional.py deleted file mode 100644 index dfa563b12..000000000 --- a/troposphere/wafregional.py +++ /dev/null @@ -1,151 +0,0 @@ -# Copyright (c) 2012-2015, Mark Peek -# All rights reserved. -# -# See LICENSE file for full license. - -from . 
import AWSObject, AWSProperty -from .validators import boolean, integer - - -class Action(AWSProperty): - props = { - 'Type': (basestring, True) - } - - -class FieldToMatch(AWSProperty): - props = { - 'Data': (basestring, False), # Conditional - 'Type': (basestring, True) - } - - -class ByteMatchTuples(AWSProperty): - props = { - 'FieldToMatch': (FieldToMatch, True), - 'PositionalConstraint': (basestring, True), - 'TargetString': (basestring, False), # Conditional - 'TargetStringBase64': (basestring, False), # Conditional - 'TextTransformation': (basestring, True) - } - - -class IPSetDescriptors(AWSProperty): - props = { - 'Type': (basestring, True), - 'Value': (basestring, True) - } - - -class Predicates(AWSProperty): - props = { - 'DataId': (basestring, True), - 'Negated': (boolean, True), - 'Type': (basestring, True) - } - - -class Rules(AWSProperty): - props = { - 'Action': (Action, True), - 'Priority': (integer, True), - 'RuleId': (basestring, True) - } - - -class SqlInjectionMatchTuples(AWSProperty): - props = { - 'FieldToMatch': (FieldToMatch, True), - 'TextTransformation': (basestring, True) - } - - -class ByteMatchSet(AWSObject): - resource_type = "AWS::WAFRegional::ByteMatchSet" - - props = { - 'ByteMatchTuples': ([ByteMatchTuples], False), - 'Name': (basestring, True) - } - - -class IPSet(AWSObject): - resource_type = "AWS::WAFRegional::IPSet" - - props = { - 'IPSetDescriptors': ([IPSetDescriptors], False), - 'Name': (basestring, True) - } - - -class Rule(AWSObject): - resource_type = "AWS::WAFRegional::Rule" - - props = { - 'MetricName': (basestring, True), - 'Name': (basestring, True), - 'Predicates': ([Predicates], False) - } - - -class SqlInjectionMatchSet(AWSObject): - resource_type = "AWS::WAFRegional::SqlInjectionMatchSet" - - props = { - 'Name': (basestring, True), - 'SqlInjectionMatchTuples': ([SqlInjectionMatchTuples], False) - } - - -class WebACL(AWSObject): - resource_type = "AWS::WAFRegional::WebACL" - - props = { - 'DefaultAction': (Action, True), - 'MetricName': (basestring, True), - 'Name': (basestring, True), - 'Rules': ([Rules], False) - } - - -class WebACLAssociation(AWSObject): - resource_type = "AWS::WAFRegional::WebACLAssociation" - - props = { - 'ResourceArn': (basestring, True), - 'WebACLId': (basestring, True), - } - - -class SizeConstraint(AWSProperty): - props = { - 'ComparisonOperator': (basestring, True), - 'FieldToMatch': (FieldToMatch, True), - 'Size': (integer, True), - 'TextTransformation': (basestring, True), - } - - -class SizeConstraintSet(AWSObject): - resource_type = "AWS::WAFRegional::SizeConstraintSet" - - props = { - 'Name': (basestring, True), - 'SizeConstraints': ([SizeConstraint], False), - } - - -class XssMatchTuple(AWSProperty): - props = { - 'FieldToMatch': (FieldToMatch, True), - 'TextTransformation': (basestring, True), - } - - -class XssMatchSet(AWSObject): - resource_type = "AWS::WAFRegional::XssMatchSet" - - props = { - 'Name': (basestring, True), - 'XssMatchTuples': ([XssMatchTuple], False), - } diff --git a/troposphere/workspaces.py b/troposphere/workspaces.py deleted file mode 100644 index 8c529c766..000000000 --- a/troposphere/workspaces.py +++ /dev/null @@ -1,32 +0,0 @@ -# Copyright (c) 2015, Mark Peek -# All rights reserved. -# -# See LICENSE file for full license. - -from . 
import AWSObject, AWSProperty, Tags -from .validators import boolean, integer - - -class WorkspaceProperties(AWSProperty): - props = { - 'ComputeTypeName': (basestring, False), - 'RootVolumeSizeGib': (integer, False), - 'RunningMode': (basestring, False), - 'RunningModeAutoStopTimeoutInMinutes': (integer, False), - 'UserVolumeSizeGib': (integer, False), - } - - -class Workspace(AWSObject): - resource_type = "AWS::WorkSpaces::Workspace" - - props = { - 'BundleId': (basestring, True), - 'DirectoryId': (basestring, True), - 'UserName': (basestring, True), - 'RootVolumeEncryptionEnabled': (boolean, False), - 'Tags': (Tags, False), - 'UserVolumeEncryptionEnabled': (boolean, False), - 'VolumeEncryptionKey': (basestring, False), - 'WorkspaceProperties': (WorkspaceProperties, False), - } From 794d2deee2d5167b87d22ad03f116ae1176dc9fa Mon Sep 17 00:00:00 2001 From: DrLuke Date: Sun, 17 Feb 2019 17:15:18 +0100 Subject: [PATCH 60/62] Add aws_objects --- troposphere/__init__.py | 835 ------------------------------------- troposphere/aws_objects.py | 28 ++ 2 files changed, 28 insertions(+), 835 deletions(-) create mode 100644 troposphere/aws_objects.py diff --git a/troposphere/__init__.py b/troposphere/__init__.py index 33f108655..e69de29bb 100644 --- a/troposphere/__init__.py +++ b/troposphere/__init__.py @@ -1,835 +0,0 @@ -# Copyright (c) 2012-2017, Mark Peek -# All rights reserved. -# -# See LICENSE file for full license. - - -import cfn_flip -import collections -import json -import re -import sys -import types - -from . import validators - -__version__ = "2.4.1" - -# constants for DeletionPolicy and UpdateReplacePolicy -Delete = 'Delete' -Retain = 'Retain' -Snapshot = 'Snapshot' - -# Pseudo Parameters -AWS_ACCOUNT_ID = 'AWS::AccountId' -AWS_NOTIFICATION_ARNS = 'AWS::NotificationARNs' -AWS_NO_VALUE = 'AWS::NoValue' -AWS_PARTITION = 'AWS::Partition' -AWS_REGION = 'AWS::Region' -AWS_STACK_ID = 'AWS::StackId' -AWS_STACK_NAME = 'AWS::StackName' -AWS_URL_SUFFIX = 'AWS::URLSuffix' - -# Template Limits -MAX_MAPPINGS = 100 -MAX_OUTPUTS = 60 -MAX_PARAMETERS = 60 -MAX_RESOURCES = 200 -PARAMETER_TITLE_MAX = 255 - -valid_names = re.compile(r'^[a-zA-Z0-9]+$') - - -def is_aws_object_subclass(cls): - is_aws_object = False - try: - is_aws_object = issubclass(cls, BaseAWSObject) - # prop_type isn't a class - except TypeError: - pass - return is_aws_object - - -def encode_to_dict(obj): - if hasattr(obj, 'to_dict'): - # Calling encode_to_dict to ensure object is - # nomalized to a base dictionary all the way down. - return encode_to_dict(obj.to_dict()) - elif isinstance(obj, (list, tuple)): - new_lst = [] - for o in list(obj): - new_lst.append(encode_to_dict(o)) - return new_lst - elif isinstance(obj, dict): - props = {} - for name, prop in obj.items(): - props[name] = encode_to_dict(prop) - - return props - # This is useful when dealing with external libs using - # this format. Specifically awacs. - elif hasattr(obj, 'JSONrepr'): - return encode_to_dict(obj.JSONrepr()) - return obj - - -def depends_on_helper(obj): - """ Handles using .title if the given object is a troposphere resource. - - If the given object is a troposphere resource, use the `.title` attribute - of that resource. If it's a string, just use the string. This should allow - more pythonic use of DependsOn. 
- """ - if isinstance(obj, AWSObject): - return obj.title - elif isinstance(obj, list): - return list(map(depends_on_helper, obj)) - return obj - - -class BaseAWSObject(object): - def __init__(self, title, template=None, validation=True, **kwargs): - self.title = title - self.template = template - self.do_validation = validation - # Cache the keys for validity checks - self.propnames = self.props.keys() - self.attributes = [ - 'Condition', 'CreationPolicy', 'DeletionPolicy', 'DependsOn', - 'Metadata', 'UpdatePolicy', 'UpdateReplacePolicy', - ] - - # try to validate the title if its there - if self.title: - self.validate_title() - - # Create the list of properties set on this object by the user - self.properties = {} - dictname = getattr(self, 'dictname', None) - if dictname: - self.resource = { - dictname: self.properties, - } - else: - self.resource = self.properties - if hasattr(self, 'resource_type') and self.resource_type is not None: - self.resource['Type'] = self.resource_type - self.__initialized = True - - # Check for properties defined in the class - for k, (_, required) in self.props.items(): - v = getattr(type(self), k, None) - if v is not None and k not in kwargs: - self.__setattr__(k, v) - - # Now that it is initialized, populate it with the kwargs - for k, v in kwargs.items(): - self.__setattr__(k, v) - - self.add_to_template() - - def add_to_template(self): - # Bound it to template if we know it - if self.template is not None: - self.template.add_resource(self) - - def __getattr__(self, name): - # If pickle loads this object, then __getattr__ will cause - # an infinite loop when pickle invokes this object to look for - # __setstate__ before attributes is "loaded" into this object. - # Therefore, short circuit the rest of this call if attributes - # is not loaded yet. - if "attributes" not in self.__dict__: - raise AttributeError(name) - try: - if name in self.attributes: - return self.resource[name] - else: - return self.properties.__getitem__(name) - except KeyError: - # Fall back to the name attribute in the object rather than - # in the properties dict. This is for non-OpenStack backwards - # compatibility since OpenStack objects use a "name" property. - if name == 'name': - return self.__getattribute__('title') - raise AttributeError(name) - - def __setattr__(self, name, value): - if name in self.__dict__.keys() \ - or '_BaseAWSObject__initialized' not in self.__dict__: - return dict.__setattr__(self, name, value) - elif name in self.attributes: - if name == "DependsOn": - self.resource[name] = depends_on_helper(value) - else: - self.resource[name] = value - return None - elif name in self.propnames: - # Check the type of the object and compare against what we were - # expecting. - expected_type = self.props[name][0] - - # If the value is a AWSHelperFn we can't do much validation - # we'll have to leave that to Amazon. Maybe there's another way - # to deal with this that we'll come up with eventually - if isinstance(value, AWSHelperFn): - return self.properties.__setitem__(name, value) - - # If it's a function, call it... - elif isinstance(expected_type, types.FunctionType): - try: - value = expected_type(value) - except Exception: - sys.stderr.write( - "%s: %s.%s function validator '%s' threw " - "exception:\n" % (self.__class__, - self.title, - name, - expected_type.__name__)) - raise - return self.properties.__setitem__(name, value) - - # If it's a list of types, check against those types... 
- elif isinstance(expected_type, list): - # If we're expecting a list, then make sure it is a list - if not isinstance(value, list): - self._raise_type(name, value, expected_type) - - # Iterate over the list and make sure it matches our - # type checks (as above accept AWSHelperFn because - # we can't do the validation ourselves) - for v in value: - if not isinstance(v, tuple(expected_type)) \ - and not isinstance(v, AWSHelperFn): - self._raise_type(name, v, expected_type) - # Validated so assign it - return self.properties.__setitem__(name, value) - - # Final validity check, compare the type of value against - # expected_type which should now be either a single type or - # a tuple of types. - elif isinstance(value, expected_type): - return self.properties.__setitem__(name, value) - else: - self._raise_type(name, value, expected_type) - - type_name = getattr(self, 'resource_type', self.__class__.__name__) - - if type_name == 'AWS::CloudFormation::CustomResource' or \ - type_name.startswith('Custom::'): - # Add custom resource arguments to the dict without any further - # validation. The properties of a CustomResource is not known. - return self.properties.__setitem__(name, value) - - raise AttributeError("%s object does not support attribute %s" % - (type_name, name)) - - def _raise_type(self, name, value, expected_type): - raise TypeError('%s: %s.%s is %s, expected %s' % (self.__class__, - self.title, - name, - type(value), - expected_type)) - - def validate_title(self): - if not valid_names.match(self.title): - raise ValueError('Name "%s" not alphanumeric' % self.title) - - def validate(self): - pass - - def no_validation(self): - self.do_validation = False - return self - - def to_dict(self): - if self.do_validation: - self._validate_props() - self.validate() - - if self.properties: - return encode_to_dict(self.resource) - elif hasattr(self, 'resource_type'): - d = {} - for k, v in self.resource.items(): - if k != 'Properties': - d[k] = v - return d - else: - return {} - - @classmethod - def _from_dict(cls, title=None, **kwargs): - props = {} - for prop_name, value in kwargs.items(): - try: - prop_attrs = cls.props[prop_name] - except KeyError: - raise AttributeError("Object type %s does not have a " - "%s property." % (cls.__name__, - prop_name)) - prop_type = prop_attrs[0] - value = kwargs[prop_name] - is_aws_object = is_aws_object_subclass(prop_type) - if is_aws_object: - if not isinstance(value, collections.Mapping): - raise ValueError("Property definition for %s must be " - "a Mapping type" % prop_name) - value = prop_type._from_dict(**value) - - if isinstance(prop_type, list): - if not isinstance(value, list): - raise TypeError("Attribute %s must be a " - "list." 
% prop_name) - new_value = [] - for v in value: - new_v = v - if is_aws_object_subclass(prop_type[0]): - if not isinstance(v, collections.Mapping): - raise ValueError( - "Property definition for %s must be " - "a list of Mapping types" % prop_name) - new_v = prop_type[0]._from_dict(**v) - new_value.append(new_v) - value = new_value - props[prop_name] = value - if title: - return cls(title, **props) - return cls(**props) - - @classmethod - def from_dict(cls, title, d): - return cls._from_dict(title, **d) - - def _validate_props(self): - for k, (_, required) in self.props.items(): - if required and k not in self.properties: - rtype = getattr(self, 'resource_type', "") - title = getattr(self, 'title') - msg = "Resource %s required in type %s" % (k, rtype) - if title: - msg += " (title: %s)" % title - raise ValueError(msg) - - -class AWSObject(BaseAWSObject): - dictname = 'Properties' - - def ref(self): - return Ref(self) - - Ref = ref - - def get_att(self, value): - return GetAtt(self, value) - - GetAtt = get_att - - -class AWSDeclaration(BaseAWSObject): - """ - Used for CloudFormation Resource Property objects - http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/ - aws-product-property-reference.html - """ - - def __init__(self, title, **kwargs): - super(AWSDeclaration, self).__init__(title, **kwargs) - - def ref(self): - return Ref(self) - - Ref = ref - - -class AWSProperty(BaseAWSObject): - """ - Used for CloudFormation Resource Property objects - http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/ - aws-product-property-reference.html - """ - dictname = None - - def __init__(self, title=None, **kwargs): - super(AWSProperty, self).__init__(title, **kwargs) - - -class AWSAttribute(BaseAWSObject): - dictname = None - - """ - Used for CloudFormation Resource Attribute objects - http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/ - aws-product-attribute-reference.html - """ - - def __init__(self, title=None, **kwargs): - super(AWSAttribute, self).__init__(title, **kwargs) - - -def validate_delimiter(delimiter): - if not isinstance(delimiter, basestring): - raise ValueError( - "Delimiter must be a String, %s provided" % type(delimiter) - ) - - -def validate_pausetime(pausetime): - if not pausetime.startswith('PT'): - raise ValueError('PauseTime should look like PT#H#M#S') - return pausetime - - -class UpdatePolicy(BaseAWSObject): - def __init__(self, title, **kwargs): - raise DeprecationWarning( - "This UpdatePolicy class is deprecated, please switch to using " - "the more general UpdatePolicy in troposphere.policies.\n" - ) - - -class AWSHelperFn(object): - def getdata(self, data): - if isinstance(data, BaseAWSObject): - return data.title - else: - return data - - def to_dict(self): - return encode_to_dict(self.data) - - -class GenericHelperFn(AWSHelperFn): - """ Used as a fallback for the template generator """ - def __init__(self, data): - self.data = self.getdata(data) - - def to_dict(self): - return encode_to_dict(self.data) - - -class Base64(AWSHelperFn): - def __init__(self, data): - self.data = {'Fn::Base64': data} - - -class FindInMap(AWSHelperFn): - def __init__(self, mapname, key, value): - self.data = {'Fn::FindInMap': [self.getdata(mapname), key, value]} - - -class GetAtt(AWSHelperFn): - def __init__(self, logicalName, attrName): # noqa: N803 - self.data = {'Fn::GetAtt': [self.getdata(logicalName), attrName]} - - -class Cidr(AWSHelperFn): - def __init__(self, ipblock, count, sizemask=None): - if sizemask: - self.data = {'Fn::Cidr': [ipblock, count, 
sizemask]} - else: - self.data = {'Fn::Cidr': [ipblock, count]} - - -class GetAZs(AWSHelperFn): - def __init__(self, region=""): - self.data = {'Fn::GetAZs': region} - - -class If(AWSHelperFn): - def __init__(self, cond, true, false): - self.data = {'Fn::If': [self.getdata(cond), true, false]} - - -class Equals(AWSHelperFn): - def __init__(self, value_one, value_two): - self.data = {'Fn::Equals': [value_one, value_two]} - - -class And(AWSHelperFn): - def __init__(self, cond_one, cond_two, *conds): - self.data = {'Fn::And': [cond_one, cond_two] + list(conds)} - - -class Or(AWSHelperFn): - def __init__(self, cond_one, cond_two, *conds): - self.data = {'Fn::Or': [cond_one, cond_two] + list(conds)} - - -class Not(AWSHelperFn): - def __init__(self, cond): - self.data = {'Fn::Not': [self.getdata(cond)]} - - -class Join(AWSHelperFn): - def __init__(self, delimiter, values): - validate_delimiter(delimiter) - self.data = {'Fn::Join': [delimiter, values]} - - -class Split(AWSHelperFn): - def __init__(self, delimiter, values): - validate_delimiter(delimiter) - self.data = {'Fn::Split': [delimiter, values]} - - -class Sub(AWSHelperFn): - def __init__(self, input_str, dict_values=None, **values): - # merge dict - if dict_values: - values.update(dict_values) - self.data = {'Fn::Sub': [input_str, values] if values else input_str} - - -class Name(AWSHelperFn): - def __init__(self, data): - self.data = self.getdata(data) - - -class Select(AWSHelperFn): - def __init__(self, indx, objects): - self.data = {'Fn::Select': [indx, objects]} - - -class Ref(AWSHelperFn): - def __init__(self, data): - self.data = {'Ref': self.getdata(data)} - - def __eq__(self, other): - if isinstance(other, self.__class__): - return self.data == other.data - return self.data.values()[0] == other - - def __hash__(self): - return hash(self.data.values()[0]) - - -# Pseudo Parameter Ref's -AccountId = Ref(AWS_ACCOUNT_ID) -NotificationARNs = Ref(AWS_NOTIFICATION_ARNS) -NoValue = Ref(AWS_NO_VALUE) -Region = Ref(AWS_REGION) -StackId = Ref(AWS_STACK_ID) -StackName = Ref(AWS_STACK_NAME) - - -class Condition(AWSHelperFn): - def __init__(self, data): - self.data = {'Condition': self.getdata(data)} - - -class ImportValue(AWSHelperFn): - def __init__(self, data): - self.data = {'Fn::ImportValue': data} - - -class Tags(AWSHelperFn): - def __init__(self, *args, **kwargs): - if not args: - # Assume kwargs variant - tag_dict = kwargs - else: - if len(args) != 1: - raise(TypeError, "Multiple non-kwargs passed to Tags") - - # Validate single argument passed in is a dict - if not isinstance(args[0], dict): - raise(TypeError, "Tags needs to be either kwargs or dict") - tag_dict = args[0] - - def add_tag(tag_list, k, v): - tag_list.append({'Key': k, 'Value': v, }) - - self.tags = [] - - # Detect and handle non-string Tag items which do not sort in Python3 - if all(isinstance(k, basestring) for k in tag_dict): - for k, v in sorted(tag_dict.items()): - add_tag(self.tags, k, v) - else: - for k, v in tag_dict.items(): - add_tag(self.tags, k, v) - - # allow concatenation of the Tags object via '+' operator - def __add__(self, newtags): - newtags.tags = self.tags + newtags.tags - return newtags - - def to_dict(self): - return [encode_to_dict(tag) for tag in self.tags] - - @classmethod - def from_dict(cls, title=None, **kwargs): - return cls(**kwargs) - - -class Template(object): - props = { - 'AWSTemplateFormatVersion': (basestring, False), - 'Transform': (basestring, False), - 'Description': (basestring, False), - 'Parameters': (dict, False), - 'Mappings': 
(dict, False), - 'Resources': (dict, False), - 'Outputs': (dict, False), - } - - def __init__(self, Description=None, Metadata=None): # noqa: N803 - self.description = Description - self.metadata = {} if Metadata is None else Metadata - self.conditions = {} - self.mappings = {} - self.outputs = {} - self.parameters = {} - self.resources = {} - self.version = None - self.transform = None - - def add_description(self, description): - self.description = description - - def add_metadata(self, metadata): - self.metadata = metadata - - def add_condition(self, name, condition): - self.conditions[name] = condition - return name - - def handle_duplicate_key(self, key): - raise ValueError('duplicate key "%s" detected' % key) - - def _update(self, d, values): - if isinstance(values, list): - for v in values: - if v.title in d: - self.handle_duplicate_key(v.title) - d[v.title] = v - else: - if values.title in d: - self.handle_duplicate_key(values.title) - d[values.title] = values - return values - - def add_output(self, output): - if len(self.outputs) >= MAX_OUTPUTS: - raise ValueError('Maximum outputs %d reached' % MAX_OUTPUTS) - return self._update(self.outputs, output) - - def add_mapping(self, name, mapping): - if len(self.mappings) >= MAX_MAPPINGS: - raise ValueError('Maximum mappings %d reached' % MAX_MAPPINGS) - self.mappings[name] = mapping - - def add_parameter(self, parameter): - if len(self.parameters) >= MAX_PARAMETERS: - raise ValueError('Maximum parameters %d reached' % MAX_PARAMETERS) - return self._update(self.parameters, parameter) - - def get_or_add_parameter(self, parameter): - if parameter.title in self.parameters: - return self.parameters[parameter.title] - else: - self.add_parameter(parameter) - return parameter - - def add_resource(self, resource): - if len(self.resources) >= MAX_RESOURCES: - raise ValueError('Maximum number of resources %d reached' - % MAX_RESOURCES) - return self._update(self.resources, resource) - - def add_version(self, version=None): - if version: - self.version = version - else: - self.version = "2010-09-09" - - def add_transform(self, transform): - self.transform = transform - - def to_dict(self): - t = {} - if self.description: - t['Description'] = self.description - if self.metadata: - t['Metadata'] = self.metadata - if self.conditions: - t['Conditions'] = self.conditions - if self.mappings: - t['Mappings'] = self.mappings - if self.outputs: - t['Outputs'] = self.outputs - if self.parameters: - t['Parameters'] = self.parameters - if self.version: - t['AWSTemplateFormatVersion'] = self.version - if self.transform: - t['Transform'] = self.transform - t['Resources'] = self.resources - - return encode_to_dict(t) - - def set_parameter_label(self, parameter, label): - """ - Sets the Label used in the User Interface for the given parameter. - :type parameter: str or Parameter - :type label: str - """ - labels = self.metadata\ - .setdefault("AWS::CloudFormation::Interface", {})\ - .setdefault("ParameterLabels", {}) - - if isinstance(parameter, BaseAWSObject): - parameter = parameter.title - - labels[parameter] = {"default": label} - - def add_parameter_to_group(self, parameter, group_name): - """ - Add a parameter under a group (created if needed). 
- :type parameter: str or Parameter - :type group_name: str - """ - groups = self.metadata \ - .setdefault("AWS::CloudFormation::Interface", {}) \ - .setdefault("ParameterGroups", []) - - if isinstance(parameter, BaseAWSObject): - parameter = parameter.title - - # Check if group_name already exists - existing_group = None - for group in groups: - if group["Label"]["default"] == group_name: - existing_group = group - break - - if existing_group is None: - existing_group = { - "Label": {"default": group_name}, - "Parameters": [], - } - groups.append(existing_group) - - existing_group["Parameters"].append(parameter) - - return group_name - - def to_json(self, indent=4, sort_keys=True, separators=(',', ': ')): - return json.dumps(self.to_dict(), indent=indent, - sort_keys=sort_keys, separators=separators) - - def to_yaml(self, clean_up=False, long_form=False): - return cfn_flip.to_yaml(self.to_json(), clean_up=clean_up, - long_form=long_form) - - def __eq__(self, other): - if isinstance(other, Template): - return (self.to_json() == other.to_json()) - else: - return False - - def __ne__(self, other): - return (not self.__eq__(other)) - - def __hash__(self): - return hash(self.to_json()) - - -class Export(AWSHelperFn): - def __init__(self, name): - self.data = { - 'Name': name, - } - - -class Output(AWSDeclaration): - props = { - 'Description': (basestring, False), - 'Export': (Export, False), - 'Value': (basestring, True), - } - - def add_to_template(self): - # Bound it to template if we know it - if self.template is not None: - self.template.add_output(self) - - -class Parameter(AWSDeclaration): - STRING_PROPERTIES = ['AllowedPattern', 'MaxLength', 'MinLength'] - NUMBER_PROPERTIES = ['MaxValue', 'MinValue'] - props = { - 'Type': (basestring, True), - 'Default': ((basestring, int, float), False), - 'NoEcho': (bool, False), - 'AllowedValues': (list, False), - 'AllowedPattern': (basestring, False), - 'MaxLength': (validators.positive_integer, False), - 'MinLength': (validators.positive_integer, False), - 'MaxValue': (validators.integer, False), - 'MinValue': (validators.integer, False), - 'Description': (basestring, False), - 'ConstraintDescription': (basestring, False), - } - - def add_to_template(self): - # Bound it to template if we know it - if self.template is not None: - self.template.add_parameter(self) - - def validate_title(self): - if len(self.title) > PARAMETER_TITLE_MAX: - raise ValueError("Parameter title can be no longer than " - "%d characters" % PARAMETER_TITLE_MAX) - super(Parameter, self).validate_title() - - def validate(self): - def check_type(t, v): - try: - t(v) - return True - except ValueError: - return False - - # Validate the Default parameter value - default = self.properties.get('Default') - if default: - error_str = ("Parameter default type mismatch: expecting " - "type %s got %s with value %r") - # Get the Type specified and see whether the default type - # matches (in the case of a String Type) or can be coerced - # into one of the number formats. 
- param_type = self.properties.get('Type') - if param_type == 'String' and not isinstance(default, basestring): - raise ValueError(error_str % - ('String', type(default), default)) - elif param_type == 'Number': - allowed = [float, int] - # See if the default value can be coerced into one - # of the correct types - if not any(map(lambda x: check_type(x, default), allowed)): - raise ValueError(error_str % - (param_type, type(default), default)) - elif param_type == 'List': - if not isinstance(default, basestring): - raise ValueError(error_str % - (param_type, type(default), default)) - allowed = [float, int] - dlist = default.split(",") - for d in dlist: - # Verify the split array are all numbers - if not any(map(lambda x: check_type(x, d), allowed)): - raise ValueError(error_str % - (param_type, type(d), dlist)) - - if self.properties['Type'] != 'String': - for p in self.STRING_PROPERTIES: - if p in self.properties: - raise ValueError("%s can only be used with parameters of " - "the String type." % p) - if self.properties['Type'] != 'Number': - for p in self.NUMBER_PROPERTIES: - if p in self.properties: - raise ValueError("%s can only be used with parameters of " - "the Number type." % p) diff --git a/troposphere/aws_objects.py b/troposphere/aws_objects.py new file mode 100644 index 000000000..db839ce54 --- /dev/null +++ b/troposphere/aws_objects.py @@ -0,0 +1,28 @@ +"""Base AWS Objects""" + +import troposphere.validators as validators +from typing import Dict + + +class AWSObject(): + pass + + +class AWSProperty(AWSObject): + pass + + +class AWSResource(AWSObject): + def __init__(self, logical_name: str): + self.logical_name: str = None + + self.properties: Dict[str, AWSProperty] = {} + + @property + def logical_name(self) -> str: + return self._logical_name + + @logical_name.setter + def logical_name(self, logical_name: str) -> None: + validators.alphanumeric(logical_name) + self._logical_name = logical_name From b5f6c64de07971592e347c9405828e57c4150fee Mon Sep 17 00:00:00 2001 From: DrLuke Date: Wed, 20 Feb 2019 23:40:10 +0100 Subject: [PATCH 61/62] Add intrinsic functions --- tests/test_instrinsic_functions.py | 189 +++++++++++++++++++++++++++++ troposphere/intrinsic_functions.py | 130 ++++++++++++++++++++ 2 files changed, 319 insertions(+) create mode 100644 tests/test_instrinsic_functions.py create mode 100644 troposphere/intrinsic_functions.py diff --git a/tests/test_instrinsic_functions.py b/tests/test_instrinsic_functions.py new file mode 100644 index 000000000..4a6580899 --- /dev/null +++ b/tests/test_instrinsic_functions.py @@ -0,0 +1,189 @@ +import unittest + +from troposphere.intrinsic_functions import * + + +class TestIntrinsicFunctions(unittest.TestCase): + def test_to_dict(self): + """Test all three modes of to_dict""" + # Function with primitive parameter + expected = { + "Ref": "foo" + } + self.assertDictEqual(expected, Ref("foo").to_dict()) + + # Nested functions + expected = { + "Fn::ImportValue": {"Fn::Join": [" ", ["join", "me"]]} + } + self.assertDictEqual(expected, FnImportValue(FnJoin(" ", ["join", "me"])).to_dict()) + + # List with nested function + expected = { + "Fn::Base64": {"Ref": "foo"} + } + self.assertDictEqual(expected, FnBase64(Ref("foo")).to_dict()) + + # List with mixed nested and primitive arguments and sublists + expected = { + "Fn::Base64": {"Fn::Join": [" ", [{"Ref": "foo"}, "bar", {"Ref": {"Ref": "baz"}}]]} + } + self.assertDictEqual(expected, FnBase64(FnJoin(" ", [Ref("foo"), "bar", Ref(Ref("baz"))])).to_dict()) + + def test_fn_base64(self): + 
expected = { + "Fn::Base64": "SomeString" + } + self.assertDictEqual(expected, FnBase64("SomeString").to_dict()) + + def test_fn_cidr(self): + expected = { + "Fn::Cidr": [ + "10.10.0.0/16", + 123, + 16 + ] + } + self.assertDictEqual(expected, FnCidr("10.10.0.0/16", 123, 16).to_dict()) + with self.assertRaises(ValueError): + FnCidr("10.10.0.0/16", 99999, 16) + + def test_fn_findinmap(self): + expected = { + "Fn::FindInMap": [ + "foo", + "bar", + "baz" + ] + } + self.assertDictEqual(expected, FnFindInMap("foo", "bar", "baz").to_dict()) + + def test_fn_getatt(self): + expected = { + "Fn::GetAtt": [ + "foo", + "bar" + ] + } + self.assertDictEqual(expected, FnGetAtt("foo", "bar").to_dict()) + + def test_fn_getazs(self): + expected = { + "Fn::GetAZs": "foo" + } + self.assertDictEqual(expected, FnGetAZs("foo").to_dict()) + + def test_fn_importvalue(self): + expected = { + "Fn::ImportValue": "foo" + } + self.assertDictEqual(expected, FnImportValue("foo").to_dict()) + + def test_fn_join(self): + expected = { + "Fn::Join": [ + " ", + ["Join", "this", "please"] + ] + } + self.assertDictEqual(expected, FnJoin(" ", ["Join", "this", "please"]).to_dict()) + + def test_fn_select(self): + expected = { + "Fn::Select": [ + 1, + ["val0", "val1", "val2"] + ] + } + self.assertDictEqual(expected, FnSelect(1, ["val0", "val1", "val2"]).to_dict()) + + def test_ref(self): + expected = { + "Ref": "foo" + } + self.assertDictEqual(expected, Ref("foo").to_dict()) + + def test_fn_if(self): + expected = { + "Fn::If": [ + "foo", + "bar", + "baz" + ] + } + self.assertDictEqual(expected, FnIf("foo", "bar", "baz").to_dict()) + + def test_fn_sub(self): + expected = { + "Fn::Sub": "foo" + } + self.assertDictEqual(expected, FnSub("foo").to_dict()) + expected = { + "Fn::Sub": [ + "foo", + { + "foo1", "bar1", + "foo2", "bar2" + } + ] + } + self.assertDictEqual(expected, FnSub("foo", {"foo1", "bar1", "foo2", "bar2"}).to_dict()) + + def test_fn_split(self): + expected = { + "Fn::Split": [ + "foo", + "bar" + ] + } + self.assertDictEqual(expected, FnSplit("foo", "bar").to_dict()) + + def test_fn_transform(self): + expected = { + "Fn::Transform": { + "Name": "foo", + "Parameters": { + "param1": "foo", + "param2": "bar" + } + } + } + self.assertDictEqual(expected, FnTransform("foo", {"param1": "foo", "param2": "bar"}).to_dict()) + + def test_fn_and(self): + expected = { + "Fn::And": [ + {"Ref": "foo"}, + {"Ref": "bar"} + ] + } + self.assertDictEqual(expected, FnAnd([Ref("foo"), Ref("bar")]).to_dict()) + + def test_fn_equals(self): + expected = { + "Fn::Equals": [ + {"Ref": "foo"}, + {"Ref": "bar"} + ] + } + self.assertDictEqual(expected, FnEquals(Ref("foo"), Ref("bar")).to_dict()) + + def test_fn_not(self): + expected = { + "Fn::Not": [ + {"Ref": "foo"} + ] + } + self.assertDictEqual(expected, FnNot(Ref("foo")).to_dict()) + + def test_fn_or(self): + expected = { + "Fn::Or": [ + {"Ref": "foo"}, + {"Ref": "bar"} + ] + } + self.assertDictEqual(expected, FnOr([Ref("foo"), Ref("bar")]).to_dict()) + +if __name__ == '__main__': + unittest.main() diff --git a/troposphere/intrinsic_functions.py b/troposphere/intrinsic_functions.py new file mode 100644 index 000000000..f6c276b77 --- /dev/null +++ b/troposphere/intrinsic_functions.py @@ -0,0 +1,130 @@ +"""Intrinsic template functions + +Documentation: https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/intrinsic-function-reference-conditions.html +""" + +from __future__ import annotations # Needed for self-reference of classes as __init__ type-annotation! 
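+# With postponed evaluation of annotations (PEP 563), the classes below can
+# reference each other (and themselves) in type hints before they are defined,
+# e.g. FnCidr referring to FnSelect and Ref further down in this module.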
+from typing import Dict, Any, Union, List, TypeVar + + +class IntrinsicFunction(): + """Base intrinsic function""" + + def __init__(self, key: str, content: Any) -> None: + self._key: str = key + self._content: Any = content + + def to_dict(self) -> Dict: + if isinstance(self._content, IntrinsicFunction): + return {self._key: self._content.to_dict()} + elif isinstance(self._content, list): + # Call to_dict() on all functions in list + outlist = [x.to_dict() if isinstance(x, IntrinsicFunction) else x for x in self._content] + # Call to_dict() on all items of lists in list + outlist = [[x.to_dict() if isinstance(x, IntrinsicFunction) else x for x in sublist] if isinstance(sublist, list) else sublist for sublist in outlist] + return {self._key: outlist} + else: + return {self._key: self._content} + + +class FnBase64(IntrinsicFunction): + def __init__(self, value_to_encode: Union[str, IntrinsicFunction]): + super(FnBase64, self).__init__("Fn::Base64", value_to_encode) + + +class FnCidr(IntrinsicFunction): + def __init__(self, ip_block: Union[str, FnSelect, Ref], count: Union[int, FnSelect, Ref], + cidr_bits: Union[int, FnSelect, Ref]): + if isinstance(count, int) and not 1 <= count <= 256: + raise ValueError(f"Count must be between 1 and 256, is: {count}") + # TODO: Further check on cidr_bits? Needs to distinguish between v4/v6 + + super(FnCidr, self).__init__("Fn::Cidr", [ip_block, count, cidr_bits]) + + +class FnFindInMap(IntrinsicFunction): + def __init__(self, map_name: Union[str, FnFindInMap], top_level_key: Union[str, FnFindInMap], + second_level_key: Union[str, FnFindInMap]): + super(FnFindInMap, self).__init__("Fn::FindInMap", [map_name, top_level_key, second_level_key]) + + +class FnGetAtt(IntrinsicFunction): + def __init__(self, logical_name_of_resource: str, attribute_name: Union[str, Ref]): + super(FnGetAtt, self).__init__("Fn::GetAtt", [logical_name_of_resource, attribute_name]) + + +class FnGetAZs(IntrinsicFunction): + def __init__(self, region: Union[str, Ref]): + super(FnGetAZs, self).__init__("Fn::GetAZs", region) + + +class FnImportValue(IntrinsicFunction): + def __init__(self, shared_value_to_import: Union[ + str, FnBase64, FnFindInMap, FnIf, FnJoin, FnSelect, FnSplit, FnSub, Ref]): + super(FnImportValue, self).__init__("Fn::ImportValue", shared_value_to_import) + + +class FnJoin(IntrinsicFunction): + def __init__(self, delimiter: str, values: List[Union[ + str, FnBase64, FnFindInMap, FnGetAtt, FnGetAZs, FnIf, FnImportValue, FnJoin, FnSelect, FnSplit, FnSub, Ref]]): + super(FnJoin, self).__init__("Fn::Join", [delimiter, values]) + + +class FnSelect(IntrinsicFunction): + def __init__(self, index: Union[int, FnFindInMap, Ref], list_of_objects: List[Union[ + str, int, float, dict, list, bool, FnFindInMap, FnGetAtt, FnGetAZs, FnIf, FnSplit]]): + super(FnSelect, self).__init__("Fn::Select", [index, list_of_objects]) + + +class Ref(IntrinsicFunction): + def __init__(self, logical_name: Union[str, FnSelect, Ref]): + super(Ref, self).__init__("Ref", logical_name) + + +class FnIf(IntrinsicFunction): + def __init__(self, condition_name: str, value_if_true: Any, value_if_false: Any): + super(FnIf, self).__init__("Fn::If", [condition_name, value_if_true, value_if_false]) + + +substitution_type = TypeVar("substitution_type", str, FnBase64, FnFindInMap, FnGetAtt, FnGetAZs, FnIf, + FnImportValue, FnJoin, FnSelect, Ref) + + +class FnSub(IntrinsicFunction): + def __init__(self, string: str, substitutions: Dict[substitution_type, substitution_type] = None): + if substitutions is None: + 
super(FnSub, self).__init__("Fn::Sub", string) + else: + super(FnSub, self).__init__("Fn::Sub", [string, substitutions]) + + +class FnSplit(IntrinsicFunction): + def __init__(self, delimiter: str, source_string: Union[ + str, FnBase64, FnFindInMap, FnGetAtt, FnGetAZs, FnIf, FnImportValue, FnJoin, FnSelect, FnSub, Ref]): + super(FnSplit, self).__init__("Fn::Split", [delimiter, source_string]) + + +class FnTransform(IntrinsicFunction): + def __init__(self, macro_name: str, parameters: Dict[str, str]): + super(FnTransform, self).__init__("Fn::Transform", {"Name": macro_name, "Parameters": parameters}) + + +class FnAnd(IntrinsicFunction): + def __init__(self, conditions: List[Union[FnFindInMap, Ref, FnAnd, FnEquals, FnIf, FnNot, FnOr]]): + super(FnAnd, self).__init__("Fn::And", conditions) + + +class FnEquals(IntrinsicFunction): + def __init__(self, value1: Union[FnFindInMap, Ref, FnAnd, FnEquals, FnIf, FnNot, FnOr], + value2: Union[FnFindInMap, Ref, FnAnd, FnEquals, FnIf, FnNot, FnOr]): + super(FnEquals, self).__init__("Fn::Equals", [value1, value2]) + + +class FnNot(IntrinsicFunction): + def __init__(self, condition: Union[FnFindInMap, Ref, FnAnd, FnEquals, FnIf, FnNot, FnOr]): + super(FnNot, self).__init__("Fn::Not", [condition]) + + +class FnOr(IntrinsicFunction): + def __init__(self, conditions: List[Union[FnFindInMap, Ref, FnAnd, FnEquals, FnIf, FnNot, FnOr]]): + super(FnOr, self).__init__("Fn::Or", conditions) From 0e17fe511f38bcea94509706b4ab474eb4242e32 Mon Sep 17 00:00:00 2001 From: DrLuke Date: Thu, 21 Feb 2019 00:56:10 +0100 Subject: [PATCH 62/62] Add intrinsic functions to generated code --- troposphere_gen/policy.py | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/troposphere_gen/policy.py b/troposphere_gen/policy.py index 4758bbf32..518d46cbb 100644 --- a/troposphere_gen/policy.py +++ b/troposphere_gen/policy.py @@ -114,8 +114,9 @@ def module_head_format(self, moduledata: ModuleData, specification: Specificatio ) imports = "\nfrom troposphere.aws_objects import AWSProperty, AWSObject\n" - imports += "from typing import Dict, List\n" + imports += "from typing import Dict, List, Union\n" imports += "from troposphere import validators\n" + imports += "from troposphere.intrinsic_functions import IntrinsicFunction\n" if modulename is not "common": imports += "from troposphere.common import Tag\n" @@ -144,13 +145,13 @@ def class_format(self, classdata: ClassData) -> str: init_code = " def __init__(self,\n" for name, prop in classdata.subproperties.items(): conflicted = name in classdata.conflictedproperties - init_code += f" {cc_to_sc(name)}: {self.get_type(prop, conflicted)} = None,\n" + init_code += f" {cc_to_sc(name)}: Union[{self.get_type(prop, conflicted)}, IntrinsicFunction] = None,\n" init_code += " ):\n" # Generate field declarations for name, prop in classdata.subproperties.items(): conflicted = name in classdata.conflictedproperties - init_code += f" self._{cc_to_sc(name)}: {self.get_type(prop, conflicted)} = None\n" + init_code += f" self._{cc_to_sc(name)}: Union[{self.get_type(prop, conflicted)}, IntrinsicFunction] = None\n" init_code += f" self.{cc_to_sc(name)} = {cc_to_sc(name)}\n" property_funcs = "" @@ -230,10 +231,12 @@ def property_setter(self, propertydata: Property, classdata: ClassData, conflict return ( f" @{cc_to_sc(propertydata.name)}.setter\n" - f" def {cc_to_sc(propertydata.name)}(self, value: {self.get_type(propertydata, conflicted)}) -> None:\n" + f" def {cc_to_sc(propertydata.name)}(self, value: 
Union[{self.get_type(propertydata, conflicted)}, IntrinsicFunction]) -> None:\n"
             f"        if value is None:\n"
             f"            self._{cc_to_sc(propertydata.name)} = None\n"
             f"            return\n"
+            f"        if isinstance(value, IntrinsicFunction):\n"
+            f"            self._{cc_to_sc(propertydata.name)} = value\n"
+            f"            return\n"
             f"{type_check}"
             f"        self._{cc_to_sc(propertydata.name)} = value\n"
         )
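
Taken together, the last two patches add the intrinsic-function classes and teach the generated property setters to accept them (storing an IntrinsicFunction as-is and returning early, so only plain values go through the normal type check). The following is a minimal, self-contained sketch of how those pieces are intended to compose. The Bucket class, its bucket_name property, the TypeError message, and the cut-down IntrinsicFunction/Ref stand-ins are illustrative assumptions for this sketch, not code taken from the patches; the real classes live in troposphere/intrinsic_functions.py and in the generator's output.

# Minimal sketch (assumed names): a generated-style resource property that
# accepts either a plain value or an intrinsic function.
from typing import Union


class IntrinsicFunction:
    """Cut-down stand-in for troposphere.intrinsic_functions.IntrinsicFunction."""

    def __init__(self, key: str, content) -> None:
        self._key = key
        self._content = content

    def to_dict(self) -> dict:
        # Nested functions serialize recursively, mirroring the real to_dict().
        if isinstance(self._content, IntrinsicFunction):
            return {self._key: self._content.to_dict()}
        return {self._key: self._content}


class Ref(IntrinsicFunction):
    def __init__(self, logical_name: str) -> None:
        super().__init__("Ref", logical_name)


class Bucket:
    """Illustrative shape of a class the policy generator would emit."""

    def __init__(self, bucket_name: Union[str, IntrinsicFunction] = None) -> None:
        self._bucket_name: Union[str, IntrinsicFunction] = None
        self.bucket_name = bucket_name

    @property
    def bucket_name(self) -> Union[str, IntrinsicFunction]:
        return self._bucket_name

    @bucket_name.setter
    def bucket_name(self, value: Union[str, IntrinsicFunction]) -> None:
        if value is None:
            self._bucket_name = None
            return
        if isinstance(value, IntrinsicFunction):
            # Intrinsic functions are stored as-is and skip the type check.
            self._bucket_name = value
            return
        if not isinstance(value, str):
            raise TypeError("bucket_name expects str, got %s" % type(value))
        self._bucket_name = value


bucket = Bucket(bucket_name=Ref("BucketNameParameter"))
assert bucket.bucket_name.to_dict() == {"Ref": "BucketNameParameter"}

bucket.bucket_name = "my-bucket"  # plain values still pass the type check
assert bucket.bucket_name == "my-bucket"

The early return for IntrinsicFunction values mirrors the generated setter above: the function object is kept untouched on the instance and only rendered to its {"Ref": ...} or {"Fn::...": ...} form via to_dict() when the template is serialized.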