diff --git a/README.md b/README.md
index 19a7eb6..fb82cec 100644
--- a/README.md
+++ b/README.md
@@ -46,6 +46,10 @@ Change list items
```python
metadata.tags = ["tag1", "tag2"]
+metadata.tags[1] = "another tag"
+metadata.tags.append("new tag")
+metadata.tags.remove("tag1")
+metadata.tags.pop()
```
Get numeric items (return int or float)
@@ -93,10 +97,21 @@ metadata.point_of_contact.contact_name = "First and Last Name"
metadata.point_of_contact.email = "email@address.com"
```
+Remove all items from the geoprocessing history
+```python
+metadata.rm_gp_history()
+```
+
+
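+Add and edit online resources (a minimal sketch of the new object list; sub-item names follow `elements.py`)
+```python
+metadata.online_resource.new()                      # append a new, empty online resource
+metadata.online_resource[0].link = "http://some.url/dataset"
+metadata.online_resource[0].name = "Download link"
+metadata.online_resource[0].function = "download"
+```
+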
Saving the changes back to the file
```python
-metadata.finish() # save the metadata back to the original source feature class and cleanup. Without calling finish(), your edits are NOT saved!
+metadata.save() # save the metadata back to the file.
+metadata.cleanup() # remove all temporary files.
+```
+or
+```python
+metadata.finish() # save() and cleanup() as one call
```
If you want to enable automatic updates of your metadata (feature classes only) call.
```python
@@ -145,6 +160,7 @@ Supported items
|Metadata File Identifier|file_identifier|String|Metadata/Details/File Idnetifier|mdFileID|
|Dataset URI|dataset_uri|String|Metadata/Details/Dataset URI|dataSetURI|
|Resource Label|resource_label|String|Resource/Fields/Details/Label|eainfo/detailed/enttyp/enttypl|
+|Format|format|String|Resource/Distribution/Distribution Format/Format Name|distInfo/distFormat/formatName|
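+
+Set the distribution format (a minimal, hypothetical example for the new `format` property)
+```python
+metadata.format = "Shapefile"
+```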
Contact items
---------------
diff --git a/arcpy_metadata/elements.py b/arcpy_metadata/elements.py
index 741d69a..27d5f3c 100644
--- a/arcpy_metadata/elements.py
+++ b/arcpy_metadata/elements.py
@@ -1,279 +1,345 @@
-# TODO: Add category item
-# category = dataIdInfo/tpCat/TopicCatCd
-
-elements = {
- "abstract": {
- "path": "dataIdInfo/idAbs",
- "type": "string"},
-
- "alternate_title": {
- "path": "dataIdInfo/idCitation/resAltTitle",
- "type": "string"},
-
- "citation": {
- "path": "dataIdInfo/idCitation/otherCitDet",
- "type": "string"},
-
- "citation_contact": {
- "path": "dataIdInfo/idCitation/citRespParty",
- "type": "contact"},
-
- "credits": {
- "path": "dataIdInfo/idCredit",
- "type": "string"},
-
- "dataset_uri": {
- "path": "dataSetURI",
- "type": "string"},
-
- "distance_resolution": { # TODO: Allow to add units
- "path": "dataIdInfo/dataScale/scaleDist/value",
- "type": "string"},
-
- "download": { # TODO: Allow to add multiple download links with names
- "path": "distInfo/distTranOps/onLineSrc/linkage",
- "type": "string"},
-
- "extent_description": {
- "path": "dataIdInfo/dataExt/exDesc",
- "type": "string"},
-
- "external_link": {
- "path": "dataIdInfo/idCitation/citOnlineRes/linkage",
- "type": "string"},
-
- "file_identifier": {
- "path": "mdFileID",
- "type": "string"},
-
- "identifier_code1": {
- "path": "dataIdInfo/idCitation/citId/identCode",
- "type": "string"},
-
- "identifier_code2": {
- "path": "dataIdInfo/idCitation/citId/identAuth/citId/identCode",
- "type": "string"},
-
- "identifier_code3": {
- "path": "dataIdInfo/idCitation/citId/identAuth/citId/identAuth/citId/identCode",
- "type": "string"},
-
- "identifier_code4": {
- "path": "dqInfo/dataLineage/dataSource/srcRefSys/identAuth/citId/identCode",
- "type": "string"},
-
- "language": {
- "path": "dataIdInfo/dataLang",
- "type": "language"},
-
- "last_update": {
- "path": "dataIdInfo/idCitation/date/reviseDate",
- "type": "date"},
-
- "license": {
- "path": "dataIdInfo/resConst/LegConsts/useLimit",
- "type": "string"},
-
- "limitation": { #TODO: does read correctly when entered though ArcGIS Online. They are stored in a seperated resConst element
- "path": "dataIdInfo/resConst/Consts/useLimit",
- "type": "string"},
-
- #"locals": {
- # "path": "Esri/locales/locale",
- # "type": "local"},
-
- "maintenance_contact": {
- "path": "dataIdInfo/maintCont",
- "type": "contact"},
-
- "max_scale": {
- "path": "Esri/scaleRange/maxScale",
- "type": "integer"},
-
- "metadata_language": {
- "path": "dataIdInfo/mdLang",
- "type": "language"},
-
- "min_scale": {
- "path": "Esri/scaleRange/minScale",
- "type": "integer"},
-
- "place_keywords": {
- "path": "dataIdInfo/searchKeys[last()]",
- "tagname": "keyword",
- "type": "list"},
-
- "point_of_contact": {
- "path": "dataIdInfo/idPoC",
- "type": "contact"},
-
- "purpose": {
- "path": "dataIdInfo/idPurp",
- "type": "string"},
-
- "resource_label": {
- "path": "eainfo/detailed/enttyp/enttypl",
- "type": "string"},
-
- "scale_resolution": {
- "path": "dataIdInfo/dataScale/equScale/rfDenom",
- "type": "integer"},
-
- "source": {
- "path": "dqInfo/dataLineage/dataSource/srcDesc",
- "type": "string"},
-
- "supplemental_information": {
- "path": "dataIdInfo/suppInfo",
- "type": "string"},
-
- "title": {
- "path": "dataIdInfo/idCitation/resTitle",
- "type": "string"},
-
- "tags": {
- "path": "dataIdInfo/searchKeys[last()]",
- "tagname": "keyword",
- "type": "list"},
-
- "temporal_extent_description": {
- "path": "dataIdInfo/dataExt/tempDesc",
- "type": "string"},
-
- "temporal_extent_end": {
- "path": "dataIdInfo/dataExt/tempEle/exTemp/TM_Period/tmEnd",
- "type": "date"},
-
- "temporal_extent_instance": {
- "path": "dataIdInfo/dataExt/tempEle/exTemp/TM_Instant/tmPosition",
- "type": "date"},
-
- "temporal_extent_start": {
- "path": "dataIdInfo/dataExt/tempEle/exTemp/TM_Period/tmBegin",
- "type": "date"},
-
- #"update_frequency": {
- # "path": "dataIdInfo/resMaint/maintFreq/MaintFreqCd",
- # "type": "string"},
-
- "update_frequency_description": {
- "path": "dataIdInfo/resMaint/usrDefFreq/duration",
- "type": "string"}
-
- }
-
contact_elements = {
- "role_p": {
- "parent": "element",
- "path": "role"},
-
- #"role": {
- # "parent": "role_p",
- # "path": "RoleCd"},
-
- "contact_name": {
- "parent": "element",
- "path": "rpIndName"},
-
- "position": {
- "parent": "element",
- "path": "rpPosName"},
-
- "organization": {
- "parent": "element",
- "path": "rpOrgName"},
-
- "contact_info": {
- "parent": "element",
- "path": "rpCntInfo"},
-
- "address_p": {
- "parent": "contact_info",
- "path": "cntAddress"},
-
- "email": {
- "parent": "address_p",
- "path": "eMailAdd"},
-
- "address": {
- "parent": "address_p",
- "path": "delPoint"},
-
- "city": {
- "parent": "address_p",
- "path": "city"},
-
- "state": {
- "parent": "address_p",
- "path": "adminArea"},
-
- "zip": {
- "parent": "address_p",
- "path": "postCode"},
- "country": {
- "parent": "address_p",
- "path": "country"},
+ "role": {
+ "path": "role/RoleCd",
+ "type": "attribute",
+ "key": "value",
+ "values": [("resource provider", "001"),
+ ("custodian", "002"),
+ ("owner", "003"),
+ ("user", "004"),
+ ("distributer", "005"),
+ ("originator", "006"),
+ ("point of contact", "007"),
+ ("principal investigator", "008"),
+ ("processor", "009"),
+ ("publisher", "010"),
+ ("author", "011")]},
+
+ "contact_name": {
+ "path": "rpIndName",
+ "type": "string"},
+
+ "position": {
+ "path": "rpPosName",
+ "type": "string"},
+
+ "organization": {
+ "path": "rpOrgName",
+ "type": "string"},
+
+ "contact_info": {
+ "path": "rpCntInfo",
+ "type": "string"},
+
+ "email": {
+ "path": "rpCntInfo/cntAddress/eMailAdd",
+ "type": "string"},
+
+ "address_type": {
+ "path": "rpCntInfo/cntAddress",
+ "type": "attribute",
+ "key": "addressType",
+ "values": [("postal", "postal"),
+ ("physical", "physical"),
+ ("both", "both")]},
+
+ "address": {
+ "path": "rpCntInfo/cntAddress/delPoint",
+ "type": "string"},
+
+ "city": {
+ "path": "rpCntInfo/cntAddress/city",
+ "type": "string"},
+
+ "state": {
+ "path": "rpCntInfo/cntAddress/adminArea",
+ "type": "string"},
+
+ "zip": {
+ "path": "rpCntInfo/cntAddress/postCode",
+ "type": "string"},
+
+ "country": {
+ "path": "rpCntInfo/cntAddress/country",
+ "type": "string"}, # TODO: make this a dropdown list for ISO2 code. Write to value
+
+ "phone_nb": {
+ "path": "rpCntInfo/voiceNum",
+ "type": "string"},
+
+ "fax_nb": {
+ "path": "rpCntInfo/faxNum",
+ "type": "string"},
+
+ "hours": {
+ "path": "rpCntInfo/cntHours",
+ "type": "string"},
+
+ "instructions": {
+ "path": "rpCntInfo/cntInstr",
+ "type": "string"},
+
+    # TODO: Make Online Resources a sub-element list
+ "link": {
+ "path": "rpCntInfo/cntOnlineRes/linkage",
+ "type": "string"},
+
+ "protocol": {
+ "path": "rpCntInfo/cntOnlineRes/protocol",
+ "type": "string"},
+
+ "profile": {
+ "path": "rpCntInfo/cntOnlineRes/appProfile",
+ "type": "string"},
+
+ "or_name": {
+ "path": "rpCntInfo/cntOnlineRes/orName",
+ "type": "string"},
+
+ "or_desc": {
+ "path": "rpCntInfo/cntOnlineRes/orDesc",
+ "type": "string"},
+
+ "or_function": {
+ "path": "orFunct/OnFunctCd",
+ "type": "attribute",
+ "key": "value",
+ "values": [("download", "001"),
+ ("information", "002"),
+ ("offline access", "003"),
+ ("order", "004"),
+ ("search", "005")]
+ }
+}
- "phone": {
- "parent": "contact_info",
- "path": "cntPhone"},
-
- "phone_nb": {
- "parent": "phone",
- "path": "voiceNum"},
-
- "fax_nb": {
- "parent": "phone",
- "path": "faxNum"},
-
- "hours": {
- "parent": "contact_info",
- "path": "cntHours"},
-
- "instructions": {
- "parent": "contact_info",
- "path": "cntInstr"},
-
- "online_resource": {
- "parent": "contact_info",
- "path": "cntOnlineRes"},
-
- "link": {
- "parent": "online_resource",
- "path": "linkage"},
-
- "protocol": {
- "parent": "online_resource",
- "path": "protocol"},
-
- "profile": {
- "parent": "online_resource",
- "path": "appProfile"},
-
- "or_name": {
- "parent": "online_resource",
- "path": "orName"},
+language_elements = {
+ "language": {
+ "path": "languageCode",
+ "type": "string"},
+
+ "country": {
+ "path": "countryCode",
+ "type": "string"}
+}
+
+online_resource_elements = {
+ "link": {
+ "path": "linkage",
+ "type": "string"},
+ "protocol": {
+ "path": "protocol",
+ "type": "string"},
+ "profile": {
+ "path": "appProfile",
+ "type": "string"},
+ "name": {
+ "path": "orName",
+ "type": "string"},
+ "description": {
+ "path": "orDesc",
+ "type": "string"},
+
+ "function": {
+ "path": "orFunct/OnFunctCd",
+ "type": "attribute",
+ "key": "value",
+ "values": [("download", "001"),
+ ("information", "002"),
+ ("offline access", "003"),
+ ("order", "004"),
+ ("search", "005")]
+ },
+}
- "or_desc": {
- "parent": "online_resource",
- "path": "orDesc"},
+elements = {
+ "abstract": {
+ "path": "dataIdInfo/idAbs",
+ "type": "string"},
+
+ "alternate_title": {
+ "path": "dataIdInfo/idCitation/resAltTitle",
+ "type": "string"},
+
+ # TODO: Add category item
+ # category = dataIdInfo/tpCat/TopicCatCd
+
+ "citation": {
+ "path": "dataIdInfo/idCitation/otherCitDet",
+ "type": "string"},
+
+ "citation_contact": {
+ "path": "dataIdInfo/idCitation/citRespParty",
+ "type": "parent_item",
+ "elements": contact_elements},
+
+ "credits": {
+ "path": "dataIdInfo/idCredit",
+ "type": "string"},
+
+ "dataset_uri": {
+ "path": "dataSetURI",
+ "type": "string"},
+
+ "distance_resolution": { # TODO: Allow to add units
+ "path": "dataIdInfo/dataScale/scaleDist/value",
+ "type": "string"},
+
+ "download": { #
+ "path": "distInfo/distTranOps/onLineSrc/linkage",
+ "type": "string",
+ "deprecated": "Use online_resource instead"},
+
+ "extent_description": {
+ "path": "dataIdInfo/dataExt/exDesc",
+ "type": "string"},
+
+ "external_link": {
+ "path": "dataIdInfo/idCitation/citOnlineRes/linkage",
+ "type": "string"},
+
+ "format": {
+ "path": "distInfo/distFormat/formatName",
+ "type": "string"
+ },
+
+ "file_identifier": {
+ "path": "mdFileID",
+ "type": "string",
+ "sync": False},
+
+ "identifier_code1": {
+ "path": "dataIdInfo/idCitation/citId/identCode",
+ "type": "string"},
+
+ "identifier_code2": {
+ "path": "dataIdInfo/idCitation/citId/identAuth/citId/identCode",
+ "type": "string"},
+
+ "identifier_code3": {
+ "path": "dataIdInfo/idCitation/citId/identAuth/citId/identAuth/citId/identCode",
+ "type": "string"},
+
+ "identifier_code4": {
+ "path": "dqInfo/dataLineage/dataSource/srcRefSys/identAuth/citId/identCode",
+ "type": "string"},
+
+ "language": {
+ "path": "dataIdInfo/dataLang",
+ "type": "language",
+ "elements": language_elements},
+
+ "last_update": {
+ "path": "dataIdInfo/idCitation/date/reviseDate",
+ "type": "date"},
+
+ "license": {
+ "path": "dataIdInfo/resConst/LegConsts/useLimit",
+ "type": "string"},
+
+ "limitation": { #TODO: does read correctly when entered though ArcGIS Online. They are stored in a seperated resConst element
+ "path": "dataIdInfo/resConst/Consts/useLimit",
+ "type": "string"},
+
+ #"locals": {
+ # "path": "Esri/locales/locale",
+ # "type": "local"},
+
+ "maintenance_contact": {
+ "path": "dataIdInfo/maintCont",
+ "type": "parent_item",
+ "elements": contact_elements},
+
+ "max_scale": {
+ "path": "Esri/scaleRange/maxScale",
+ "type": "integer"},
+
+ "metadata_language": {
+ "path": "dataIdInfo/mdLang",
+ "type": "language"},
+
+ "min_scale": {
+ "path": "Esri/scaleRange/minScale",
+ "type": "integer"},
+
+ "online_resource": {
+ "path": "distInfo/distTranOps",
+ "tagname": "onLineSrc",
+ "type": "object_list",
+ "elements": online_resource_elements},
+
+ "place_keywords": {
+ "path": "dataIdInfo/searchKeys[last()]",
+ "tagname": "keyword",
+ "type": "list"},
+
+ "point_of_contact": {
+ "path": "dataIdInfo/idPoC",
+ "type": "parent_item",
+ "elements": contact_elements},
+
+ "purpose": {
+ "path": "dataIdInfo/idPurp",
+ "type": "string"},
+
+ "resource_label": {
+ "path": "eainfo/detailed/enttyp/enttypl",
+ "type": "string"},
+
+ "scale_resolution": {
+ "path": "dataIdInfo/dataScale/equScale/rfDenom",
+ "type": "integer"},
+
+ "source": {
+ "path": "dqInfo/dataLineage/dataSource/srcDesc",
+ "type": "string"},
+
+ "supplemental_information": {
+ "path": "dataIdInfo/suppInfo",
+ "type": "string"},
+
+ "title": {
+ "path": "dataIdInfo/idCitation/resTitle",
+ #"path": "Esri/DataProperties/itemProps/itemName",
+ "type": "string",
+ "sync": False},
+
+ "tags": {
+ "path": "dataIdInfo/searchKeys[last()]",
+ "tagname": "keyword",
+ "type": "list"},
+
+ "temporal_extent_description": {
+ "path": "dataIdInfo/dataExt/tempDesc",
+ "type": "string"},
+
+ "temporal_extent_end": {
+ "path": "dataIdInfo/dataExt/tempEle/exTemp/TM_Period/tmEnd",
+ "type": "date"},
+
+ "temporal_extent_instance": {
+ "path": "dataIdInfo/dataExt/tempEle/exTemp/TM_Instant/tmPosition",
+ "type": "date"},
+
+ "temporal_extent_start": {
+ "path": "dataIdInfo/dataExt/tempEle/exTemp/TM_Period/tmBegin",
+ "type": "date"},
+
+ "update_frequency": {
+ "path": "dataIdInfo/resMaint/maintFreq/MaintFreqCd",
+ "type": "attribute",
+ "key": "value",
+ "values": [("continual", "001"),
+ ("daily", "002"),
+ ("weekly", "003"),
+ ("fortnightly", "004"),
+ ("monthly", "005")]},
+
+ "update_frequency_description": {
+ "path": "dataIdInfo/resMaint/usrDefFreq/duration",
+ "type": "string"}
+
+}
- "or_function": {
- "parent": "online_resource",
- "path": "orFunct"} #,
- #"or_function_cd": {
- # "parent": "or_function",
- # "path": "OnFunctCd"}
- }
-language_elements = {
- "language": {
- "parent": "element",
- "path": "languageCode"},
-
- "country": {
- "parent": "element",
- "path": "countryCode"}
- }
diff --git a/arcpy_metadata/metadata_constructors.py b/arcpy_metadata/metadata_constructors.py
index 7ec3a99..233483b 100644
--- a/arcpy_metadata/metadata_constructors.py
+++ b/arcpy_metadata/metadata_constructors.py
@@ -1,6 +1,103 @@
import os
+import copy
import xml.etree.ElementTree as ET
-from arcpy_metadata.languages import languages
+
+
+class MetadataValueListHelper(object):
+ """
+ A helper class to have value list items behave like a python lists
+ """
+ def __init__(self, list_items):
+ if isinstance(list_items, MetadataValueListConstructor):
+ self.list_items = list_items
+ else:
+ raise TypeError("Must be an instance of MetadataListConstructor")
+
+ def __getitem__(self, index):
+ return self.list_items.current_items[index].text
+
+ def __setitem__(self, index, value):
+ self.list_items.current_items[index].text = value
+
+ def __repr__(self):
+ return repr(self.list_items.value)
+
+ def __len__(self):
+ return len(self.list_items.current_items)
+
+ def append(self, value):
+ """
+ Append given item to list
+ :param value:
+ :return:
+ """
+ self.list_items.append(value)
+
+ def remove(self, value):
+ """
+ Remove given item from list
+ :param value:
+ :return:
+ """
+ self.list_items.remove(value)
+
+ def pop(self):
+ """
+ Remove last list item
+ :return: object
+ """
+ return self.list_items.pop()
+
+ def sort(self):
+ """
+ Sort list items
+ :return: list
+ """
+ return self.list_items.sort()
+
+class MetadataObjectListHelper(object):
+ """
+    A helper class that makes object list items behave like a Python list
+ """
+ def __init__(self, list_objects):
+ if isinstance(list_objects, MetadataObjectListConstructor):
+ self.list_objects = list_objects
+ else:
+ raise TypeError("Must be an instance of MetadataObjectListConstructor")
+
+ def __getitem__(self, index):
+ return self.list_objects.current_items[index]
+
+ def __setitem__(self, index, value):
+ self.list_objects.current_items[index] = value
+
+ def __repr__(self):
+ return repr(self.list_objects.current_items)
+
+ def __len__(self):
+ return len(self.list_objects.current_items)
+
+ def new(self):
+ """
+ Add a new object to the list
+ :return:
+ """
+ self.list_objects.new()
+
+ def remove(self, value):
+ """
+ Remove given item from list
+ :param value:
+ :return:
+ """
+ self.list_objects.remove(value)
+
+ def pop(self):
+ """
+ Remove last list item
+ :return: object
+ """
+ return self.list_objects.pop()
class MetadataItemConstructor(object):
@@ -10,6 +107,7 @@ class MetadataItemConstructor(object):
path = None
value = ""
+ sync = True
def __init__(self, parent=None):
self.parent = parent
@@ -26,6 +124,7 @@ def __init__(self, parent=None):
else:
self.value = self.parent.elements.find(self.path).text
self.attributes = self.parent.elements.find(self.path).attrib
+ self.attributes["Sync"] = "FALSE"
@property
def attributes(self):
@@ -97,7 +196,7 @@ def _require_tree_elements(self):
done = True
-class MetadataListConstructor(MetadataItemConstructor):
+class MetadataValueListConstructor(MetadataItemConstructor):
"""
A metadata item for groups of items (like tags). Define the root element (self.path) and then the name of the
subitem to store there (self.tag_name) and you can use list-like methods to edit the group
@@ -115,7 +214,7 @@ def __init__(self, parent=None, tagname=None, path=None):
if path:
self.path = path
- super(MetadataListConstructor, self).__init__(parent)
+ super(MetadataValueListConstructor, self).__init__(parent)
self.current_items = []
values = []
@@ -135,18 +234,18 @@ def value(self, v):
self._removeall()
if v is None or v == "":
pass
- elif isinstance(v, list):
+ elif isinstance(v, (list, MetadataValueListHelper)):
for value in v:
- self._append(value)
+ self.append(value)
else:
raise RuntimeWarning("Input value must be a List or None")
- def _append(self, item):
+ def append(self, item):
"""
Adds an individual item to the section
:param item: the text that will be added to the multi-item section, wrapped in the appropriate tag
configured on parent object
- :return: None
+ :return:
"""
element = ET.Element(self.tag_name)
@@ -154,7 +253,44 @@ def _append(self, item):
self.current_items.append(element)
self.element._children = self.current_items
+ def pop(self):
+ """
+ Remove the last element in element tree
+ :return: object
+ """
+
+ item_to_remove = None
+
+ for i in self.current_items:
+ item_to_remove = i
+
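+        # copy the element before detaching it so the caller still receives its contents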
+ j = copy.deepcopy(item_to_remove)
+
+ if item_to_remove is not None:
+ self.current_items.remove(item_to_remove)
+
+ return j
+
+ def remove(self, item):
+ """
+ Remove the given item from element tree
+ :param item:
+ :return:
+ """
+ items_to_remove = []
+
+ for i in self.current_items:
+ if i.text == item:
+ items_to_remove.append(i)
+
+ for i in items_to_remove:
+ self.current_items.remove(i)
+
def _removeall(self):
+ """
+ removes all items from element tree
+ :return:
+ """
items_to_remove = []
for i in self.current_items:
@@ -163,62 +299,102 @@ def _removeall(self):
for i in items_to_remove:
self.current_items.remove(i)
+ def sort(self):
+ """
+ sort items
+ :return:
+ """
+ return self.current_items.sort()
-class MetadataItemsConstructor(MetadataItemConstructor):
+
+class MetadataObjectListConstructor(MetadataItemConstructor):
"""
- A helper objects for more complex items like Locals or Contacts.
- This object will allow to iterage though multiple items of the same type
+    A metadata item for groups of complex sub-items (like online resources). Define the root element (self.path) and
+    the name of the sub-item to store there (self.tag_name) and you can use list-like methods to edit the group
"""
- def __init__(self, parent, path):
- self.path = os.path.dirname(path)
- self.tag_name = os.path.basename(path)
- super(MetadataItemsConstructor, self).__init__(parent)
- self.path = path
- self.elements = self.parent.elements.findall(self.path)
- self.value = self.elements
+ tag_name = None
+ current_items = []
+ path = None
- @property
- def value(self):
- return self.elements
+ def __init__(self, parent=None, tagname=None, path=None, child_elements=None):
- @value.setter
- def value(self, v):
- if not hasattr(self, 'elements'):
- self.elements = self.parent.elements.findall(self.path)
+ self.child_elements = child_elements
- self._removeall()
- if v is None:
- pass
- elif isinstance(v, list):
- for value in v:
- self._append(value)
- else:
- raise RuntimeWarning("Input value must be a List or None")
+ if not self.tag_name:
+ self.tag_name = tagname
+
+ if path:
+ self.path = path
- def _append(self, element):
+ super(MetadataObjectListConstructor, self).__init__(parent)
+
+ self.current_items = []
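+        # wrap each existing sub-element with the configured tag in an editable MetadataParentItem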
+ for item in self.parent.elements.find(self.path):
+ if item.tag == self.tag_name:
+ new_path = "{}/{}".format(self.path, tagname)
+ child = MetadataParentItem(new_path, self.parent, child_elements, len(self.current_items))
+ self.current_items.append(child)
+
+ def new(self):
+ new_path = "{}/{}".format(self.path, self.tag_name)
+ child = MetadataParentItem(new_path, self.parent, self.child_elements, len(self.current_items)+1)
+ self.current_items.append(child)
+
+ def pop(self):
"""
- Adds an individual item to the section
- :param item: the text that will be added to the multi-item section, wrapped in the appropriate tag
- configured on parent object
- :return: None
+ Remove the last element in element tree
+ :return: object
+ """
+
+ item_to_remove = None
+
+ for i in self.current_items:
+ item_to_remove = i
+
+ #j = copy.deepcopy(item_to_remove)
+
+ if item_to_remove is not None:
+ for item in self.parent.elements.find(self.path):
+ if item == item_to_remove.element:
+ self.parent.elements.find(self.path).remove(item)
+ self.current_items.remove(item_to_remove)
+
+ return item_to_remove
+
+ def remove(self, item):
"""
- self.elements.append(element)
+ Remove the given item from element tree
+ :param item:
+ :return:
+ """
+
+ for i in self.parent.elements.find(self.path):
+ if i == item.element:
+ self.parent.elements.find(self.path).remove(i)
+ self.current_items.remove(item)
def _removeall(self):
+ """
+ removes all items from element tree
+ :return:
+ """
items_to_remove = []
- for i in self.elements:
- items_to_remove.append(i)
+ for item in self.current_items:
+ items_to_remove.append(item)
- for i in items_to_remove:
- self.elements.remove(i)
+ for item in items_to_remove:
+ for i in self.parent.elements.find(self.path):
+ if i == item.element:
+ self.parent.elements.find(self.path).remove(i)
+ self.current_items.remove(item)
class MetadataParentItemConstructor(MetadataItemConstructor):
"""
- A helper object for more complex items like Contact and Locals
- This object will allow to add child elements to an item
+    A helper object for more complex items like Contacts, Online Resources and Locals.
+    It allows adding child elements to an item based on the supplied element list.
"""
def __init__(self, parent, child_elements):
@@ -229,32 +405,38 @@ def __init__(self, parent, child_elements):
i = 0
while i < len(self.child_elements):
for element in self.child_elements.keys():
- if self.child_elements[element]["parent"] == "element" and \
- "_{}".format(element) not in self.__dict__.keys():
- setattr(self, "_{}".format(element),
- self._create_item(self.element.iter(), self.element, self.child_elements[element]["path"]))
- setattr(self, element, self.__dict__["_{}".format(element)].value)
- i = 0
- elif "_{}".format(self.child_elements[element]["parent"]) in self.__dict__.keys() and \
- "_{}".format(element) not in self.__dict__.keys():
- setattr(self, "_{}".format(element),
- self._create_item(self.element.iter(),
- self.__dict__["_{}".format(self.child_elements[element]["parent"])],
- self.child_elements[element]["path"]))
- setattr(self, element, self.__dict__["_{}".format(element)].value)
- i = 0
+ path = self.child_elements[element]["path"]
+ if "_{}".format(element) not in self.__dict__.keys():
+ setattr(self, "_{}".format(element), self._create_item(path))
+ i = 0
else:
i += 1
- #self.value = self.element
def __setattr__(self, n, v):
- if n in ["path", "parent", "child_elements", "name", "value", "attr_lang", "attr_country"]:
+ if n in ["path", "parent", "child_elements", "value", "attr_lang", "attr_country"]:
self.__dict__[n] = v
else:
if n in self.child_elements.keys():
- if isinstance(v, str) or isinstance(v, unicode):
+ element_type = self.child_elements[n]["type"]
+
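+                # attribute items store a coded value; translate the readable name into its code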
+ if element_type == "attribute":
+ key = self.child_elements[n]["key"]
+ if v is None or v == "":
+ self.__dict__["_{}".format(n)].element.attrib[key] = ""
+ else:
+ allowed_values = []
+ found = False
+ for value in self.child_elements[n]["values"]:
+ allowed_values.append(value[0])
+ if v == value[0]:
+ self.__dict__["_{}".format(n)].element.attrib[key] = value[1]
+ found = True
+ if not found:
+ raise TypeError("Value must be in {}".format(allowed_values))
+
+ elif isinstance(v, (str, unicode)):
self.__dict__["_{}".format(n)].element.text = v
elif v is None:
self.__dict__["_{}".format(n)].element.text = ""
@@ -266,18 +448,57 @@ def __setattr__(self, n, v):
def __getattr__(self, name):
if name != "child_elements" and name in self.child_elements.keys():
- return self.__dict__["_{}".format(name)].element.text
- #elif name == "value":
- # return self.element
+
+ element_type = self.child_elements[name]["type"]
+ if element_type == "attribute":
+ key = self.child_elements[name]["key"]
+ values = self.child_elements[name]["values"]
+ if key in self.__dict__["_{}".format(name)].element.attrib.keys():
+ v = self.__dict__["_{}".format(name)].element.attrib[key]
+ for value in values:
+ if v in value:
+ return value[0]
+ else:
+ return None
+
+ else:
+ #return self.__dict__["_{}".format(name)].value
+ return self.__dict__["_{}".format(name)].element.text # should be the same
+
else:
return self.__dict__[name]
- def _create_item(self, iter, parent, tag_name):
- for i in iter:
- if i.tag == tag_name:
- return MetadataSubItemConstructor(i, parent, True)
- i = ET.Element(tag_name)
- return MetadataSubItemConstructor(i, parent)
+ def _create_item(self, tag_name):
+
+ tags = tag_name.split("/")
+ i = 0
+
+ parent = self.element
+
+ # search for tag
+ while i < len(tags):
+ tag = tags[i]
+ p = None
+ iterator = parent.iter()
+
+ for item in iterator:
+ # item exists already but is not the final one
+ if item.tag == tag and i < len(tags)-1:
+ p = item
+ break
+ # item exists already and is final one
+ elif item.tag == tag and i == len(tags)-1:
+ return MetadataSubItemConstructor(item)
+ # item does not yet exist
+ if p is None:
+ p = ET.Element(tag)
+ parent.append(p)
+ # if it is the final one
+ if i == len(tags)-1:
+ return MetadataSubItemConstructor(p)
+
+ parent = p
+ i += 1
class MetadataSubItemConstructor(object):
@@ -286,14 +507,10 @@ class MetadataSubItemConstructor(object):
This object can be placed as single item inside a parent items
"""
- def __init__(self, element, parent, exists=False):
+ def __init__(self, element):
- self.parent = parent
self.element = element
- if not exists:
- self.parent.append(element)
-
if self.element.text is not None:
self.value = self.element.text.strip()
else:
@@ -318,7 +535,7 @@ def value(self):
@value.setter
def value(self, v):
- if isinstance(v, str) or isinstance(v, unicode):
+ if isinstance(v, (str, unicode)):
self.element.text = v
elif v is None:
self.element.text = ""
@@ -327,3 +544,53 @@ def value(self, v):
def append(self, element):
self.element.append(element)
+
+
+####################################################
+
+
+class MetadataItem(MetadataItemConstructor):
+ """
+ A simple metadata item
+ Define path and position
+ """
+ def __init__(self, path, name, parent, sync=True):
+ self.path = path
+ self.name = name
+ self.sync = sync
+ super(MetadataItem, self).__init__(parent)
+
+
+class MetadataValueList(MetadataValueListConstructor):
+ """
+ A list metadata item
+ Define path, parent item position and item tag name
+ """
+
+ def __init__(self, tagname, path, name, parent=None, sync=True):
+ self.name = name
+ self.sync = sync
+ super(MetadataValueList, self).__init__(parent, tagname=tagname, path=path)
+
+
+class MetadataObjectList(MetadataObjectListConstructor):
+ """
+ A list metadata item
+ Define path, parent item position and item tag name
+ """
+
+ child_elements = {}
+
+ def __init__(self, tagname, path, parent, elements, sync=True):
+ self.sync = sync
+ super(MetadataObjectList, self).__init__(parent, tagname=tagname, path=path, child_elements=elements)
+
+
+class MetadataParentItem(MetadataParentItemConstructor):
+ """
+ Just a shortcut MetadataContacts that predefines the paths and position
+ """
+ # TODO: Define Role, Country and Online Resource list
+ def __init__(self, path, parent, elements, index=0):
+ self.path = "{0!s}[{1:d}]".format(path, index)
+ super(MetadataParentItem, self).__init__(parent, elements)
diff --git a/arcpy_metadata/metadata_editor.py b/arcpy_metadata/metadata_editor.py
index 86fe441..6974842 100644
--- a/arcpy_metadata/metadata_editor.py
+++ b/arcpy_metadata/metadata_editor.py
@@ -1,29 +1,33 @@
import os
import arcpy
+import xml
+import six
+import warnings
+import traceback
-# TODO: reduce dependencies from arcpy
-# it is actually only needed to extract metadata from GDB items
-# everything else can be done with out the module since metadata are access directly
-# if it is a feature class, arcpy can be loaded on demand using importlib
-# from importlib import import_module
+from datetime import date
+from datetime import datetime
-from arcpy_metadata.metadata_items import MetadataItem
-from arcpy_metadata.metadata_items import MetadataList
-from arcpy_metadata.metadata_items import MetadataLanguage
-from arcpy_metadata.metadata_items import MetadataContact
-from arcpy_metadata.metadata_items import MetadataLocals
+from arcpy_metadata.metadata_constructors import MetadataItem
+from arcpy_metadata.metadata_constructors import MetadataValueList
+from arcpy_metadata.metadata_constructors import MetadataParentItem
+from arcpy_metadata.metadata_constructors import MetadataObjectList
+from arcpy_metadata.metadata_constructors import MetadataValueListHelper
+from arcpy_metadata.metadata_constructors import MetadataObjectListHelper
-import xml
-import six
+from arcpy_metadata.metadata_items import MetadataLanguage
from arcpy_metadata.elements import elements
from arcpy_metadata.languages import languages
-from datetime import date
-from datetime import datetime
+# turn on warnings for deprecation once
+warnings.simplefilter('once', DeprecationWarning)
+# Make warnings look nice
+def warning_on_one_line(message, category, filename, lineno, file=None, line=None):
+ return '{}: {}\n'.format(category.__name__, message)
+warnings.formatwarning = warning_on_one_line
# TODO: Convert to using logging or logbook - probably logging to keep dependencies down
-
try: # made as part of a larger package - using existing logger, but logging to screen for now if not in that package
from log import write as logwrite
from log import warning as logwarning
@@ -31,13 +35,12 @@
def logwrite(log_string, autoprint=1): # match the signature of the expected log function
print(log_string)
-
def logwarning(log_string):
print("WARNING: {0:s}".format(log_string))
-installDir = arcpy.GetInstallInfo("desktop")["InstallDir"]
-xslt = os.path.join(installDir, r"Metadata\Stylesheets\gpTools\exact copy of.xslt")
+install_dir = arcpy.GetInstallInfo("desktop")["InstallDir"]
+xslt = os.path.join(install_dir, r"Metadata\Stylesheets\gpTools\exact copy of.xslt")
metadata_temp_folder = arcpy.env.scratchFolder # a default temp folder to use - settable by other applications so they can set it once
@@ -49,6 +52,7 @@ class MetadataEditor(object):
def __init__(self, dataset=None, metadata_file=None, items=None,
temp_folder=metadata_temp_folder):
+
if items is None:
items = list()
self.items = items
@@ -61,7 +65,7 @@ def __init__(self, dataset=None, metadata_file=None, items=None,
self._simple_datasets = ["ShapeFile", "RasterDataset", "Layer"]
self._layers = ["FeatureLayer"]
- if self.dataset: # for both, we want to export the metadata out
+ if self.dataset: # Check if dataset is set
# export the metadata to the temporary location
self.data_type = self.get_datatype()
@@ -81,12 +85,11 @@ def __init__(self, dataset=None, metadata_file=None, items=None,
xml_file = self.dataset + ".xml"
#if no XML file exists create one and add most basic metadata item to it
if not os.path.exists(xml_file):
- with open(xml_file, "w") as f:
- f.write('')
+ self._create_xml_file(xml_file)
self.metadata_file = xml_file
else:
- raise TypeError("Datatype is not supported")
+ raise TypeError("Cannot read {}. Data type is not supported".format(self.dataset))
# Metadata for GDB datasets are stored inside the GDB itself.
# We need to first export them to a temporary file, modify them and then import them back
@@ -96,54 +99,102 @@ def __init__(self, dataset=None, metadata_file=None, items=None,
self.metadata_file = os.path.join(self.temp_folder, metadata_filename)
if os.path.exists(self.metadata_file):
os.remove(self.metadata_file)
- logwrite("Exporting metadata to temporary file %s" % self.metadata_file)
+ logwrite("Exporting metadata to temporary file {0!s}".format(self.metadata_file))
arcpy.XSLTransform_conversion(self.dataset, xslt, self.metadata_file)
else:
- raise TypeError("Datatype is not supported")
+ raise TypeError("Cannot read {}. Data type is not supported".format(self.dataset))
+
+ elif self.metadata_file: # Check if metadata file is set instead
+ if self.metadata_file.endswith('.xml'):
+ if not os.path.exists(self.metadata_file):
+ self._create_xml_file(self.metadata_file)
+ self._workspace_type = 'FileSystem'
+ else:
+ raise TypeError("Metadata file is not an XML file. Check file extension")
self.elements.parse(self.metadata_file)
# create these all after the parsing happens so that if they have any self initialization, they can correctly perform it
for name in elements.keys():
- setattr(self, "_%s" % name, None)
+ if "sync" in elements[name].keys():
+ sync = elements[name]["sync"]
+ else:
+ sync = True
+ setattr(self, "_{0!s}".format(name), None)
if elements[name]['type'] in ["string", "date", "integer", "float"]:
- setattr(self, "_{}".format(name), MetadataItem(elements[name]['path'], name, self))
+ setattr(self, "_{}".format(name), MetadataItem(elements[name]['path'], name, self, sync))
if self.__dict__["_{}".format(name)].value is not None:
setattr(self, name, self.__dict__["_{}".format(name)].value.strip())
else:
setattr(self, name, self.__dict__["_{}".format(name)].value)
+ elif elements[name]['type'] == "attribute":
+ setattr(self, "_{}".format(name), MetadataItem(elements[name]['path'], name, self, sync))
+ if isinstance(self.__dict__["_{}".format(name)].attributes, dict):
+ key = elements[name]['key']
+ values = elements[name]['values']
+ if key in self.__dict__["_{}".format(name)].attributes.keys():
+ v = self.__dict__["_{}".format(name)].attributes[elements[name]['key']]
+ for value in values:
+ if v in value:
+ setattr(self, name, value[0])
+ break
+ else:
+ setattr(self, name, None)
+
elif elements[name]['type'] == "list":
- setattr(self, "_{}".format(name), MetadataList(elements[name]["tagname"], elements[name]['path'], name, self))
- setattr(self, name, self.__dict__["_{}".format(name)].value)
+ setattr(self, "_{}".format(name), MetadataValueList(elements[name]["tagname"], elements[name]['path'], name, self, sync))
+ #setattr(self, name, self.__dict__["_{}".format(name)].value)
+ #setattr(self, name, ListValues(self.__dict__["_{}".format(name)], name))
elif elements[name]['type'] == "language":
- setattr(self, "_{}".format(name), MetadataLanguage(elements[name]['path'], name, self))
+ setattr(self, "_{}".format(name), MetadataLanguage(elements[name]['path'], name, self, sync))
if self.__dict__["_{}".format(name)].value is not None:
setattr(self, name, self.__dict__["_{}".format(name)].value.strip())
else:
setattr(self, name, self.__dict__["_{}".format(name)].value)
- elif elements[name]['type'] == "local":
- setattr(self, name, MetadataLocals(elements[name]['path'], name, self))
-
- elif elements[name]['type'] == "contact":
- setattr(self, "_{}".format(name), MetadataContact(elements[name]['path'], name, self))
+ elif elements[name]['type'] == "parent_item":
+ setattr(self, "_{}".format(name), MetadataParentItem(elements[name]['path'], self, elements[name]['elements']))
setattr(self, name, self.__dict__["_{}".format(name)])
+ elif elements[name]['type'] == "object_list":
+ setattr(self, "_{}".format(name), MetadataObjectList(elements[name]["tagname"], elements[name]['path'], self, elements[name]['elements'], sync))
+ #setattr(self, name, self.__dict__["_{}".format(name)])
+
if elements[name] in self.__dict__.keys():
self.items.append(getattr(self, "_{}".format(elements[name])))
if items:
self.initialize_items()
+
+ @staticmethod
+ def _create_xml_file(xml_file):
+ with open(xml_file, "w") as f:
+ logwrite("Create new file {0!s}".format(xml_file))
+ f.write('')
+
+
def __setattr__(self, n, v):
+ """
+ Check if input value type matches required type for metadata element
+ and write value to internal property
+ :param n: string
+ :param v: string
+ :return:
+ """
if n in elements.keys():
+
+ # Warn if the property is deprecated, but only when the call is made by the user, not during initialization
+ if "deprecated" in elements[n].keys() and traceback.extract_stack()[-2][2] != "__init__":
+ warnings.warn("Call to deprecated property {}. {}".format(n, elements[n]["deprecated"]), category=DeprecationWarning)
+
if elements[n]['type'] == "string":
- if isinstance(v, str) or isinstance(v, six.text_type):
+ if isinstance(v, (str, six.text_type)):
self.__dict__["_{}".format(n)].value = v
elif v is None:
self.__dict__["_{}".format(n)].value = ""
@@ -154,7 +205,7 @@ def __setattr__(self, n, v):
if isinstance(v, date):
self.__dict__["_{}".format(n)].value = date.strftime(v, "%Y%m%d")
- elif isinstance(v, str) or isinstance(v, six.text_type):
+ elif isinstance(v, (str, six.text_type)):
try:
new_value = datetime.strptime(v, "%Y%m%d").date()
self.__dict__["_{}".format(n)].value = date.strftime(new_value, "%Y%m%d")
@@ -164,7 +215,7 @@ def __setattr__(self, n, v):
elif v is None:
self.__dict__["_{}".format(n)].value = ""
else:
- raise RuntimeWarning("Input value must be of type a Date or a Sting ('yyyymmdd')")
+ raise RuntimeWarning("Input value must be of type a Date or a String ('yyyymmdd')")
elif elements[n]['type'] == "integer":
if isinstance(v, int):
@@ -183,7 +234,7 @@ def __setattr__(self, n, v):
elif elements[n]['type'] == "float":
if isinstance(v, float):
self.__dict__["_{}".format(n)].value = str(v)
- elif isinstance(v, str) or isinstance(v, six.text_type):
+ elif isinstance(v, (str, six.text_type)):
try:
new_value = float(v)
self.__dict__["_{}".format(n)].value = str(new_value)
@@ -194,8 +245,26 @@ def __setattr__(self, n, v):
else:
raise RuntimeWarning("Input value must be of type Float")
+ elif elements[n]['type'] == 'attribute':
+ key = elements[n]['key']
+ values = elements[n]['values']
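+ # translate the readable value (e.g. "continual") into its coded domain value (e.g. "001")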
+ if isinstance(v,(str, six.text_type)):
+ done = False
+ for value in values:
+ if v in value:
+ self.__dict__["_{}".format(n)].attributes[key] = value[1]
+ done = True
+ break
+ if not done:
+ raise RuntimeWarning("Input value must be one of: {}".format(values))
+ else:
+ raise RuntimeWarning("Input value must be one of: {}".format(values))
+
elif elements[n]['type'] == "list":
if isinstance(v, list):
+ #self.__dict__[n].value = ListValues(self.__dict__["_{}".format(n)], v)
+ self.__dict__["_{}".format(n)].value = v
+ elif isinstance(v, MetadataValueListHelper):
self.__dict__["_{}".format(n)].value = v
else:
raise RuntimeWarning("Input value must be of type List")
@@ -211,36 +280,35 @@ def __setattr__(self, n, v):
else:
raise RuntimeWarning("Input value must be in {}, an empty String or None".format(str(languages.keys())))
- elif elements[n]['type'] == "local":
- if isinstance(v, MetadataLocals):
- self.__dict__["_%s" % n] = v
+ elif elements[n]['type'] == "parent_item":
+ if isinstance(v, MetadataParentItem):
+ self.__dict__["_{0!s}".format(n)] = v
else:
- raise RuntimeWarning("Input value must be of type MetadataLocals")
-
- elif elements[n]['type'] == "contact":
- # if isinstance(v, list):
- # is_contact = True
- # for i in v:
- # print type(i)
- # if not isinstance(i, MetadataContact):
- # is_contact = False
- # break
- # if is_contact:
- # self.__dict__["_%s" % n].value = v
- # else:
- # raise RuntimeWarning("Input value must be of a List of MetadataContact object")
- # elif v is None:
- # self.__dict__["_%s" % n].value = []
- if isinstance(v, MetadataContact):
- self.__dict__["_%s" % n] = v
+ raise RuntimeWarning("Input value must be a MetadataParentItem object")
+
+ elif elements[n]['type'] == "object_list":
+ if isinstance(v, list):
+ self.__dict__["_{}".format(n)].value = v
+ elif isinstance(v, MetadataObjectList):
+ self.__dict__["_{}".format(n)].value = v
else:
- raise RuntimeWarning("Input value must be a MetadataContact object")
+ raise RuntimeWarning("Input value must be a MetadataOnlineResource object")
else:
self.__dict__[n] = v
def __getattr__(self, n):
+ """
+ Type cast output values according to required element type
+ :param n: string
+ :return:
+ """
if n in elements.keys():
+
+ # Warn if property got deprecated
+ if "deprecated" in elements[n].keys():
+ warnings.warn("Call to deprecated property {}. {}".format(n, elements[n]["deprecated"]), category=DeprecationWarning)
+
if self.__dict__["_{}".format(n)].value == "" and elements[n]['type'] in ["integer", "float", "date"]:
return None
elif elements[n]['type'] == "integer":
@@ -249,21 +317,45 @@ def __getattr__(self, n):
return float(self.__dict__["_{}".format(n)].value)
elif elements[n]['type'] == "date":
return datetime.strptime(self.__dict__["_{}".format(n)].value, "%Y%m%d").date()
- elif elements[n]['type'] == "contact":
+ elif elements[n]['type'] == "parent_item":
return self.__dict__["_{}".format(n)]
elif elements[n]['type'] == "language":
return self.__dict__["_{}".format(n)].get_lang()
+ elif elements[n]['type'] == 'attribute':
+ key = elements[n]['key']
+ values = elements[n]['values']
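+ # translate the stored coded value back into its readable name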
+ if key in self.__dict__["_{}".format(n)].attributes:
+ v = self.__dict__["_{}".format(n)].attributes[key]
+ for value in values:
+ if v in value:
+ return value[0]
+ else:
+ return None
+ elif elements[n]['type'] == "list":
+ return MetadataValueListHelper(self.__dict__["_{}".format(n)])
+ elif elements[n]['type'] == "object_list":
+ return MetadataObjectListHelper(self.__dict__["_{}".format(n)])
else:
return self.__dict__["_{}".format(n)].value
else:
return self.__dict__["_{}".format(n)]
def get_datatype(self):
+ """
+ Get the ArcGIS data type of the current dataset
+ :return:
+ """
# get datatype
desc = arcpy.Describe(self.dataset)
return desc.dataType
def get_workspace(self):
+ """
+ Find the workspace for the current dataset.
+ If the base directory is not a workspace (i.e. when the feature class is located in a feature dataset),
+ walk up the directory tree until a workspace is found
+ :return:
+ """
workspace = self.dataset
desc = arcpy.Describe(workspace)
@@ -275,17 +367,51 @@ def get_workspace(self):
workspace = os.path.dirname(workspace)
if workspace == '' and arcpy.env.workspace:
return arcpy.env.workspace
+ if workspace == '':
+ return os.path.curdir
desc = arcpy.Describe(workspace)
def get_workspace_type(self):
+ """
+ Get the ArcGIS workspace type for the current dataset
+ :return:
+ """
desc = arcpy.Describe(self._workspace)
return desc.workspaceType
def initialize_items(self):
+ """
+ Initialize all items
+ :return:
+ """
for item in self.items:
item.parent = self
+ def rm_gp_history(self):
+ """
+ Remove all items from the geoprocessing history
+ :return:
+ """
+ element = self.elements.find("Esri/DataProperties/lineage")
+ if element is not None:
+ i = 0
+ children = element.findall("Process")
+ for child in children:
+ element.remove(child)
+ i += 1
+ logwrite("Remove {} item(s) from the geoprocessing history".format(i), True)
+ else:
+ logwrite("There are no items in the geoprocessing history", True)
+
+
def save(self, Enable_automatic_updates=False):
+ """
+ Save pending edits to file
+ If the dataset is a feature class, import the temporary XML file back into the GDB
+
+ :param Enable_automatic_updates: boolean
+ :return:
+ """
logwrite("Saving metadata", True)
for item in self.items:
@@ -300,7 +426,11 @@ def save(self, Enable_automatic_updates=False):
arcpy.ImportMetadata_conversion(self.metadata_file, "FROM_ARCGIS", self.dataset,
Enable_automatic_updates=Enable_automatic_updates)
- def cleanup(self, delete_created_fc=False):
+ def cleanup(self):
+ """
+ Remove all temporary files
+ :return:
+ """
try:
logwrite("cleaning up from metadata operation")
if self._workspace_type != 'FileSystem':
@@ -317,8 +447,8 @@ def cleanup(self, delete_created_fc=False):
def finish(self, Enable_automatic_updates=False):
"""
Alias for saving and cleaning up
+ :param Enable_automatic_updates: boolean
:return:
"""
-
self.save(Enable_automatic_updates)
self.cleanup()
diff --git a/arcpy_metadata/metadata_items.py b/arcpy_metadata/metadata_items.py
index 682ca9f..3128254 100644
--- a/arcpy_metadata/metadata_items.py
+++ b/arcpy_metadata/metadata_items.py
@@ -1,31 +1,13 @@
__author__ = 'Thomas.Maschler'
-from arcpy_metadata.metadata_constructors import MetadataItemConstructor
-from arcpy_metadata.metadata_constructors import MetadataListConstructor
-from arcpy_metadata.metadata_constructors import MetadataItemsConstructor
from arcpy_metadata.metadata_constructors import MetadataParentItemConstructor
-
-from arcpy_metadata.elements import contact_elements
from arcpy_metadata.languages import languages
-
# ########## General Info
-class MetadataItem(MetadataItemConstructor):
- def __init__(self, path, name, parent):
- self.path = path
- self.name = name
- super(MetadataItem, self).__init__(parent)
-
-# ########## Keywords
-
-class MetadataList(MetadataListConstructor):
-
- def __init__(self, tagname, path, name, parent=None):
- self.name = name
- super(MetadataList, self).__init__(parent, tagname=tagname, path=path)
+# #### locals
class MetadataLanguage(MetadataParentItemConstructor):
@@ -37,19 +19,22 @@ class MetadataLanguage(MetadataParentItemConstructor):
Predefined language pairs are stored in the global language_code dictionary
"""
- def __init__(self, path, name, parent):
+ def __init__(self, path, name, parent, sync=True):
self.parent = parent
self.name = name
self.path = path
+ self.sync = sync
language_elements = {
"attr_lang": {
"parent": "element",
- "path": "languageCode"},
+ "path": "languageCode",
+ "type": "string"},
"attr_country": {
"parent": "element",
- "path": "countryCode"}
+ "path": "countryCode",
+ "type": "string"}
}
super(MetadataLanguage, self).__init__(self.parent, language_elements)
@@ -65,21 +50,29 @@ def get_lang(self):
return ""
def __setattr__(self, n, v):
- if n in ["path", "parent", "child_elements", "name", "value"]:
+ if n in ["path", "parent", "child_elements", "name", "value", "sync"]:
self.__dict__[n] = v
elif n == "attr_lang":
if v == "" or v is None:
self._attr_lang.attributes = {}
else:
self._attr_lang.attributes = v
+ if self.sync:
+ self._attr_lang.attributes["Sync"] = "TRUE"
+ else:
+ self._attr_lang.attributes["Sync"] = "FALSE"
elif n == "attr_country":
if v == "" or v is None:
self._attr_country.attributes = {}
else:
self._attr_country.attributes = v
+ if self.sync:
+ self._attr_country.attributes["Sync"] = "TRUE"
+ else:
+ self._attr_country.attributes["Sync"] = "FALSE"
else:
if n in self.child_elements.keys():
- if isinstance(v, str) or isinstance(v, unicode):
+ if isinstance(v, (str, unicode)):
self.__dict__["_{}".format(n)].element.text = v
elif v is None:
self.__dict__["_{}".format(n)].element.text = ""
@@ -101,96 +94,13 @@ def __getattr__(self, name):
else:
return self.__dict__[name]
-# #### locals
-
-class MetadataLocal(MetadataParentItemConstructor):
- """
- A MetadataLocal Item
- """
-
- def __init__(self, parent, path, language, country):
-
- self.parent = parent
- self.path = "%s[@language='%s'][@country='%s']" % (path, language, country)
-
- super(MetadataLocal, self).__init__(self.parent)
-
- self.attributes = {}
- self.title = self._create_item(self.element.iter(), self.element, "resTitle")
- self.abstract = self._create_item(self.element.iter(), self.element, "idAbs")
-
-class MetadataLocals(MetadataItemsConstructor):
- """
- A MetadataLocals Item for Localized Titles and Abstracts
- Each Local Item has two children
- - Title
- - Abstract
- and a language and country attribute to define the local language
- Predefined language pairs are stored in the global language_code dictionary
- There can be many MetadataLocals instances
- """
-
- def __init__(self, path, name, parent=None):
-
- self.parent = parent
-
- self.name = name
- self.path = path
-
- super(MetadataLocals, self).__init__(parent, self.path)
- self._locals = {}
-
- for element in self.elements:
- attrib = element.attrib
- found = False
- for lang in languages:
- if languages[lang][0] == attrib["language"]:
- found = True
- break
- if found:
- self._locals[lang] = (MetadataLocal(self.parent, self.path, attrib["language"], attrib["country"]))
- def __iter__(self):
- return iter(self._locals)
- def __getitem__(self, key):
- return self._locals[key]
- def _write(self):
- items_to_remove = []
- for element in self.elements:
- items_to_remove.append(element)
- for element in items_to_remove:
- self.elements.remove(element)
-
- for lang in self._locals:
- self.elements.append(self._locals[lang])
-
- def new_local(self, lang):
-
- if lang in languages.keys():
- language = languages[lang][0]
- country = languages[lang][1]
- else:
- raise KeyError
-
- self._locals[lang] = (MetadataLocal(self.parent, self.path, language, country))
- self._write()
-
-
-class MetadataContact(MetadataParentItemConstructor):
- """
- Just a shortcut MetadataContacts that predefines the paths and position
- """
- # TODO: Define Role, Country and Online Resource list
- def __init__(self, path, name, parent=None, index=0):
- self.name = name
- self.path = "%s[%i]" % (path, index)
- super(MetadataContact, self).__init__(parent, contact_elements)
diff --git a/arcpy_metadata/version.py b/arcpy_metadata/version.py
index f986041..2a31623 100644
--- a/arcpy_metadata/version.py
+++ b/arcpy_metadata/version.py
@@ -1,2 +1,2 @@
-__version__ = '0.4.7'
+__version__ = '0.5'
__author__ = 'nickrsan, thomas.maschler'
diff --git a/tests/__init__.py b/tests/__init__.py
index ad1bee8..81b849c 100644
--- a/tests/__init__.py
+++ b/tests/__init__.py
@@ -1 +1 @@
-__author__ = 'dsx'
+__author__ = 'dsx, nickrsan, thomas.maschler'
diff --git a/tests/test_elements.py b/tests/test_elements.py
new file mode 100644
index 0000000..4236df4
--- /dev/null
+++ b/tests/test_elements.py
@@ -0,0 +1,159 @@
+
+test_elements = {
+ "abstract": "This is the Abstract",
+
+ "alternate_title": "Alternative title",
+
+ "citation": "Citation",
+
+ "citation_contact": {
+ "role": "resource provider",
+ "contact_name": "Name 1",
+ "position": "Posution 1",
+ "organization": "Organisation 1",
+ "contact_info": "Contact Info 1",
+ "email": "Email 1",
+ "address": "Address 1",
+ "city": "City 1",
+ "state": "State 1",
+ "zip": "ZIP CODE 1",
+ "country": "Country 1",
+ "phone_nb": "Phone Number 1",
+ "fax_nb": "Fax Number 1",
+ "hours": "Hours 1",
+ "instructions": "Instructions 1",
+ "link": "Link 1",
+ "protocol": "Protocol 1",
+ "profile": "Profile 1",
+ "or_name": "Online Resource Name 1",
+ "or_desc": "Online Resource Description 1",
+ "or_function": "download"},
+
+ "credits": "Credits",
+
+ "dataset_uri": "Dataset URI",
+
+ "distance_resolution": "Distance Resolution",
+
+ "extent_description": "Extent Description",
+
+ "external_link": "External Link",
+
+ "format": "Format",
+
+ "file_identifier": "File identifier",
+
+ "identifier_code1": "Identifier Code 1",
+
+ "identifier_code2": "Identifier Code 2",
+
+ "identifier_code3": "Identifier Code 3",
+
+ "identifier_code4": "Identifier Code 4",
+
+ "language": "english",
+
+ "last_update": "20170101",
+
+ "license": "License",
+
+ "limitation": "Limitation",
+
+ "maintenance_contact": {
+ "role": "resource provider",
+ "contact_name": "Name 1",
+ "position": "Posution 1",
+ "organization": "Organisation 1",
+ "contact_info": "Contact Info 1",
+ "email": "Email 1",
+ "address": "Address 1",
+ "city": "City 1",
+ "state": "State 1",
+ "zip": "ZIP CODE 1",
+ "country": "Country 1",
+ "phone_nb": "Phone Number 1",
+ "fax_nb": "Fax Number 1",
+ "hours": "Hours 1",
+ "instructions": "Instructions 1",
+ "link": "Link 1",
+ "protocol": "Protocol 1",
+ "profile": "Profile 1",
+ "or_name": "Online Resource Name 1",
+ "or_desc": "Online Resource Description 1",
+ "or_function": "download"},
+
+ "max_scale": 5000,
+
+ "metadata_language": "english",
+
+ "min_scale": 50000,
+
+ "online_resource": [{
+ "link": "Link 1",
+ "protocol": "Protocol 1",
+ "profile": "Profile 1",
+ "name": "Name 1",
+ "description": "Description 1",
+ "function": "download"},
+ {"link": "Link 2",
+ "protocol": "Protocol 2",
+ "profile": "Profile 2",
+ "name": "Name 2",
+ "description": "Description 2",
+ "function": "offline access"}],
+
+ "place_keywords": ["Place 1", "Place 2", "Place 3"],
+
+ "point_of_contact": {
+ "role": "resource provider",
+ "contact_name": "Name 1",
+ "position": "Position 1",
+ "organization": "Organisation 1",
+ "contact_info": "Contact Info 1",
+ "email": "Email 1",
+ "address": "Address 1",
+ "city": "City 1",
+ "state": "State 1",
+ "zip": "ZIP CODE 1",
+ "country": "Country 1",
+ "phone_nb": "Phone Number 1",
+ "fax_nb": "Fax Number 1",
+ "hours": "Hours 1",
+ "instructions": "Instructions 1",
+ "link": "Link 1",
+ "protocol": "Protocol 1",
+ "profile": "Profile 1",
+ "or_name": "Online Resource Name 1",
+ "or_desc": "Online Resource Description 1",
+ "or_function": "download"},
+
+ "purpose": "Puropose",
+
+ "resource_label": "Resource Lable",
+
+ "scale_resolution": 1000,
+
+ "source": "Source",
+
+ "supplemental_information": "Supplemental Information",
+
+ "title": "Title",
+
+ "tags": ["Tag 1", "Tag 2", "Tag 3"],
+
+ "temporal_extent_description": "Temporal Extent Desciption",
+
+ "temporal_extent_end": "20170101",
+
+ "temporal_extent_instance": "20170101",
+
+ "temporal_extent_start": "20170101",
+
+ "update_frequency": "continual",
+
+ "update_frequency_description": "Update frequency description"
+
+}
+
+
+
diff --git a/tests/test_metadata_editor.py b/tests/test_metadata_editor.py
index 9f7953d..1d7832a 100644
--- a/tests/test_metadata_editor.py
+++ b/tests/test_metadata_editor.py
@@ -1,111 +1,211 @@
from __future__ import print_function
-__author__ = 'nickrsan'
-
-import datetime
import unittest
import os
import sys
import shutil
+import distutils
+from distutils import dir_util
import tempfile
import arcpy
import gc
+import inspect # allows testing arcpy_metadata even when it is not installed as a module
+import datetime
-#allow to test arcpy_metadata even when it is not installed as module
-import inspect
-currentdir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
-parentdir = os.path.dirname(currentdir)
-sys.path.insert(0, parentdir)
+current_dir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
+parent_dir = os.path.dirname(current_dir)
+sys.path.insert(0, parent_dir)
import arcpy_metadata as md
-original_test_data_folder = os.path.join(os.path.dirname(__file__), "test_data")
-
-
-def copy_test_data():
- print("Making new test data copy")
- temp_data_folder = tempfile.mkdtemp("arcpy_metadata_unit_tests")
- shutil.rmtree(temp_data_folder) # this is a silly hack - I want mkdtemp to generate a temp directory, but it needs to not exist in order to use copytree - TODO: Find a better way to handle this
- shutil.copytree(original_test_data_folder, temp_data_folder) # copy the directory tree so that we can get a clean copy of the data to work with and preserve the test data as clean
- return temp_data_folder
-
-
-class TestExampleCode(unittest.TestCase):
- """
- To start with, let's just get a simple test in that tests the code from the perspective of how someone might use it.
- We'll do that by running the example code, which should certainly run.
- """
-
- def _run_example(self, feature_class):
- metadata = md.MetadataEditor(feature_class) # also has a feature_layer parameter if you're working with one, but edits get saved back to the source feature class
- metadata.title = "The metadata title!"
-
- generated_time = "This layer was generated on {0:s}".format(
- datetime.datetime.now().strftime("%m/%d/%Y %I:%M %p"))
-
- metadata.purpose = "Layer represents locations of the rare Snipe."
-
- metadata.abstract += "generated by ___ software"
- metadata.abstract += generated_time
- metadata.tags += ["foo", "bar", "baz"] # tags.extend is equivalent to maintain list semantics
-
- metadata.finish() # save the metadata back to the original source feature class and cleanup. Without calling finish(), your edits are NOT saved!
-
- del metadata
- gc.collect()
-
- # TODO: for now, no assertions, we just want the code to run start to finish and we can manually check.
- # Later, we'll make code that actually attempts to read the data back and confirms it's there
- # TODO: make sure, GDB locks get properly remove to be able to run all test directly after another.
- # Deleting the metadata object and running the Garbage Collector doesn't seem to do anything
- # Right now you have to run test separately ( Makes sense, since you have to check the metadata manually)
-
- def test_shapefile_no_meta(self):
- test_data_folder = copy_test_data()
- print("Shp without metadata")
- self._run_example(os.path.join(test_data_folder, "simple_poly_no_metadata.shp"))
-
- def test_shapefile_with_meta(self):
- test_data_folder = copy_test_data()
- print("Shp with metadata")
- self._run_example(os.path.join(test_data_folder, "simple_poly_w_base_metadata.shp"))
-
- def test_shapefile_without_xml(self):
- test_data_folder = copy_test_data()
- print("Shp without XML File")
- self._run_example(os.path.join(test_data_folder, "simple_poly_no_xml.shp"))
-
- def test_feature_class_no_meta(self):
- test_data_folder = copy_test_data()
- print("FC without metadata")
- self._run_example(os.path.join(test_data_folder, "test.gdb", "root_poly"))
-
- def test_feature_class_with_meta(self):
- test_data_folder = copy_test_data()
- print("FC in dataset with metadata")
- self._run_example(os.path.join(test_data_folder, "test.gdb", "dataset", "dataset_poly"))
-
- def test_gdb_table(self):
- test_data_folder = copy_test_data()
- print("Table no metadata")
- self._run_example(os.path.join(test_data_folder, "test.gdb", "root_table"))
-
- def test_fc_layer(self):
- test_data_folder = copy_test_data()
- print("Feature class layer")
- arcpy.MakeFeatureLayer_management(os.path.join(test_data_folder, "test.gdb", "root_poly"), "layer")
- self._run_example("layer")
-
- def test_layer_file(self):
- test_data_folder = copy_test_data()
- print("Layer file metadata")
- self._run_example(os.path.join(test_data_folder, r"layer.lyr"))
-
- def test_raster_dataset(self):
- test_data_folder = copy_test_data()
- print("Raster dataset")
- self._run_example(os.path.join(test_data_folder, r"test.gdb\simple_raster"))
-
- def test_raster_file(self):
- test_data_folder = copy_test_data()
- print("Raster file")
- self._run_example(os.path.join(test_data_folder, r"simple_raster.tif"))
+# import test element dictionary
+from test_elements import test_elements
+
+
+
+
+class TestMetadataWriteRead(unittest.TestCase):
+ """
+    Write every element listed in the test_elements dictionary to each test dataset,
+    then read the values back and assert that they were saved correctly.
+ """
+
+ def __init__(self, testcases):
+ self.temp_data_folder = None
+ super(TestMetadataWriteRead, self).__init__(testcases)
+
+ def setUp(self):
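+        # work on a fresh copy of the test data for every test so the original files stay untouched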
+ original_test_data_folder = os.path.join(os.path.dirname(__file__), "test_data")
+ self.temp_data_folder = tempfile.mkdtemp("arcpy_metadata_unit_tests")
+ distutils.dir_util.copy_tree(original_test_data_folder, self.temp_data_folder)
+
+ def tearDown(self):
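+        # delete all datasets and workspaces in the temporary copy, then remove the temp folder if nothing is locked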
+ arcpy.env.workspace = self.temp_data_folder
+        done = True  # set to False if any workspace cannot be deleted
+ # delete all datasets
+ datasets = arcpy.ListDatasets()
+ for dataset in datasets:
+ arcpy.Delete_management(dataset)
+
+ # delete all workspaces
+ workspaces = arcpy.ListWorkspaces()
+ for workspace in workspaces:
+
+            # clear all locks
+ arcpy.Exists(workspace)
+ arcpy.Compact_management(workspace)
+ arcpy.Exists(workspace)
+ try:
+ arcpy.Delete_management(workspace)
+ except arcpy.ExecuteError:
+ print("cannot delete {} due to lock".format(workspace))
+ done = False
+
+ # delete directory with all remaining files
+ if done:
+ distutils.dir_util.remove_tree(self.temp_data_folder)
+
+
+ @staticmethod
+ def _write_metadata(data_set):
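+        """Write every entry from the test_elements dictionary to the metadata of the given dataset."""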
+ metadata = md.MetadataEditor(data_set)
+ # also has a feature_layer parameter if you're working with one, but edits get saved back to the source feature class
+
+ for key in test_elements.keys():
+ # write simple elements directly to property
+ if not isinstance(test_elements[key], (list, dict, )):
+ setattr(metadata, key, test_elements[key])
+
+            # for nested elements, loop over the children and write them to the properties of the parent element
+ if isinstance(test_elements[key], dict):
+ item = getattr(metadata, key)
+ for k in test_elements[key].keys():
+ setattr(item, k, test_elements[key][k])
+
+            # for list elements, either loop over the children and set each child's properties, or write the entire list to the property directly
+ if isinstance(test_elements[key], list):
+ if isinstance(test_elements[key][0], dict):
+ item = getattr(metadata, key)
+ i = 0
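+                    # grow or shrink the list of child objects until it matches the number of test entries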
+ while len(item) < len(test_elements[key]):
+ item.new()
+ while len(item) > len(test_elements[key]):
+ item.pop()
+ for element in test_elements[key]:
+ for k in element.keys():
+ setattr(item[i], k, test_elements[key][i][k])
+ i += 1
+ else:
+ setattr(metadata, key, test_elements[key])
+
+ metadata.finish() # save the metadata back to the original source feature class and cleanup. Without calling finish(), your edits are NOT saved!
+
+ del metadata
+ gc.collect()
+
+ def _read_metadata(self, dataset):
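+        """Read the metadata back from the dataset and assert that each value matches test_elements."""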
+ metadata = md.MetadataEditor(dataset)
+
+ # Loop over all elements listed in test_elements to check if values were correctly saved
+ for key in test_elements.keys():
+ item = getattr(metadata, key)
+
+ # simple elements (text, numeric, date)
+ if not isinstance(test_elements[key], (list, dict)):
+ # convert date back to string
+ if isinstance(item, datetime.date):
+ item = item.strftime("%Y%m%d")
+ self.assertEqual(item, test_elements[key],
+ 'Value for element {} was not correctly saved'.format(key))
+
+            # parent items (e.g. contacts)
+ elif isinstance(test_elements[key], dict):
+ for k in test_elements[key].keys():
+ child = test_elements[key][k]
+ self.assertEqual(getattr(item, k), child,
+ 'Value for element {}.{} was not correctly saved'.format(key, k))
+
+ # lists
+ elif isinstance(test_elements[key], list):
+ # nested lists
+ if isinstance(test_elements[key][0], dict):
+
+ # make sure both lists are sorted in the same way
+                    keys = list(test_elements[key][0].keys())  # list() so the keys are indexable on Python 3
+ sorted_items = sorted(item, key=lambda x: (getattr(x, keys[0]), getattr(x, keys[1])))
+ sorted_elements = sorted(test_elements[key], key=lambda x: (x[keys[0]], x[keys[1]]))
+
+ i = 0
+ for sub_element in sorted_elements:
+ for k in sub_element:
+ child = sub_element[k]
+ self.assertEqual(getattr(sorted_items[i], k), child,
+ 'Value for element {}[{}].{} was not correctly saved'.format(key, i, k))
+ i += 1
+ # simple lists
+ else:
+                    # compare sorted copies (list.sort() sorts in place and returns None)
+                    self.assertEqual(sorted(item), sorted(test_elements[key]),
+ 'Value for element {} was not correctly saved'.format(key))
+ del metadata
+ gc.collect()
+
+    # TODO: make sure GDB locks get properly removed so that all tests can run directly after one another.
+    # Deleting the metadata object and running the garbage collector doesn't seem to do anything;
+    # right now you may have to run the tests separately.
+
+ def test_shapefile_no_meta(self):
+ print("Shp without metadata")
+ self._write_metadata(os.path.join(self.temp_data_folder, "simple_poly_no_metadata.shp"))
+ self._read_metadata(os.path.join(self.temp_data_folder, "simple_poly_no_metadata.shp"))
+
+ def test_shapefile_with_meta(self):
+ print("Shp with metadata")
+ self._write_metadata(os.path.join(self.temp_data_folder, "simple_poly_w_base_metadata.shp"))
+ self._read_metadata(os.path.join(self.temp_data_folder, "simple_poly_w_base_metadata.shp"))
+
+ def test_shapefile_without_xml(self):
+ print("Shp without XML File")
+ self._write_metadata(os.path.join(self.temp_data_folder, "simple_poly_no_xml.shp"))
+ self._read_metadata(os.path.join(self.temp_data_folder, "simple_poly_no_xml.shp"))
+
+ def test_feature_class_no_meta(self):
+ print("FC without metadata")
+ self._write_metadata(os.path.join(self.temp_data_folder, "test.gdb", "root_poly"))
+ self._read_metadata(os.path.join(self.temp_data_folder, "test.gdb", "root_poly"))
+
+ def test_feature_class_with_meta(self):
+ print("FC in dataset with metadata")
+ self._write_metadata(os.path.join(self.temp_data_folder, "test.gdb", "dataset", "dataset_poly"))
+ self._read_metadata(os.path.join(self.temp_data_folder, "test.gdb", "dataset", "dataset_poly"))
+
+ def test_gdb_table(self):
+ print("Table no metadata")
+ self._write_metadata(os.path.join(self.temp_data_folder, "test.gdb", "root_table"))
+ self._read_metadata(os.path.join(self.temp_data_folder, "test.gdb", "root_table"))
+
+ def test_fc_layer(self):
+ print("Feature class layer")
+ arcpy.MakeFeatureLayer_management(os.path.join(self.temp_data_folder, "test.gdb", "root_poly"), "layer")
+ self._write_metadata("layer")
+ self._read_metadata("layer")
+
+ def test_layer_file(self):
+ print("Layer file metadata")
+ self._write_metadata(os.path.join(self.temp_data_folder, r"layer.lyr"))
+ self._read_metadata(os.path.join(self.temp_data_folder, r"layer.lyr"))
+
+ def test_raster_dataset(self):
+ print("Raster dataset")
+ self._write_metadata(os.path.join(self.temp_data_folder, r"test.gdb\simple_raster"))
+ self._read_metadata(os.path.join(self.temp_data_folder, r"test.gdb\simple_raster"))
+
+ def test_raster_file(self):
+ print("Raster file")
+ self._write_metadata(os.path.join(self.temp_data_folder, r"simple_raster.tif"))
+ self._read_metadata(os.path.join(self.temp_data_folder, r"simple_raster.tif"))
+
+
+if __name__ == '__main__':
+ suite = unittest.TestLoader().loadTestsFromTestCase(TestMetadataWriteRead)
+ unittest.TextTestRunner(verbosity=2).run(suite)
+
+# unittest.main()
\ No newline at end of file