# ---------------------------------------------------------------------------
# SSURGO_BatchDownload_byRegion.py
# Created on: 01-09-2014
# Author: Adolfo.Diaz
# GIS Specialist
# National Soil Survey Center
# USDA - NRCS
# e-mail: [email protected]
# phone: 608.662.4422 ext. 216
# ---------------------------------------------------------------------------
# Download the most current SSURGO data from Web Soil Survey by Soil Survey Region. A buffer of
# about 2 surveys around the region will be included. This tool will normally be used in conjunction
# with the "Create Regional Spatial Geodatabase" tool.
#
# Uses Soil Data Access query to generate Areaname and version date for areasymbols being downloaded.
#
# This tool is specific to Soil Survey Regions in that a master table that contains SSA ownership
# is referenced.
#
# A SSURGO Access template will always be downloaded. If an existing dataset is found in the output
# folder the dates will not be compared; the existing dataset is skipped rather than re-downloaded.
#
# The unzipped folder will be renamed to match the NRCS Geodata naming convention for soils. i.e. soil_wi025
#
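# Illustrative usage sketch (an assumption, not part of the original documentation): the tool
# is run as an ArcGIS script tool with two parameters; the values below mirror the
# commented-out defaults in the main body.
#   Parameter 0 - regionChoice: "Region 10"             (Soil Survey Region to download)
#   Parameter 1 - outputFolder: r'K:\SSURGO_FY16\WSS'   (folder the zip files are written to)
#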
# Updated: 2016-12-16 Converted the SOAP request to POST-REST request to SDaccess. -- AD
# ==========================================================================================
# Updated 3/15/2021 - Adolfo Diaz
#
# - All urllib libraries were updated to reflect python 3.6 protocols
# - Updated and Tested for ArcGIS Pro 2.5.2 and python 3.6
# - All describe functions use the arcpy.da.Describe functionality.
# - All intermediate datasets are written to "in_memory" instead of written to a FGDB and
# later deleted. This avoids having to check and delete intermediate data during every
# execution.
# - All cursors were updated to arcpy.da
# - Added code to remove layers from an .aprx rather than simply deleting them
# - Updated AddMsgAndPrint to remove ArcGIS 10 boolean and gp function
# - Updated errorMsg() Traceback functions slightly changed for Python 3.6.
# - Added parallel processing factor environment
# - switched from sys.exit() to exit()
# - All gp functions were translated to arcpy
# - Every function including main is in a try/except clause
# - Main code is wrapped in if __name__ == '__main__': even though the script will never be
# used as an independent library.
# - Normal messages are no longer Warnings unnecessarily.
## ===================================================================================
def errorMsg():
try:
exc_type, exc_value, exc_traceback = sys.exc_info()
theMsg = "\t" + traceback.format_exception(exc_type, exc_value, exc_traceback)[1] + "\n\t" + traceback.format_exception(exc_type, exc_value, exc_traceback)[-1]
if theMsg.find("exit") > -1:
AddMsgAndPrint("\n\n")
pass
else:
AddMsgAndPrint(theMsg,2)
except:
        AddMsgAndPrint("Unhandled error in errorMsg method", 2)
pass
## ================================================================================================================
def AddMsgAndPrint(msg, severity=0):
# prints message to screen if run as a python script
# Adds tool message to the geoprocessor
#
#Split the message on \n first, so that if it's multiple lines, a GPMessage will be added for each line
try:
print(msg)
#for string in msg.split('\n'):
#Add a geoprocessing message (in case this is run as a tool)
if severity == 0:
arcpy.AddMessage(msg)
elif severity == 1:
arcpy.AddWarning(msg)
elif severity == 2:
arcpy.AddError("\n" + msg)
except:
pass
## ===================================================================================
def Number_Format(num, places=0, bCommas=True):
    # Format a number according to locality and given places
    try:
        theNumber = locale.format_string("%.*f", (places, num), bCommas)
        return theNumber
    except:
        errorMsg()
        return ""
## ===================================================================================
def getRegionalAreaSymbolList(regionBufferTable, masterTable, userRegionChoice):
    # Returns the list of areasymbols (including the buffer surveys) found in the region
    # buffer table for the selected region, along with the number of SSAs actually owned
    # by that region. The region number is parsed from the user's region choice: an
    # 8-character value (e.g. "Region 7") yields the last character, otherwise the last
    # 2 characters (e.g. "Region 10").
    # Example areasymbol list: [u'WI001', u'WI003']
try:
# List of areasymbols that will be downloaded. Includes buffer
areaSymbolList = []
where_clause = "\"Region_Download\" = '" + userRegionChoice + "'"
with arcpy.da.SearchCursor(regionBufferTable, ('AREASYMBOL'), where_clause) as cursor:
for row in cursor:
areaSymbolList.append(row[0])
if len(userRegionChoice) == 8:
region = userRegionChoice[-1:]
else:
region = userRegionChoice[-2:]
where_clause = "\"Region_Ownership\" = " + str(region)
numOfRegionalSSA = len([row[0] for row in arcpy.da.SearchCursor(masterTable, ('AREASYMBOL'), where_clause)])
return areaSymbolList,numOfRegionalSSA
except:
        errorMsg()
return ""
## ===================================================================================
def getSDMaccessDict(areaSymbol):
try:
        # Create empty dictionary that will contain the areasymbol, survey date and areaname
sdmAccessDict = dict()
#sQuery = "SELECT AREASYMBOL, AREANAME, CONVERT(varchar(10), [SAVEREST], 126) AS SAVEREST FROM SASTATUSMAP WHERE AREASYMBOL LIKE '" + areaSymbol + "' AND SAPUBSTATUSCODE = 2 ORDER BY AREASYMBOL"
sQuery = "SELECT AREASYMBOL, AREANAME, CONVERT(varchar(10), [SAVEREST], 126) AS SAVEREST FROM SASTATUSMAP WHERE AREASYMBOL LIKE '" + areaSymbol + "' ORDER BY AREASYMBOL"
url = "https://sdmdataaccess.nrcs.usda.gov/Tabular/SDMTabularService/post.rest"
# Create request using JSON, return data as JSON
dRequest = dict()
dRequest["format"] = "JSON"
dRequest["query"] = sQuery
jData = json.dumps(dRequest) # {"QUERY": "SELECT AREASYMBOL, AREANAME, CONVERT(varchar(10), [SAVEREST], 126) AS SAVEREST FROM SASTATUSMAP WHERE AREASYMBOL LIKE \'WI025\' ORDER BY AREASYMBOL", "FORMAT": "JSON"}
        # Send request to SDA Tabular service using the urllib library
jData = jData.encode('ascii')
response = urllib.request.urlopen(url, jData)
jsonString = response.read() # {"Table":[["WI025","Dane County, Wisconsin","2016-09-27"]]}
# Convert the returned JSON string into a Python dictionary.
data = json.loads(jsonString) # {u'Table': [[u'WI025', u'Dane County, Wisconsin', u'2016-09-27']]}
areasym = data['Table'][0][0]
areaname = data['Table'][0][1]
date = data['Table'][0][2]
sdmAccessDict[areaSymbol] = (areasym + "|" + str(date) + "|" + areaname)
return sdmAccessDict
""" ---------------------------------------------- This is the Original SOAP request to the SDMAccess; Replaced by a POST REST request -------------------------------------------------"""
## # This RunQuery runs your own custom SQL or SQL Data Shaping query against the Soil Data Mart database and returns an XML
## # data set containing the results. If an error occurs, an exception will be thrown.
## sXML = """<?xml version="1.0" encoding="utf-8"?>
## <soap12:Envelope xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:soap12="https://www.w3.org/2003/05/soap-envelope">
## <soap12:Body>
## <RunQuery xmlns="http://SDMDataAccess.nrcs.usda.gov/Tabular/SDMTabularService.asmx">
## <Query>""" + sQuery + """</Query>
## </RunQuery>
## </soap12:Body>
## </soap12:Envelope>"""
##
## dHeaders = dict()
## dHeaders["Host"] = "sdmdataaccess.nrcs.usda.gov"
## #dHeaders["User-Agent"] = "NuSOAP/0.7.3 (1.114)"
## #dHeaders["Content-Type"] = "application/soap+xml; charset=utf-8"
## dHeaders["Content-Type"] = "text/xml; charset=utf-8"
## dHeaders["Content-Length"] = len(sXML)
## dHeaders["SOAPAction"] = "http://SDMDataAccess.nrcs.usda.gov/Tabular/SDMTabularService.asmx/RunQuery"
## sURL = "SDMDataAccess.nrcs.usda.gov"
##
## try:
##
## # Create SDM connection to service using HTTP
## conn = httplib.HTTPConnection(sURL, 80)
##
## # Send request in XML-Soap
## conn.request("POST", "/Tabular/SDMTabularService.asmx", sXML, dHeaders)
##
## # Get back XML response
## response = conn.getresponse()
## xmlString = response.read()
##
## # Close connection to SDM
## conn.close()
##
## except HTTPError, e:
## AddMsgAndPrint("\t" + areaSymbol + " encountered HTTP Error querying SDMaccess (" + str(e.code) + ")", 2)
## sleep(i * 3)
## return ""
##
## except URLError, e:
## AddMsgAndPrint("\t" + areaSymbol + " encountered URL Error querying SDMaccess: " + str(e.reason), 2)
## sleep(i * 3)
## return ""
##
## except socket.timeout, e:
## AddMsgAndPrint("\t" + areaSymbol + " encountered server timeout error querying SDMacess", 2)
## sleep(i * 3)
## return ""
##
## except socket.error, e:
## AddMsgAndPrint("\t" + areaSymbol + " encountered SDMaccess connection failure", 2)
## sleep(i * 3)
## return ""
##
## except:
## errorMsg
## return ""
##
## # Convert XML to tree format
## tree = ET.fromstring(xmlString)
##
## areasym = ""
## areaname = ""
## date = ""
##
## # Iterate through XML tree, finding required elements...
## for rec in tree.iter():
##
## if rec.tag == "AREASYMBOL":
## areasym = str(rec.text)
##
## if rec.tag == "AREANAME":
## areaname = str(rec.text)
##
## if rec.tag == "SAVEREST":
## # get the YYYYMMDD part of the datetime string
## # then reformat to match SQL query
## date = str(rec.text).split(" ")[0]
except HTTPError as e:
AddMsgAndPrint('HTTP Error' + str(e),2)
return ""
except:
errorMsg()
return ""
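## ===================================================================================
# Illustrative sketch only, not called by this tool: a minimal, generic form of the
# SDA Tabular Service POST-REST round trip used in getSDMaccessDict above. It assumes
# the service accepts a JSON body with "format" and "query" keys; the function name
# and the example query in the comment are hypothetical.
def runSDAQuerySketch(sQuery):
    # Example: runSDAQuerySketch("SELECT AREASYMBOL, AREANAME FROM SASTATUSMAP WHERE AREASYMBOL = 'WI025'")
    try:
        url = "https://sdmdataaccess.nrcs.usda.gov/Tabular/SDMTabularService/post.rest"
        # Build the JSON request body and POST it to the Tabular service
        jData = json.dumps({"format": "JSON", "query": sQuery}).encode('ascii')
        response = urllib.request.urlopen(url, jData)
        # Response is a JSON string such as {"Table":[["WI025","Dane County, Wisconsin","2016-09-27"]]}
        return json.loads(response.read())
    except:
        errorMsg()
        return ""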
## ===================================================================================
def GetDownload(areaSym, surveyDate):
""" download survey from Web Soil Survey URL and return name of the zip file
want to set this up so that download will retry several times in case of error
return empty string in case of complete failure. Allow main to skip a failed
survey, but keep a list of failures
As of Aug 2013, states are using either a state or US 2003 template databases which would
    result in two possible zip file names. If that changes in the future, these URLs will fail
UPDATED: 1/9/2014 In this version, only the most current SSURGO dataset is downloaded
with a SSURGO Access Template
example URL without Template:
https://websoilsurvey.sc.egov.usda.gov/DSD/Download/Cache/SSA/wss_SSA_NE001_[2012-08-10].zip"""
#create URL string from survey string and WSS 3.0 cache URL
baseURL = "https://websoilsurvey.sc.egov.usda.gov/DSD/Download/Cache/SSA/"
try:
# create two possible zip file URLs, depending on the valid Template databases available
# Always download dataset with a SSURGO Access template.
zipName1 = "wss_SSA_" + areaSym + "_soildb_US_2003_[" + surveyDate + "].zip" # wss_SSA_WI025_soildb_US_2003_[2012-06-26].zip
zipName2 = "wss_SSA_" + areaSym + "_soildb_" + areaSym[0:2] + "_2003_[" + surveyDate + "].zip" # wss_SSA_WI025_soildb_WI_2003_[2012-06-26].zip
        zipURL1 = baseURL + zipName1  # https://websoilsurvey.sc.egov.usda.gov/DSD/Download/Cache/SSA/wss_SSA_WI025_soildb_US_2003_[2012-06-26].zip
zipURL2 = baseURL + zipName2 # https://websoilsurvey.sc.egov.usda.gov/DSD/Download/Cache/SSA/wss_SSA_WI025_soildb_WI_2003_[2012-06-26].zip
AddMsgAndPrint("\tGetting zipfile from Web Soil Survey...", 0)
# number of attempts allowed
attempts = 5
for i in range(attempts):
try:
# create a response object for the requested URL to download a specific SSURGO dataset.
try:
# try downloading zip file with US 2003 Template DB first
## r = requests.get(zipURL1)
## z = zipfile.ZipFile(io.BytesIO(r.content))
## zipContents = z.namelist()
## z.extractall(path=outputFolder,members=zipContents)
request = urlopen(zipURL1)
zipName = zipName1
except:
# if the zip file with US Template DB is not found, try the state template for 2003
# if the second attempt fails, it should fall down to the error messages
request = urlopen(zipURL2)
zipName = zipName2
# path to where the zip file will be written to
local_zip = os.path.join(outputFolder, zipName) # C:\Temp\peaslee_download\wss_SSA_WI025_soildb_WI_2003_[2012-06-26].zip
                # delete the output zip file if it exists
if os.path.isfile(local_zip):
os.remove(local_zip)
# response object is actually a file-like object that can be read and written to a specific location
output = open(local_zip, "wb")
output.write(request.read())
output.close()
# Download succeeded; return zipName; no need for further attempts; del local variables
del request, local_zip, output, attempts, zipName1, zipName2, zipURL1, zipURL2
return zipName
except HTTPError as e:
AddMsgAndPrint("\t" + areaSym + " encountered HTTP Error (" + str(e.code) + ")", 2)
except URLError as e:
AddMsgAndPrint("\t" + areaSym + " encountered URL Error: " + str(e.reason), 2)
except socket.timeout as e:
AddMsgAndPrint("\t" + areaSym + " encountered server timeout error", 2)
except socket.error as e:
                AddMsgAndPrint("\t" + areaSym + " encountered Web Soil Survey connection failure", 2)
except:
# problem deleting partial zip file after connection error?
# saw some locked, zero-byte zip files associated with connection errors
AddMsgAndPrint("\tFailed to download zipfile", 0)
sleep(1)
return ""
# Download Failed!
return ""
except:
errorMsg()
return ""
## ===================================== MAIN BODY ==============================================
# Import system modules
import arcpy, sys, os, locale, string, traceback, shutil, zipfile, glob, socket, json, urllib
from urllib.request import Request, urlopen
from urllib.error import URLError, HTTPError
#from urllib2 import urlopen, URLError, HTTPError
from arcpy import env
from time import sleep
if __name__ == '__main__':
try:
#--------------------------------------------------------------------------------------------Set Parameters
arcpy.env.parallelProcessingFactor = "75%"
arcpy.env.overwriteOutput = True
# Script arguments...
regionChoice = arcpy.GetParameterAsText(0) # User selects what region to download
#regionChoice = "Region 10"
outputFolder = arcpy.GetParameterAsText(1) # Folder to write the zip files to
#outputFolder = r'K:\SSURGO_FY16\WSS'
# Path to the regional table that contains SSAs by region with extra extent and master table
        regionalTable = os.path.dirname(sys.argv[0]) + os.sep + r"SSURGO_Soil_Survey_Area.gdb\SSA_by_Region_buffer"
        masterTable = os.path.dirname(sys.argv[0]) + os.sep + r"SSURGO_Soil_Survey_Area.gdb\soilsa_a_nrcs"
# set workspace to output folder
env.workspace = outputFolder
        # Bail if regional master table is not found
if not arcpy.Exists(regionalTable) or not arcpy.Exists(masterTable):
AddMsgAndPrint("\nRegion Buffer Table or Master Table is missing from " + os.path.dirname(sys.argv[0]),2)
exit()
        # Get a list of areasymbols to download from the Regional Master Table. [u'WI001', u'WI003']
asList,numOfRegionalSSA = getRegionalAreaSymbolList(regionalTable,masterTable,regionChoice)
if not len(asList) > 0:
AddMsgAndPrint("\nNo Areasymbols were selected. Possible problem with table",2)
exit()
AddMsgAndPrint("\n\n" + str(len(asList)) + " SSURGO Dataset(s) will be downloaded for " + regionChoice)
AddMsgAndPrint("\tNumber of Soil Survey Areas assigned to " + regionChoice + ": " + str(numOfRegionalSSA))
AddMsgAndPrint("\tNumber of Additional SSAs to be downloaded for the use of static datasets: " + str(len(asList) - numOfRegionalSSA))
failedList = list() # track list of failed downloads
# Progress Counter
iGet = 0
arcpy.SetProgressor("step", "Downloading current SSURGO data from Web Soil Survey.....", 0, len(asList), 1)
asList.sort()
for SSA in asList:
AddMsgAndPrint("\nAttempting connection and download for: " + SSA)
iGet += 1
            # Query SDMaccess Areaname and Spatial Version Date for a given areasymbol; return a dictionary
asDict = getSDMaccessDict(SSA) #{u'WI001': 'WI001, 2011-08-10, Adams County, Wisconsin'}
# if asDict came back empty, try to retrieve information again
if len(asDict) < 1:
asDict = getSDMaccessDict(SSA)
# Could not get SDaccess info for this SSA - cannot continue
if len(asDict) < 1:
AddMsgAndPrint("\tCould not get information for " + SSA + " from SD Access",2)
failedList.append(SSA)
continue
survey = asDict[SSA]
surveyInfo = survey.split("|")
# Get Areasymbol, Date, and Survey Name from 'asDict'
areaSym = surveyInfo[0].strip().upper() # Why get areaSym again???
surveyDate = surveyInfo[1].strip() # Don't need this since we will always get the most current
surveyName = surveyInfo[2].strip() # Adams County, Wisconsin
# set final path to NRCS Geodata Standard for Soils; This is what the unzipped folder will be renamed to
newFolder = os.path.join(outputFolder, "soil_" + areaSym.lower())
if os.path.exists(newFolder):
#AddMsgAndPrint("\nOutput dataset for " + areaSym + " already exists and will be overwritten", 0)
#arcpy.Delete_management(newFolder, "Folder")
AddMsgAndPrint("\tOutput dataset for " + areaSym + " already exists. Moving to the next one", 0)
continue
AddMsgAndPrint("\tDownloading survey " + areaSym + ": " + surveyName + " - Version: " + str(surveyDate), 0)
arcpy.SetProgressorLabel("Downloading survey " + areaSym.upper() + " (" + Number_Format(iGet, 0, True) + " of " + Number_Format(len(asList), 0, True) + ")")
# Allow for multiple attempts to get zip file
iTry = 2
# Download the zip file; Sometimes a corrupt zip file is downloaded, so a second attempt will be made if the first fails
for i in range(iTry):
try:
zipName = GetDownload(areaSym, surveyDate) # wss_SSA_WI025_soildb_WI_2003_[2012-06-26].zip
# path to the zip file i.e C:\Temp\peaslee_download\wss_SSA_WI025_soildb_WI_2003_[2012-06-26].zip
local_zip = os.path.join(outputFolder, zipName)
# if file is valid zipfile extract the file contents
#if os.path.isfile(local_zip):
if zipfile.is_zipfile(local_zip):
zipSize = (os.stat(local_zip).st_size)/1048576
# Proceed if size of zip file is greater than 0 bytes
if zipSize > 0:
                            # Less than 1 MB; report in KB with 2 decimal places
                            if zipSize < 1:
                                AddMsgAndPrint("\t\tUnzipping " + Number_Format(zipSize * 1024, 2, True) + " KB file to " + outputFolder, 0)
# Greater than 1 would be Megabytes; show 1 decimal place
else:
AddMsgAndPrint("\t\tUnzipping " + Number_Format(zipSize, 1, True) + " MB file to " + outputFolder, 0)
# Extract all members from the archive to the current working directory
with zipfile.ZipFile(local_zip, "r") as z:
# a bad zip file returns exception zipfile.BadZipFile
z.extractall(outputFolder)
# remove zip file after it has been extracted,
# allowing a little extra time for file lock to clear
sleep(3)
os.remove(local_zip)
# rename output folder to NRCS Geodata Standard for Soils
if os.path.isdir(os.path.join(outputFolder, areaSym.upper())):
# this must be a newer zip file using the uppercase AREASYMBOL directory
os.rename(os.path.join(outputFolder, areaSym.upper()), newFolder)
elif os.path.isdir(os.path.join(outputFolder, zipName[:-4])):
# this is an older zip file that has the 'wss_' directory structure
os.rename(os.path.join(outputFolder, zipName[:-4]), newFolder)
else:
# none of the subfolders within the zip file match any of the expected names
AddMsgAndPrint("Subfolder within the zip file does not match any of the expected names",2)
exit()
# import FGDC metadata to mapunit polygon shapefile
spatialFolder = os.path.join(newFolder, "spatial")
env.workspace = spatialFolder
shpList = arcpy.ListFeatureClasses("soilmu_a*", "Polygon")
## try:
## if len(shpList) == 1:
## muShp = shpList[0]
## AddMsgAndPrint("\t\tImporting metadata for " + muShp)
## metaData = os.path.join(newFolder, "soil_metadata_" + areaSym.lower() + ".xml")
## arcpy.ImportMetadata_conversion(metaData, "FROM_FGDC", os.path.join(spatialFolder, muShp), "ENABLED")
## del spatialFolder, muShp, metaData
##
## except:
## AddMsgAndPrint("\t\tImporting metadata for " + muShp + " Failed. ", 2)
## pass
# end of successful zip file download
break
# Zip file size is empty. Attempt again if 2nd attempt has not been executed
else:
if i == 0:
                                AddMsgAndPrint("\n\tZip file for " + areaSym + " is empty. Reattempting to download", 1)
os.remove(local_zip)
continue
# Zip file is corrupt or missing
else:
if i == 0:
                            AddMsgAndPrint("\n\tZip file for " + areaSym + " is missing. Reattempting to download", 1)
continue
# download zip file again if this is first error
except zipfile.BadZipfile:
AddMsgAndPrint("Bad zip file?", 2)
pass
except:
pass
# download for this survey failed twice
if not os.path.exists(newFolder):
AddMsgAndPrint("\n\tDownload failed for " + areaSym + ": " + surveyName, 2)
failedList.append(SSA)
del asDict, survey, surveyInfo, areaSym, surveyDate, surveyName, newFolder, iTry
arcpy.SetProgressorPosition()
if len(failedList) > 0:
            AddMsgAndPrint("\n" + str(len(asList) - len(failedList)) + " out of " + str(len(asList)) + " were successfully downloaded.",2)
AddMsgAndPrint("\tThese surveys failed to download properly: " + ", ".join(failedList),2)
else:
AddMsgAndPrint("\nAll SSURGO datasets downloaded successfully\n")
arcpy.SetProgressorLabel("Processing complete...")
env.workspace = outputFolder
except:
errorMsg()