Skip to content

Commit

Permalink
support pipelines
Browse files Browse the repository at this point in the history
  • Loading branch information
hobu committed Mar 17, 2023
1 parent 6c4abb1 commit 933cb47
Show file tree
Hide file tree
Showing 4 changed files with 43 additions and 27 deletions.
2 changes: 1 addition & 1 deletion src/trenchrun/__init__.py
Original file line number Diff line number Diff line change
@@ -1 +1 @@
__version__='0.0.4'
__version__='0.0.5'
Empty file removed src/trenchrun/__version__.py
Empty file.
6 changes: 1 addition & 5 deletions src/trenchrun/argparser.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,13 +7,9 @@ def get_parser(args):

parser = argparse.ArgumentParser(description='Compute the Ambient Absorption imagery product for lidar')
parser.add_argument('input',
help='PDAL-readable lidar content', type=pathlib.Path)
help='PDAL-readable lidar content as a file or a pipeline', type=pathlib.Path)
parser.add_argument('--output',
help='Output filename', type=str, default='exposure')
parser.add_argument('--filters',
help='Filter stages', type =str, default=None)
parser.add_argument('--reader_args',
help='PDAL Reader args as JSON object', type =str, default=None)
parser.add_argument('--resolution',
help='Raster output resolution', type =float, default=1.0)
parser.add_argument('--alpha',
Expand Down
62 changes: 41 additions & 21 deletions src/trenchrun/data.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,21 +28,52 @@ class Data(object):
def __init__(self, args):
    """Stash parsed CLI args, classify the input, create temp rasters.

    :param args: argparse.Namespace with at least ``input`` (pathlib.Path).
        Temp raster paths (``intensityPath``, ``dsmPath``, ``aoPath``) are
        attached to ``args`` here and removed again in ``__del__``.
    """
    self.args = args

    # A .json input is treated as a PDAL pipeline definition; anything
    # else is handed to a PDAL reader directly.
    if '.json' in self.args.input.suffixes:
        self.inputType = 'pipeline'
    else:
        self.inputType = 'readable'

    # Scratch GeoTIFFs for the intermediate products. delete=False so the
    # files survive the handle; cleanup happens in __del__.
    self.args.intensityPath = pathlib.Path(tempfile.NamedTemporaryFile(suffix='.tif', delete=False).name)
    self.args.dsmPath = pathlib.Path(tempfile.NamedTemporaryFile(suffix='.tif', delete=False).name)
    self.args.aoPath = pathlib.Path(tempfile.NamedTemporaryFile(suffix='.tif', delete=False).name)

    # Validate only after inputType and the temp paths exist — the diff
    # residue showed a stale earlier call before they were set up.
    self.checkValidData()

def readPipeline(self):
    """Load the input JSON as a PDAL pipeline, stripped of any writers.

    We add our own writer stages later, so writers embedded in the
    user-supplied pipeline are discarded.

    :raises RuntimeError: if the input was not classified as a pipeline.
    :return: a ``pdal.Pipeline`` containing only non-writer stages.
    """
    if self.inputType != 'pipeline':
        raise RuntimeError("Data type is not pipeline!")

    # read_text replaces the read_bytes().decode('utf-8') round trip.
    j = self.args.input.read_text(encoding='utf-8')
    p = pdal.Pipeline(pdal.pipeline._parse_stages(j))

    # strip off any writers — we're making our own
    stages = [stage for stage in p.stages
              if stage.type.split('.')[0] != 'writers']
    return pdal.Pipeline(stages)


def readFile(self):
    """Wrap the input path in a PDAL reader and return it as a pipeline."""
    return pdal.Reader(str(self.args.input)).pipeline()

def __del__(self):
    """Remove the scratch rasters created in ``__init__``.

    Uses ``missing_ok=True`` so a file that was never written (or was
    already removed) doesn't raise FileNotFoundError — exceptions in
    ``__del__`` are only reported as unraisable warnings and would mask
    the real failure.
    """
    self.args.intensityPath.unlink(missing_ok=True)
    self.args.dsmPath.unlink(missing_ok=True)
    self.args.aoPath.unlink(missing_ok=True)

def checkValidData(self):
reader = pdal.Reader(str(self.args.input))
pipeline = reader.pipeline()
qi = pipeline.quickinfo

if self.inputType == 'pipeline':
reader = self.readPipeline()
else:
reader = self.readFile()

qi = reader.quickinfo
for key in qi:
dimensions = [i.strip() for i in qi[key]['dimensions'].split(',')]
if 'Intensity' not in dimensions:
Expand Down Expand Up @@ -76,14 +107,13 @@ def getWriters(self):
return intensity | dsm

def getPipeline(self):
    """Assemble the full PDAL pipeline: input stages piped into our writers.

    The reader side comes from either the user-supplied pipeline
    (writers stripped) or a plain file reader, depending on how the
    input was classified in ``__init__``.

    :return: a pdal pipeline/stage expression ready to execute.
    """
    if self.inputType == 'pipeline':
        reader = self.readPipeline()
    else:
        reader = self.readFile()

    # Our own writer stages (intensity + dsm rasters) always terminate
    # the pipeline; stale getReader/getFilters wiring from the old diff
    # lines is gone.
    writers = self.getWriters()
    stage = reader | writers

    return stage

Expand All @@ -97,16 +127,6 @@ def execute(self):
count = pipeline.execute()
logs.logger.info(f'Wrote intensity and dsm for {count} points')

def getFilters(self):
    """Build a pdal.Pipeline from the --filters JSON file, or None if unset."""
    if not self.args.filters:
        return None
    with open(self.args.filters, 'r') as f:
        j = json.loads(f.read())
    stages = pdal.pipeline._parse_stages(json.dumps(j))
    return pdal.Pipeline(stages)

def getImageCenter(self):
# Run our pipeline

Expand Down

0 comments on commit 933cb47

Please sign in to comment.