13 changes: 2 additions & 11 deletions buildout.cfg
@@ -15,7 +15,6 @@ parts =
modwsgi
template
modwsgi-patch
liblas

develop = .

@@ -48,14 +47,12 @@ dbport = overwriteme
db = overwrite_me
Review comment: overwrite_me → overwriteme (same for instanceid)


# LIDAR tool configuration
lidar_fusion_cmd = overwriteme
lidar_lastool_cmd = overwriteme
lidar_data = overwriteme
lidar_data_normalized = overwriteme

intranet_code = overwriteme
debug_log = False

liblas = libLAS-1.8.0-cp27-none-win32.whl
intranet_code = overwriteme

[pyramid]
recipe = zc.recipe.egg
@@ -91,9 +88,3 @@ cmds =
>>> else:
>>> print line
>>> fileinput.close()

[liblas]
recipe = collective.recipe.cmd
on_install = true
on_update = true
cmds = ${buildout:directory}\buildout\bin\pip.exe install ${buildout:directory}\wheels\${vars:liblas}
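Note on this change: the removed [liblas] part installed a prebuilt Windows libLAS wheel through pip at buildout time. Its replacement, laspy, is pure Python, so it is now declared as an ordinary egg dependency (see setup.py and versions.cfg below) and needs no install step of its own. For reference, the laspy 1.x API the profiler now relies on exposes coordinates and classifications as numpy-style arrays; a minimal read sketch (the sample.las path is only a placeholder):

```python
from laspy import file  # laspy 1.x layout, as imported in point_cloud_profiler.py

# Open a LAS tile read-only; x/y/z and classification come back as arrays,
# so no C bindings (libLAS) are required.
cloud = file.File('sample.las', mode='r')
try:
    print(len(cloud.x))                # number of points in the tile
    print(cloud.x[:5], cloud.y[:5])    # scaled coordinates
    print(cloud.classification[:5])    # per-point classification codes
finally:
    cloud.close()
```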
3 changes: 1 addition & 2 deletions development.ini.in
@@ -14,11 +14,10 @@ pyramid.includes =
sqlalchemy.url = postgresql://${vars:dbuser}:${vars:dbpassword}@${vars:dbhost}:${vars:dbport}/${vars:db}

# LIDAR tool configuration
lidar_fusion_cmd = ${vars:lidar_fusion_cmd}
lidar_lastool_cmd = ${vars:lidar_lastool_cmd}
lidar_data = ${vars:lidar_data}
lidar_data_normalized = ${vars:lidar_data_normalized}
lidar_output_dir = %(here)s/data/
debug_log = ${vars:debug_log}

app.cfg = %(here)s/config.yaml

67 changes: 41 additions & 26 deletions las_extractor/util/point_cloud_profiler.py
@@ -6,9 +6,14 @@
import numpy as np
from shapely.geometry import LineString
import uuid
from liblas import file

from laspy import file
from datetime import datetime

import logging

log = logging.getLogger(__name__)

try:
import osgeo.ogr as ogr
import osgeo.osr as osr
@@ -36,7 +41,6 @@ def generate_tile_list(line, bufferSizeMeter, outputDir, fileList, dataDir):
for row in intersectResult:
checkEmpty += 1
tileList.append(dataDir + str(row.file.strip() + '.las'))

return polygon, checkEmpty, tileList

# Read the numpy data and append them to json-serializable list
@@ -63,7 +67,7 @@ def generate_json(profile, jsonOutput, csvOut, classesList, classesNames):
'y': round(row[3]*100)/100
})

def pointCloudExtractorV2(coordinates, bufferSizeMeter, outputDir, dataDir, jsonOutput, csvOut, classesList, classesNames, perfLogStr):
def pointCloudExtractorV2(coordinates, bufferSizeMeter, outputDir, dataDir, jsonOutput, csvOut, classesList, classesNames, debug_log):

distanceFromOrigin = 0
zMin = []
@@ -90,8 +94,9 @@ def pointCloudExtractorV2(coordinates, bufferSizeMeter, outputDir, dataDir, json
beforeRequest = datetime.now()
polygon, checkEmpty, tileList = generate_tile_list(segment, bufferSizeMeter, outputDir, fileList, dataDir)
afterRequest = datetime.now()
perfLogStr += '***********PG REQUEST TIME*************\n'
perfLogStr += str(afterRequest - beforeRequest) + '\n'

if debug_log == True:
log.warning("PG REQUEST TIME: "+str(afterRequest - beforeRequest))

# Point Cloud extractor V2
seg = {'y1': xyStart[1], 'x1': xyStart[0], 'y2': xyEnd[1], 'x2': xyEnd[0]}
@@ -105,29 +110,39 @@ def pointCloudExtractorV2(coordinates, bufferSizeMeter, outputDir, dataDir, json
startIterateTile = datetime.now()
for tile in tileList:
cloud = file.File(tile, mode = 'r')
# iterate over cloud's points
for p in cloud:
# Needs enhancements...
if p.x <= max(seg['x1'] + bufferSizeMeter, seg['x2'] + bufferSizeMeter) \
and p.x >= min(seg['x1'] - bufferSizeMeter, seg['x2'] - bufferSizeMeter) \
and p.y <= max(seg['y1'] + bufferSizeMeter, seg['y2'] + bufferSizeMeter) \
and p.y >= min(seg['y1'] - bufferSizeMeter, seg['y2'] - bufferSizeMeter):
xOB = p.x - seg['x1']
yOB = p.y - seg['y1']
hypo = math.sqrt(xOB * xOB + yOB * yOB)
cosAlpha = (xOA * xOB + yOA * yOB)/(math.sqrt(xOA * xOA + yOA * yOA) * hypo)
alpha = math.acos(cosAlpha)
normalPointToLineDistance = math.sin(alpha) * hypo
# Filter for normal distance smaller or equal to buffer size
if normalPointToLineDistance <= bufferSizeMeter:
exctractedPoints.append({'x': p.x, 'y': p.y, 'z': p.z, 'classification': p.classification})
lineList = [p.x, p.y, p.z, cosAlpha, p.classification]
table.append(lineList)


for x, y, z, classification in np.nditer((cloud.x, cloud.y, cloud.z, cloud.classification)):
x = x.item()
y = y.item()
if x <= max(seg['x1'] + bufferSizeMeter, seg['x2'] + bufferSizeMeter) \
and x >= min(seg['x1'] - bufferSizeMeter, seg['x2'] - bufferSizeMeter) \
and y <= max(seg['y1'] + bufferSizeMeter, seg['y2'] + bufferSizeMeter) \
and y >= min(seg['y1'] - bufferSizeMeter, seg['y2'] - bufferSizeMeter):
xOB = x - seg['x1']
yOB = y - seg['y1']
hypo = math.sqrt(xOB * xOB + yOB * yOB)
cosAlpha = (xOA * xOB + yOA * yOB)/(math.sqrt(xOA * xOA + yOA * yOA) * hypo)
if cosAlpha > 1:
cosAlpha = 1

alpha = math.acos(cosAlpha)
normalPointToLineDistance = math.sin(alpha) * hypo
# Filter for normal distance smaller or equal to buffer size
if normalPointToLineDistance <= bufferSizeMeter:
z = z.item()
classification = classification.item()
exctractedPoints.append({'x': x, 'y': y, 'z': z, 'classification': classification})
lineList = [x, y, z, cosAlpha, classification]
table.append(lineList)

cloud.close()

stopIterateTile = datetime.now()
perfLogStr += '*********ITERATE OVER TILE AND POINTS TIME*************\n'
perfLogStr += str(stopIterateTile - startIterateTile) + '\n'

if debug_log == True:
log.warning("ITERATE OVER TILE AND POINTS TIME: "+str(stopIterateTile - startIterateTile))
log.warning('Found '+str(len(table))+" points")
# Convert the list into numpy array for fast sorting
data = np.array(table)

@@ -158,7 +173,7 @@ def pointCloudExtractorV2(coordinates, bufferSizeMeter, outputDir, dataDir, json
# Read the numpy data and append them to json-serializable list
generate_json(profile, jsonOutput, csvOut, classesList, classesNames)

return jsonOutput, zMin, zMax, checkEmpty, perfLogStr
return jsonOutput, zMin, zMax, checkEmpty

# Export csv output file to google kml 3D
def csv2kml(csvFile, markerUrl, outputKml, classesNames, kmlColors):
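The rewritten loop above replaces the per-point liblas iteration with numpy's nditer over the tile arrays, but it still evaluates the buffer test one point at a time in Python. If this ever needs to go faster, the same test can be expressed with boolean masks over whole tiles. The sketch below is not part of the change: the filter_tile helper is hypothetical, it reuses the seg and bufferSizeMeter names from the diff, converts the laspy columns to arrays explicitly, and guards the degenerate case where a point coincides with the segment origin.

```python
import numpy as np

def filter_tile(cloud, seg, bufferSizeMeter):
    """Vectorized sketch of the per-point buffer test used in pointCloudExtractorV2."""
    x = np.asarray(cloud.x)
    y = np.asarray(cloud.y)
    z = np.asarray(cloud.z)
    cls = np.asarray(cloud.classification)

    # Same bounding-box pre-filter as the loop in the diff.
    box = ((x <= max(seg['x1'], seg['x2']) + bufferSizeMeter)
           & (x >= min(seg['x1'], seg['x2']) - bufferSizeMeter)
           & (y <= max(seg['y1'], seg['y2']) + bufferSizeMeter)
           & (y >= min(seg['y1'], seg['y2']) - bufferSizeMeter))

    xOB = x[box] - seg['x1']
    yOB = y[box] - seg['y1']
    xOA = seg['x2'] - seg['x1']
    yOA = seg['y2'] - seg['y1']

    hypo = np.hypot(xOB, yOB)
    hypo = np.maximum(hypo, 1e-12)  # avoid division by zero at the segment origin
    cosAlpha = np.clip((xOA * xOB + yOA * yOB) / (np.hypot(xOA, yOA) * hypo), -1.0, 1.0)
    # sin(acos(cosAlpha)) * |OB| is the perpendicular distance to the segment line,
    # exactly as computed point by point in the diff.
    dist = np.sin(np.arccos(cosAlpha)) * hypo

    keep = dist <= bufferSizeMeter
    return (x[box][keep], y[box][keep], z[box][keep],
            cosAlpha[keep], cls[box][keep])
```

The caller would then extend table with np.column_stack of the returned arrays instead of appending rows one by one; apart from the zero-distance guard, the selected points are the same as with the loop.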
31 changes: 21 additions & 10 deletions las_extractor/views/lidar_profile.py
@@ -21,6 +21,10 @@

import sys

import logging

log = logging.getLogger(__name__)

@view_config(route_name='lidar_profile', renderer='jsonp')
def lidar_profile(request):
"""
@@ -40,8 +44,17 @@ def lidar_profile(request):


startProcess = datetime.now()
perfLogStr = "Las extractor performance log \n"
perfLogStr += 'Request: ' + str(uuid.uuid4()) + '\n'

debug_log = request.registry.settings['debug_log']

if debug_log == 'True':
debug_log = True
else:
debug_log = False

if debug_log == True:
log.warning("Request: "+str(uuid.uuid4()))

# Get resolution settings
resolution = request.registry.settings['resolution']

@@ -62,8 +75,6 @@
classesList = []
jsonOutput=[]

performanceLog = open(outputDir + 'PerformanceLog.txt', 'w+')

# Create the csv output file for later shp/kml/csv file download (on user demand)
csvOut = open(outputDir + outputCsv, 'w')
csvOut.write('distance,altitude,x,y,class\n') # csv file header
@@ -117,9 +128,9 @@
errorMsg += str(math.ceil(fullLine.length * 1000) / 1000) + 'm ' +_('long') +', '
errorMsg += _('max allowed length is') +': ' + str(maxLineDistance) + 'm </p>'
return {'Warning': errorMsg}

# ***Point cloud extractor, V2***
jsonOutput, zMin, zMax, checkEmpty, perfLogStr = pointCloudExtractorV2(geom.coordinates, bufferSizeMeter, outputDir, dataDir, jsonOutput, csvOut, classesList, classesNames, perfLogStr)
jsonOutput, zMin, zMax, checkEmpty = pointCloudExtractorV2(geom.coordinates, bufferSizeMeter, outputDir, dataDir, jsonOutput, csvOut, classesList, classesNames, debug_log)

# If no tile is found in the area intersected by the segment, return error message
if checkEmpty == 0:
@@ -134,10 +145,10 @@
# Close IO stream
csvOut.close()
endProcess = datetime.now()
perfLogStr += '*********TOTAL TIME***********\n'
perfLogStr += str(endProcess - startProcess) + '\n'
performanceLog.write(perfLogStr)
performanceLog.close()

if debug_log == True:
log.warning("TOTAL TIME: "+str(endProcess - startProcess))

return {
'profile': jsonOutput,
'series':classesList,
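The view converts the debug_log setting from its string form by comparing it to the literal 'True'. Pyramid already ships a converter for this; a possible simplification (only a sketch, not what the PR does) is pyramid.settings.asbool, which also accepts values such as 'true', 'yes', 'on', and '1':

```python
from pyramid.settings import asbool

# Falls back to False when the setting is absent; any usual truthy string works.
debug_log = asbool(request.registry.settings.get('debug_log', 'false'))

if debug_log:
    log.warning("Request: %s", uuid.uuid4())
```

Logging the timings at debug or info level instead of warning would also let the .ini logging configuration switch them on and off without a separate flag, at the cost of touching the logging sections of the ini templates.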
4 changes: 2 additions & 2 deletions production.ini.in
@@ -13,12 +13,12 @@ pyramid.includes =
sqlalchemy.url = postgresql://${vars:dbuser}:${vars:dbpassword}@${vars:dbhost}:${vars:dbport}/${vars:db}

# LIDAR tool configuration
lidar_fusion_cmd = ${vars:lidar_fusion_cmd}
lidar_lastool_cmd = ${vars:lidar_lastool_cmd}
lidar_data = ${vars:lidar_data}
lidar_data_normalized = ${vars:lidar_data_normalized}
lidar_output_dir = %(here)s/data/

debug_log = ${vars:debug_log}

app.cfg = %(here)s/config.yaml

[server:main]
5 changes: 1 addition & 4 deletions setup.py
@@ -27,13 +27,10 @@
'numpy',
'pyyaml',
'pip',
'laspy',
Review comment: extra ','

],
packages=find_packages(exclude=['ez_setup']),
include_package_data=True,
message_extractors={'las_extractor': [
('static/**', 'ignore', None),
('**.py', 'python', None),
('templates/**', 'mako', {'input_encoding': 'utf-8'})]},
zip_safe=False,
entry_points={
'paste.app_factory': [
1 change: 1 addition & 0 deletions versions.cfg
@@ -10,6 +10,7 @@ collective.recipe.modwsgi = 2.1
GeoAlchemy2 = 0.2.4
geoalchemy2 = 0.2.4
geojson = 1.0.9
laspy = 1.3.0
Mako = 1.0.1
mako = 1.0.1
markupsafe = 0.23