diff --git a/.travis.yml b/.travis.yml
index 5507d58..2b51c77 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -4,7 +4,6 @@ os:
   - linux
   - osx
 julia:
-  - 0.7
   - 1.0
   - nightly
 matrix:
diff --git a/README.md b/README.md
index af15eb9..75e3ead 100644
--- a/README.md
+++ b/README.md
@@ -5,7 +5,7 @@
 Julia package for reading and writing the LAS lidar format.
 
-This is a pure Julia alternative to [LibLAS.jl](https://github.com/visr/LibLAS.jl) or [Laszip.jl](https://github.com/joa-quim/Laszip.jl). Currently only LAS versions 1.1 - 1.3 and point formats 0 - 3 are supported. For LAZ support see below.
+This is a pure Julia alternative to [LibLAS.jl](https://github.com/visr/LibLAS.jl) or [Laszip.jl](https://github.com/joa-quim/Laszip.jl). Currently LAS versions 1.1 - 1.4 and point formats 0 - 10 are supported. For LAZ support see below.
 
 If the file fits into memory, it can be loaded using
diff --git a/REQUIRE b/REQUIRE
index f47fb6c..e4ce4b1 100644
--- a/REQUIRE
+++ b/REQUIRE
@@ -3,3 +3,4 @@ ColorTypes 0.7
 FixedPointNumbers 0.5
 FileIO 1.0
 GeometryTypes 0.6
+StaticArrays
diff --git a/appveyor.yml b/appveyor.yml
index 9e2192c..9aa56ab 100644
--- a/appveyor.yml
+++ b/appveyor.yml
@@ -1,6 +1,5 @@
 environment:
   matrix:
-  - julia_version: 0.7
   - julia_version: 1.0
   - julia_version: nightly
diff --git a/src/LasIO.jl b/src/LasIO.jl
index 8331c25..a5b01d4 100644
--- a/src/LasIO.jl
+++ b/src/LasIO.jl
@@ -6,17 +6,26 @@ using FileIO
 using FixedPointNumbers
 using ColorTypes
 using GeometryTypes # for conversion
+using StaticArrays
 
 export
     # Types
     LasHeader,
     LasVariableLengthRecord,
+    ExtendedLasVariableLengthRecord,
     LasPoint,
     LasPoint0,
     LasPoint1,
     LasPoint2,
     LasPoint3,
+    LasPoint4,
+    LasPoint5,
+    LasPoint6,
+    LasPoint7,
+    LasPoint8,
+    LasPoint9,
+    LasPoint10,
     PointVector,
 
     # Functions on LasHeader
@@ -49,12 +58,14 @@ export
     blue,
     RGB
 
+include("fixedstrings.jl")
+include("meta.jl")
 include("vlrs.jl")
 include("header.jl")
-include("meta.jl")
 include("point.jl")
 include("util.jl")
 include("fileio.jl")
+include("waveform.jl")
 include("srs.jl")
 
 function __init__()
diff --git a/src/fileio.jl b/src/fileio.jl
index a730721..f1fd36d 100644
--- a/src/fileio.jl
+++ b/src/fileio.jl
@@ -1,15 +1,23 @@
 using Mmap
 
+pointformats = Dict(
+    0x00 => LasPoint0,
+    0x01 => LasPoint1,
+    0x02 => LasPoint2,
+    0x03 => LasPoint3,
+    0x04 => LasPoint4,
+    0x05 => LasPoint5,
+    0x06 => LasPoint6,
+    0x07 => LasPoint7,
+    0x08 => LasPoint8,
+    0x09 => LasPoint9,
+    0x0a => LasPoint10
+)
+
 function pointformat(header::LasHeader)
     id = header.data_format_id
-    if id == 0x00
-        return LasPoint0
-    elseif id == 0x01
-        return LasPoint1
-    elseif id == 0x02
-        return LasPoint2
-    elseif id == 0x03
-        return LasPoint3
+    if id in keys(pointformats)
+        return pointformats[id]
     else
         error("unsupported point format $(Int(id))")
     end
@@ -28,8 +36,8 @@ end
 function load(s::Base.AbstractPipe)
     skiplasf(s)
     header = read(s, LasHeader)
-
-    n = header.records_count
+    lv = VersionNumber(header.version_major, header.version_minor)
+    n = header.records_count_new
     pointtype = pointformat(header)
     pointdata = Vector{pointtype}(undef, n)
     for i=1:n
@@ -41,9 +49,30 @@ end
 function load(s::Stream{format"LAS"}; mmap=false)
     skiplasf(s)
     header = read(s, LasHeader)
+    lv = VersionNumber(header.version_major, header.version_minor)
 
-    n = header.records_count
+    n = header.records_count_new
     pointtype = pointformat(header)
+    extra_bytes = header.data_record_length - sizeof(pointtype)
+
+    # Determine extra bytes
+    if extra_bytes != 0 && 4 in keys(header.variable_length_records)
+        ebs = header.variable_length_records[4].data # extra_byte structures
+        total_size = sum([sizeof(eb.data_type) for eb in ebs])
+
+        if total_size > extra_bytes
+            # this renders this VLR invalid according to spec
+            @warn("Extra bytes mismatch, skipping all extra bytes.")
+        else
+            # this is allowed according to spec
+            total_size < extra_bytes && @warn("There are undocumented extra bytes!")
+
+            # generate new point type structure to read
+            newfields = [(Symbol(eb.name), eb.data_type) for eb in ebs]
+            pointtype = gen_append_struct(pointtype, newfields)
+            extra_bytes -= total_size
+        end
+    end
 
     if mmap
         pointsize = Int(header.data_record_length)
@@ -53,7 +82,21 @@ function load(s::Stream{format"LAS"}; mmap=false)
         pointdata = Vector{pointtype}(undef, n)
         for i=1:n
             pointdata[i] = read(s, pointtype)
+            extra_bytes > 0 && read(s, extra_bytes) # skip extra bytes
+        end
+    end
+
+    # Extended Variable Length Records for 1.3 and 1.4
+    if lv == v"1.3" && header.waveform_offset > 0
+        evlr = read(s, ExtendedLasVariableLengthRecord)
+        header.variable_length_records[evlr.record_id] = evlr
+    elseif lv == v"1.4" && header.n_evlr > 0
+        for i=1:header.n_evlr
+            evlr = read(s, ExtendedLasVariableLengthRecord)
+            header.variable_length_records[evlr.record_id] = evlr
         end
+    else
+        nothing
     end
 
     header, pointdata
@@ -86,7 +129,7 @@ end
 function save(s::Stream{format"LAS"}, header::LasHeader, pointdata::AbstractVector{<:LasPoint})
     # checks
-    header_n = header.records_count
+    header_n = header.records_count_new
     n = length(pointdata)
     msg = "number of records in header ($header_n) does not match data length ($n)"
     @assert header_n == n msg
@@ -94,7 +137,6 @@ function save(s::Stream{format"LAS"}, header::LasHeader, pointdata::AbstractVect
     # write header
     write(s, magic(format"LAS"))
     write(s, header)
-
     # write points
     for p in pointdata
         write(s, p)
diff --git a/src/fixedstrings.jl b/src/fixedstrings.jl
new file mode 100644
index 0000000..723deba
--- /dev/null
+++ b/src/fixedstrings.jl
@@ -0,0 +1,57 @@
+"""
+    FixedString{N}(str; truncate=false, nullterm=true)
+
+A string type with a fixed maximum size `N` in bytes. This is useful for
+serializing to and from binary data formats containing fixed length strings.
+When constructing a FixedString, if `truncate` is true, the input string will
+be truncated to fit into the number of bytes `N`. If `nullterm` is true, ensure
+that the string has length strictly less than `N`, to fit in a single
+terminating byte.
+
+When a FixedString{N} is serialized using `read()` and `write()`, exactly `N`
+bytes are read and written, with any padding bytes set to zero (`NUL`).
+"""
+struct FixedString{N} <: AbstractString
+    str::String
+
+    function (::Type{FixedString{N}})(str::String) where N
+        n = sizeof(str)
+        n <= N || throw(ArgumentError("sizeof(str) = $n does not fit into a FixedString{$N}"))
+        return new{N}(str)
+    end
+end
+
+function (::Type{FixedString{N}})(str::AbstractString; nullterm=true, truncate=false) where N
+    maxbytes = nullterm ?
N-1 : N + if sizeof(str) > maxbytes + truncate || throw(ArgumentError("sizeof(str) = $(sizeof(str)) too long for FixedString{$N}")) + strunc = String(str[1:maxbytes]) + while sizeof(strunc) > maxbytes + strunc = strunc[1:end-1] # Needed for non-ascii chars + end + return FixedString{N}(String(strunc)) + else + return FixedString{N}(String(str)) + end +end + +# Minimal AbstractString required interface +Base.sizeof(f::FixedString{N}) where {N} = N +Base.iterate(f::FixedString{N}) where {N} = iterate(f.str) +Base.iterate(f::FixedString{N}, state::Integer) where {N} = iterate(f.str, state) + +# Be permissive by setting nullterm to false for reading by default. +function Base.read(io::IO, ::Type{FixedString{N}}; nullterm=false) where {N} + bytes = zeros(UInt8, N) + bytes = read!(io, bytes) + i = findfirst(isequal(0), bytes) + idx = i === nothing ? N : i - 1 + FixedString{N}(String(bytes[1:idx]), nullterm=nullterm) +end + +function Base.write(io::IO, f::FixedString{N}) where {N} + write(io, f.str) + for i=1:N-sizeof(f.str) + write(io, UInt8(0)) + end +end diff --git a/src/header.jl b/src/header.jl index 63ec912..21cd7cd 100644 --- a/src/header.jl +++ b/src/header.jl @@ -14,17 +14,26 @@ Backward compatibility with LAS 1.1 – LAS 1.3 when payloads consist of only le content =# +# abstract type LasHeader ; end +hsizes = Dict( + v"1.0"=>227, + v"1.1"=>227, + v"1.2"=>227, + v"1.3"=>235, + v"1.4"=>375 + ) + mutable struct LasHeader file_source_id::UInt16 global_encoding::UInt16 guid_1::UInt32 guid_2::UInt16 guid_3::UInt16 - guid_4::AbstractString + guid_4::FixedString{8} version_major::UInt8 version_minor::UInt8 - system_id::AbstractString - software_id::AbstractString + system_id::FixedString{32} + software_id::FixedString{32} creation_doy::UInt16 creation_year::UInt16 header_size::UInt16 @@ -33,7 +42,7 @@ mutable struct LasHeader data_format_id::UInt8 data_record_length::UInt16 records_count::UInt32 - point_return_count::Vector{UInt32} + point_return_count::Vector{UInt32} # 15 x_scale::Float64 y_scale::Float64 z_scale::Float64 @@ -46,77 +55,37 @@ mutable struct LasHeader y_min::Float64 z_max::Float64 z_min::Float64 - variable_length_records::Vector{LasVariableLengthRecord} + # ASPRS LAS 1.3 + waveform_offset::UInt64 + # ASPRS LAS 1.4 + evlr_offset::UInt64 + n_evlr::UInt64 + records_count_new::UInt64 + point_return_count_new::Vector{UInt64} # 15 + + # VLRs + variable_length_records::Dict{UInt16, Union{LasVariableLengthRecord, ExtendedLasVariableLengthRecord}} + + # Header can have extra bits user_defined_bytes::Vector{UInt8} end function Base.show(io::IO, header::LasHeader) - n = Int(header.records_count) + n = Int(header.records_count_new) println(io, "LasHeader with $n points.") - println(io, string("\tfile_source_id = ", header.file_source_id)) - println(io, string("\tglobal_encoding = ", header.global_encoding)) - println(io, string("\tguid_1 = ", header.guid_1)) - println(io, string("\tguid_2 = ", header.guid_2)) - println(io, string("\tguid_3 = ", header.guid_3)) - println(io, string("\tguid_4 = ", header.guid_4)) - println(io, string("\tversion_major = ", header.version_major)) - println(io, string("\tversion_minor = ", header.version_minor)) - println(io, string("\tsystem_id = ", header.system_id)) - println(io, string("\tsoftware_id = ", header.software_id)) - println(io, string("\tcreation_doy = ", header.creation_doy)) - println(io, string("\tcreation_year = ", header.creation_year)) - println(io, string("\theader_size = ", header.header_size)) - println(io, string("\tdata_offset = ", 
header.data_offset)) - println(io, string("\tn_vlr = ", header.n_vlr)) - println(io, string("\tdata_format_id = ", header.data_format_id)) - println(io, string("\tdata_record_length = ", header.data_record_length)) - println(io, string("\trecords_count = ", header.records_count)) - println(io, string("\tpoint_return_count = ", header.point_return_count)) - println(io, string("\tx_scale = ", header.x_scale)) - println(io, string("\ty_scale = ", header.y_scale)) - println(io, string("\tz_scale = ", header.z_scale)) - println(io, string("\tx_offset = ", header.x_offset)) - println(io, string("\ty_offset = ", header.y_offset)) - println(io, string("\tz_offset = ", header.z_offset)) - println(io, string("\tx_max = ", header.x_max)) - println(io, string("\tx_min = ", header.x_min)) - println(io, string("\ty_max = ", header.y_max)) - println(io, string("\ty_min = ", header.y_min)) - println(io, string("\tz_max = ", header.z_max)) - println(io, string("\tz_min = ", header.z_min)) - - if !isempty(header.variable_length_records) - nrecords = min(10, size(header.variable_length_records, 1)) - - println(io, string("\tvariable_length_records (max 10) = ")) - for vlr in header.variable_length_records[1:nrecords] - println(io, "\t\t($(vlr.user_id), $(vlr.record_id)) => ($(vlr.description), $(sizeof(vlr.data)) bytes...)") - end - println("\t\t...") - end -end - -function readstring(io, nb::Integer) - bytes = read(io, nb) - # strip possible null bytes - lastchar = findlast(bytes .!= 0) - if lastchar == nothing - return "" - else - return String(bytes[1:lastchar]) - end end -function writestring(io, str::AbstractString, nb::Integer) - n = length(str) - npad = nb - n - if npad < 0 - error("string too long") - elseif npad == 0 - write(io, str) - else - writestr = string(str * "\0"^npad) - write(io, writestr) +function showall(io::IO, h::LasHeader) + show(io, h) + for name in fieldnames(h) + if (name == :variable_length_records) || (name == :extended_variable_length_records) + println(io, string("\tvariable_length_records = ")) + for (_, vlr) in h.variable_length_records + println(io, "\t\t($(vlr.user_id), $(vlr.record_id)) => ($(vlr.description), $(vlr.record_length_after_header) bytes...)") + end + else + println(io, string("\t$name = $(getfield(h,name))")) + end end end @@ -126,11 +95,11 @@ function Base.read(io::IO, ::Type{LasHeader}) guid_1 = read(io, UInt32) guid_2 = read(io, UInt16) guid_3 = read(io, UInt16) - guid_4 = readstring(io, 8) + guid_4 = read(io, FixedString{8}) version_major = read(io, UInt8) version_minor = read(io, UInt8) - system_id = readstring(io, 32) - software_id = readstring(io, 32) + system_id = read(io, FixedString{32}) + software_id = read(io, FixedString{32}) creation_doy = read(io, UInt16) creation_year = read(io, UInt16) header_size = read(io, UInt16) @@ -152,19 +121,34 @@ function Base.read(io::IO, ::Type{LasHeader}) y_min = read(io, Float64) z_max = read(io, Float64) z_min = read(io, Float64) - lasversion = VersionNumber(version_major, version_minor) - if lasversion >= v"1.3" - # start of waveform data record (unsupported) - _ = read(io, UInt64) + + # determine ASPRS format + lv = VersionNumber(version_major, version_minor) + # ASPRS LAS 1.3 + waveform_offset = lv >= v"1.3" ? read(io, UInt64) : 0 + + # ASPRS LAS 1.4 + evlr_offset = lv >= v"1.4" ? read(io, UInt64) : 0 + n_evlr = lv >= v"1.4" ? read(io, UInt32) : 0 + records_count_new = lv >= v"1.4" ? 
read(io, UInt64) : records_count + point_return_count_new = zeros(UInt64, 15) + if lv >= v"1.4" + point_return_count_new = read!(io, point_return_count_new) end - vlrs = [read(io, LasVariableLengthRecord, false) for i=1:n_vlr] - # From here until the data_offset everything is read in - # as user_defined_bytes. To avoid a seek that we cannot do on STDIN, + # Header could be longer than standard. To avoid a seek that we cannot do on STDIN, # we calculate how much to read in. - vlrlength = n_vlr == 0 ? 0 : sum(sizeof, vlrs) - pos = header_size + vlrlength - user_defined_bytes = read(io, data_offset - pos) + header_extra_size = header_size - hsizes[lv] + _ = header_extra_size > 0 ? read(io, header_extra_size) : Vector{UInt8}() + + vlrlist = [read(io, LasVariableLengthRecord) for _=1:n_vlr] + vlrs = Dict(v.record_id => v for v in vlrlist) + + # Skip any data remaining + vlrsize = length(vlrlist) > 0 ? sum(sizeof, vlrlist) : 0 + pos = header_size + vlrsize + vlr_extra_size = data_offset - pos + user_defined_bytes = vlr_extra_size > 0 ? read(io, vlr_extra_size) : Vector{UInt8}() # put it all in a type header = LasHeader( @@ -199,6 +183,11 @@ function Base.read(io::IO, ::Type{LasHeader}) y_min, z_max, z_min, + waveform_offset, + evlr_offset, + n_evlr, + records_count_new, + point_return_count_new, vlrs, user_defined_bytes ) @@ -210,16 +199,16 @@ function Base.write(io::IO, h::LasHeader) write(io, h.guid_1) write(io, h.guid_2) write(io, h.guid_3) - writestring(io, h.guid_4, 8) + write(io, h.guid_4) write(io, h.version_major) write(io, h.version_minor) - writestring(io, h.system_id, 32) - writestring(io, h.software_id, 32) + write(io, h.system_id) + write(io, h.software_id) write(io, h.creation_doy) write(io, h.creation_year) write(io, h.header_size) write(io, h.data_offset) - @assert length(h.variable_length_records) == h.n_vlr + @assert length(h.variable_length_records) == h.n_vlr + h.n_evlr write(io, h.n_vlr) write(io, h.data_format_id) write(io, h.data_record_length) @@ -243,10 +232,15 @@ function Base.write(io::IO, h::LasHeader) # start of waveform data record (unsupported) write(io, UInt64(0)) end - for i in 1:h.n_vlr - write(io, h.variable_length_records[i]) + + # Write VLRS + for k in sort(collect(keys(h.variable_length_records))) + vlr = h.variable_length_records[k] + typeof(vlr) == LasVariableLengthRecord && write(io, vlr) end + write(io, h.user_defined_bytes) + # note that for LAS 1.4 a few new parts need to be written # possibly introduce typed headers like the points nothing @@ -266,3 +260,7 @@ function is_wkt(h::LasHeader) end wkit_bit end + +function waveform_internal(h::LasHeader) + isodd((h.global_encoding >>> 1) & 0x0001) +end diff --git a/src/meta.jl b/src/meta.jl index d749959..d76ac6f 100644 --- a/src/meta.jl +++ b/src/meta.jl @@ -1,7 +1,9 @@ +"""Returns fieldtypes like fieldnames.""" +function fieldtypes(T::Type) return [fieldtype(T, i) for i = 1:length(fieldnames(T))] end + "Generate read (unpack) method for structs." 
function generate_read(T::Type) - fc = fieldcount(T) - types = [fieldtype(T, i) for i = 1:fc] + types = fieldtypes(T) # Create unpack function expression function_expression = :(function Base.read(io::IO, t::Type{$T}) end) @@ -27,8 +29,8 @@ function generate_write(T::Type) body_expression = quote end for t in fieldnames(T) - append!(body_expression.args, 0) # dummy with known length write_expression = :(write(io, T.$t)) + append!(body_expression.args, 0) # dummy with known length body_expression.args[end] = write_expression end @@ -61,3 +63,21 @@ macro gen_io(typ::Expr) push!(ret.args, :(generate_io($T))) return esc(ret) end + +"""Combines base struct with provided fieldnames and types to generate new struct.""" +function gen_append_struct(T::Type, extrafields::Vector{Tuple{Symbol, DataType}}) + name = gensym("$T") # this name has # in there (!) + struct_expression = :(struct $name <: LasPoint end) + + basefields = collect(zip(fieldnames(T), fieldtypes(T))) + fields = vcat(basefields, extrafields) + + for (fname, ftype) in fields + field = Expr(:(::), fname, ftype) + append!(struct_expression.args[3].args, 0) # dummy with known length + struct_expression.args[3].args[end] = field + end + + eval(struct_expression) + eval(name) +end diff --git a/src/point.jl b/src/point.jl index 9f774a2..d9655a7 100644 --- a/src/point.jl +++ b/src/point.jl @@ -42,6 +42,8 @@ function Base.show(io::IO, pointdata::AbstractVector{<:LasPoint}) println(io, "$(typeof(pointdata)) with $n points.") end +Base.sizeof(p::T) where T<:LasPoint = sum(sizeof, fieldtypes(p)) + "ASPRS LAS point data record format 0" @gen_io struct LasPoint0 <: LasPoint x::Int32 @@ -102,6 +104,177 @@ end blue::N0f16 end +# Added in ASPRS LAS 1.3 +""""ASPRS LAS point data record format 4 +which adds waveform packets to format 1.""" +@gen_io struct LasPoint4 <: LasPoint + # Format 1 + x::Int32 + y::Int32 + z::Int32 + intensity::UInt16 + flag_byte::UInt8 + raw_classification::UInt8 + scan_angle::Int8 + user_data::UInt8 + pt_src_id::UInt16 + gps_time::Float64 + # Format 4 + wave_packet_descriptor_index::UInt8 + waveform_data_offset::UInt64 + waveform_packet_size::UInt32 + waveform_return_point_location::Float32 # ps + xt::Float32 + yt::Float32 + zt::Float32 +end + + +"""ASPRS LAS point data record format 5 +which adds waveform packets to format 3.""" +@gen_io struct LasPoint5 <: LasPoint + # Format 3 + x::Int32 + y::Int32 + z::Int32 + intensity::UInt16 + flag_byte::UInt8 + raw_classification::UInt8 + scan_angle::Int8 + user_data::UInt8 + pt_src_id::UInt16 + gps_time::Float64 + red::N0f16 + green::N0f16 + blue::N0f16 + # Format 5 + wave_packet_descriptor_index::UInt8 + waveform_data_offset::UInt64 + waveform_packet_size::UInt32 + waveform_return_point_location::Float32 # ps + xt::Float32 + yt::Float32 + zt::Float32 +end + +# Added in ASPRS LAS 1.4 +""""ASPRS LAS point data record format 6 +which serves as a core 30 bytes for formats 6-10.""" +@gen_io struct LasPoint6 <: LasPoint + x::Int32 + y::Int32 + z::Int32 + intensity::UInt16 + return_byte::UInt8 # return number, # of returns + classification_byte::UInt8 # class flags, scanner channel, scan direction, edge of flightline + raw_classification::UInt8 + user_data::UInt8 + scan_angle::Int16 + pt_src_id::UInt16 + gps_time::Float64 +end + +""""ASPRS LAS point data record format 7 +which adds RGB to format 6.""" +@gen_io struct LasPoint7 <: LasPoint + # Format 6 + x::Int32 + y::Int32 + z::Int32 + intensity::UInt16 + return_byte::UInt8 # return number, # of returns + classification_byte::UInt8 # class 
flags, scanner channel, scan direction, edge of flightline + raw_classification::UInt8 + user_data::UInt8 + scan_angle::Int16 + pt_src_id::UInt16 + gps_time::Float64 + # Format 7 + red::N0f16 + green::N0f16 + blue::N0f16 +end + +""""ASPRS LAS point data record format 8 +which adds NIR to format 7.""" +@gen_io struct LasPoint8 <: LasPoint + # Format 6 + x::Int32 + y::Int32 + z::Int32 + intensity::UInt16 + return_byte::UInt8 # return number, # of returns + classification_byte::UInt8 # class flags, scanner channel, scan direction, edge of flightline + raw_classification::UInt8 + user_data::UInt8 + scan_angle::Int16 + pt_src_id::UInt16 + gps_time::Float64 + # Format 7 + red::N0f16 + green::N0f16 + blue::N0f16 + # Format 8 + nir::N0f16 +end + +""""ASPRS LAS point data record format 9 +which adds wave packets to format 6.""" +@gen_io struct LasPoint9 <: LasPoint + # Format 6 + x::Int32 + y::Int32 + z::Int32 + intensity::UInt16 + return_byte::UInt8 # return number, # of returns + classification_byte::UInt8 # class flags, scanner channel, scan direction, edge of flightline + raw_classification::UInt8 + user_data::UInt8 + scan_angle::Int16 + pt_src_id::UInt16 + gps_time::Float64 + # Format 9 + wave_packet_descriptor_index::UInt8 + waveform_data_offset::UInt64 + waveform_packet_size::UInt32 + waveform_return_point_location::Float32 # ps + xt::Float32 + yt::Float32 + zt::Float32 +end + + +""""ASPRS LAS point data record format 10 +which adds wave packets to format 8.""" +@gen_io struct LasPoint10 <: LasPoint + # Format 6 + x::Int32 + y::Int32 + z::Int32 + intensity::UInt16 + return_byte::UInt8 # return number, # of returns + classification_byte::UInt8 # class flags, scanner channel, scan direction, edge of flightline + raw_classification::UInt8 + user_data::UInt8 + scan_angle::Int16 + pt_src_id::UInt16 + gps_time::Float64 + # Format 7 + red::N0f16 + green::N0f16 + blue::N0f16 + # Format 8 + nir::N0f16 + # Format 10 + wave_packet_descriptor_index::UInt8 + waveform_data_offset::UInt64 + waveform_packet_size::UInt32 + waveform_return_point_location::Float32 # ps + xt::Float32 + yt::Float32 + zt::Float32 +end + # for convenience in function signatures const LasPointColor = Union{LasPoint2,LasPoint3} const LasPointTime = Union{LasPoint1,LasPoint3} @@ -114,6 +287,12 @@ function Base.show(io::IO, p::LasPoint) println(io, "LasPoint(x=$x, y=$y, z=$z, classification=$cl)") end +function showall(io::IO, h::LasPoint) + for name in fieldnames(h) + println(io, string("\t$name = $(getfield(h,name))")) + end +end + # Extend base by enabling reading/writing relevant FixedPointNumbers from IO. 
Base.read(io::IO, ::Type{N0f16}) = reinterpret(N0f16, read(io, UInt16)) Base.write(io::IO, t::N0f16) = write(io, reinterpret(UInt16, t)) diff --git a/src/srs.jl b/src/srs.jl index a25823a..ad13ec5 100644 --- a/src/srs.jl +++ b/src/srs.jl @@ -25,26 +25,21 @@ struct GeoDoubleParamsTag double_params::Vector{Float64} end -struct GeoAsciiParamsTag - ascii_params::String - nb::Int # number of bytes - GeoAsciiParamsTag(s::AbstractString, nb::Integer) = new(ascii(s), Int(nb)) -end - const id_geokeydirectorytag = UInt16(34735) const id_geodoubleparamstag = UInt16(34736) const id_geoasciiparamstag = UInt16(34737) "test whether a vlr is a GeoKeyDirectoryTag, GeoDoubleParamsTag or GeoAsciiParamsTag" -is_srs(vlr::LasVariableLengthRecord) = vlr.record_id in ( +is_srs(vlr::Union{ExtendedLasVariableLengthRecord, LasVariableLengthRecord}) = vlr.record_id in ( id_geokeydirectorytag, id_geodoubleparamstag, id_geoasciiparamstag) +is_srs(::UInt16, vlr::Union{ExtendedLasVariableLengthRecord, LasVariableLengthRecord}) = is_srs(vlr) +is_srs(pair::Pair{UInt16,Union{LasIO.ExtendedLasVariableLengthRecord, LasIO.LasVariableLengthRecord}}) = is_srs(pair.first, pair.second) # number of bytes Base.sizeof(data::GeoKeys) = 8 * Int(data.number_of_keys) + 8 Base.sizeof(data::GeoDoubleParamsTag) = sizeof(data.double_params) -Base.sizeof(data::GeoAsciiParamsTag) = data.nb "Construct a projection VLR based on an EPSG code" function LasVariableLengthRecord(header::LasHeader, srid::SRID) @@ -81,7 +76,6 @@ function Base.write(io::IO, data::GeoKeys) end Base.write(io::IO, data::GeoDoubleParamsTag) = write(io, data.double_params) -Base.write(io::IO, data::GeoAsciiParamsTag) = writestring(io, data.ascii_params, data.nb) "Create GeoKeys from EPSG code. Assumes CRS is projected and in meters." function GeoKeys(epsg::Integer) @@ -107,11 +101,10 @@ function epsg_code(header::LasHeader) throw(ArgumentError("WKT format projection information not implemented")) end vlrs = header.variable_length_records - ind = findfirst(x -> x.record_id == id_geokeydirectorytag, vlrs) - if ind === nothing + if !(id_geokeydirectorytag in vlrs.keys) nothing else - vlrs[ind].data.keys[3].value_offset + vlrs[id_geokeydirectorytag].data.keys[3].value_offset end end @@ -125,40 +118,25 @@ function epsg_code!(header::LasHeader, epsg::Integer) end # read old header metadata - old_vlrlength = header.n_vlr == 0 ? 0 : sum(sizeof, header.variable_length_records) + old_vlrlength = header.n_vlr == 0 ? 0 : sum(sizeof, values(header.variable_length_records)) old_offset = header.data_offset # reconstruct VLRs - vlrs = LasVariableLengthRecord[] - srid = SRID(:epsg,epsg) - push!(vlrs, LasVariableLengthRecord(header, srid)) # keep existing non-SRS VLRs intact - append!(vlrs, filter(!is_srs, header.variable_length_records)) + vlrs = filter(!is_srs, header.variable_length_records) + srid = SRID(:epsg,epsg) + proj = LasVariableLengthRecord(header, srid) + vlrs[proj.record_id] = proj # update header header.variable_length_records = vlrs header.n_vlr = length(header.variable_length_records) - new_vlrlength = header.n_vlr == 0 ? 0 : sum(sizeof, header.variable_length_records) + new_vlrlength = header.n_vlr == 0 ? 0 : sum(sizeof, values(header.variable_length_records)) # update offset to point data, assuming the VLRs come before the data, i.e. 
not extended VLR header.data_offset = old_offset - old_vlrlength + new_vlrlength header end -function read_vlr_data(io::IO, record_id::Integer, nb::Integer) - if record_id == id_geokeydirectorytag - return read(io, GeoKeys) - elseif record_id == id_geodoubleparamstag - double_params = zeros(nb ÷ 8) - read!(io, double_params) - return GeoDoubleParamsTag(double_params) - elseif record_id == id_geoasciiparamstag - ascii_params = readstring(io, nb) - return GeoAsciiParamsTag(ascii_params, nb) - else - return read(io, nb) - end -end - function Base.read(io::IO, ::Type{GeoKeys}) key_directory_version = read(io, UInt16) key_reversion = read(io, UInt16) diff --git a/src/vlrs.jl b/src/vlrs.jl index 09ce50e..b3b1aa8 100644 --- a/src/vlrs.jl +++ b/src/vlrs.jl @@ -4,30 +4,108 @@ organization defined binary metadata in LAS files. """ struct LasVariableLengthRecord reserved::UInt16 - user_id::AbstractString + user_id::FixedString{16} record_id::UInt16 - description::AbstractString + record_length_after_header::UInt16 + description::FixedString{32} data # anything with read+write+sizeof methods, like GeoKeys or Vector{UInt8} end +LasVariableLengthRecord(r::UInt16, s::String, i::UInt16, d::String, x::Any) = LasVariableLengthRecord(r, FixedString{16}(s), i, sizeof(x), FixedString{32}(d), x) + +""" +A LAS "extended variable length record" - the generic way to store large +extra user or organization defined binary metadata in LAS files. +""" +struct ExtendedLasVariableLengthRecord + reserved::UInt16 + user_id::FixedString{16} # 16 bytes + record_id::UInt16 + record_length_after_header::UInt64 + description::FixedString{32} # 32 bytes + data # anything with read+write+sizeof methods, like GeoKeys or Vector{UInt8} +end + +ExtendedLasVariableLengthRecord(r::UInt16, s::String, i::UInt16, d::String, x::Any) = LasVariableLengthRecord(r, FixedString{16}(s), i, sizeof(x), FixedString{32}(d), x) + +function Base.show(io::IO, vlr::Union{LasVariableLengthRecord, ExtendedLasVariableLengthRecord}) + println(io, "Variable length record with id: $(vlr.record_id), description: $(vlr.description)") +end + +"""Read VLR data record, with specific branches for known record ids.""" +function read_vlr_data(io::IO, record_id::Integer, nb::Integer) + + # classification + if record_id == 0 + @assert nb == 256 * 16 "Size of classification data VLR is wrong." + return [read(io, Classification) for _=1:256] + + # description + elseif record_id == 3 + return read(io, FixedString{nb}) + + # extra bytes + elseif record_id == 4 + n_fields = Int(nb // 192) + return [read(io, ExtraBytes) for _=1:n_fields] + + # spatial reference records + elseif record_id == id_geokeydirectorytag + return read(io, GeoKeys) + elseif record_id == id_geodoubleparamstag + double_params = zeros(nb ÷ 8) + read!(io, double_params) + return GeoDoubleParamsTag(double_params) + elseif record_id == id_geoasciiparamstag + return read(io, FixedString{nb}) + + # waveform descriptor for LAS 1.3 and 1.4 + elseif (100 <= record_id < 355) + return read(io, waveform_descriptor) + else + return read(io, nb) + end +end + # Read a variable length metadata record from a stream. # # If `extended` is true, the VLR is one of the extended VLR types specified in # the LAS 1.4 spec which can be larger and come after the point data. -function Base.read(io::IO, ::Type{LasVariableLengthRecord}, extended::Bool=false) +function Base.read(io::IO, ::Type{LasVariableLengthRecord}) # `reserved` is meant to be 0 according to the LAS spec 1.4, but earlier # versions set it to 0xAABB. 
Whatever, I guess we just store&ignore for now. # See https://groups.google.com/forum/#!topic/lasroom/SVtNBA2y9iI reserved = read(io, UInt16) - user_id = readstring(io, 16) + user_id = read(io, FixedString{16}) record_id = read(io, UInt16) - record_data_length::Int = extended ? read(io, UInt64) : read(io, UInt16) - description = readstring(io, 32) + record_data_length = read(io, UInt16) + description = read(io, FixedString{32}) data = read_vlr_data(io, record_id, record_data_length) LasVariableLengthRecord( reserved, user_id, record_id, + record_data_length, + description, + data + ) +end + +function Base.read(io::IO, ::Type{ExtendedLasVariableLengthRecord}) + # `reserved` is meant to be 0 according to the LAS spec 1.4, but earlier + # versions set it to 0xAABB. Whatever, I guess we just store&ignore for now. + # See https://groups.google.com/forum/#!topic/lasroom/SVtNBA2y9iI + reserved = read(io, UInt16) + user_id = read(io, FixedString{16}) + record_id = read(io, UInt16) + record_data_length = read(io, UInt64) + description = read(io, FixedString{32}) + data = read_vlr_data(io, record_id, record_data_length) + ExtendedLasVariableLengthRecord( + reserved, + user_id, + record_id, + record_data_length, description, data ) @@ -35,15 +113,126 @@ end function Base.write(io::IO, vlr::LasVariableLengthRecord, extended::Bool=false) write(io, vlr.reserved) - writestring(io, vlr.user_id, 16) + write(io, vlr.user_id) write(io, vlr.record_id) record_data_length = extended ? UInt64(sizeof(vlr.data)) : UInt16(sizeof(vlr.data)) write(io, record_data_length) - writestring(io, vlr.description, 32) + write(io, vlr.description) write(io, vlr.data) nothing end # size of a VLR in bytes # assumes it is not extended VLR -Base.sizeof(vlr::LasVariableLengthRecord) = 54 + sizeof(vlr.data) +Base.sizeof(vlr::LasVariableLengthRecord) = 54 + vlr.record_length_after_header +Base.sizeof(vlr::ExtendedLasVariableLengthRecord) = 60 + vlr.record_length_after_header + +"""LASF_Spec record id 0.""" +@gen_io struct Classification + class_number::UInt8 + description::FixedString{15} +end + +"""LASF_Spec record id 4 data struct.""" +struct ExtraBytes{T<:Real} + data_type::DataType + reserved::UInt16 # 2 bytes + data_type_key::UInt8 # 1 byte + options::UInt8 # 1 byte + name::FixedString{32} # 32 bytes + unused::SVector{4, UInt8} # 4 bytes + no_data::SVector{3, T} # 24 = 3*8 bytes + min::SVector{3, T} # 24 = 3*8 bytes + max::SVector{3, T} # 24 = 3*8 bytes + scale::SVector{3, Float64} # 24 = 3*8 bytes + offset::SVector{3, Float64} # 24 = 3*8 bytes + description::FixedString{32} # 32 bytes +end + +datatypes = Dict( + # 0x00 => special case + # SVector{1,} + 0x01 => UInt8, + 0x02 => Int8, + 0x03 => UInt16, + 0x04 => Int16, + 0x05 => UInt32, + 0x06 => Int32, + 0x07 => UInt64, + 0x08 => Int64, + 0x09 => Float32, + 0x0a => Float64, + # SVector{2,} + 0x0b => SVector{2, UInt8}, + 0x0c => SVector{2, Int8}, + 0x0d => SVector{2, UInt16}, + 0x0e => SVector{2, Int16}, + 0x0f => SVector{2, UInt32}, + 0x10 => SVector{2, Int32}, + 0x11 => SVector{2, UInt64}, + 0x12 => SVector{2, Int64}, + 0x13 => SVector{2, Float32}, + 0x14 => SVector{2, Float64}, + # SVector{3,} + 0x15 => SVector{3, UInt8}, + 0x16 => SVector{3, Int8}, + 0x17 => SVector{3, UInt16}, + 0x18 => SVector{3, Int16}, + 0x19 => SVector{3, UInt32}, + 0x1a => SVector{3, Int32}, + 0x1b => SVector{3, UInt64}, + 0x1c => SVector{3, Int64}, + 0x1d => SVector{3, Float32}, + 0x1e => SVector{3, Float64}) + +"""Determine upcasted type for extra_bytes struct.""" +function upcasttype(t::UInt8) + if t 
in (9, 10, 19, 20, 29, 30)
+        return Float64
+    elseif iseven(t)
+        return Int64
+    else
+        return UInt64
+    end
+end
+
+function Base.read(io::IO, ::Type{ExtraBytes})
+    reserved = read(io, UInt16)
+    data_type_key = read(io, UInt8)
+    options = read(io, UInt8)
+    name = read(io, FixedString{32})
+    # lowercase with _ for use as fieldname
+    name = FixedString{32}(replace(lowercase(name), " " => "_"))
+    unused = read(io, SVector{4, UInt8})
+
+    # determine datatype
+    if data_type_key == 0
+        data_type = SVector{Int(options), UInt8}
+    elseif data_type_key in keys(datatypes)
+        data_type = datatypes[data_type_key]
+    else
+        error("Invalid extra_bytes structure.")
+    end
+    upcast_data_type = upcasttype(data_type_key)
+
+    no_data = read(io, SVector{3, upcast_data_type}) # 24 = 3*8 bytes
+    min = read(io, SVector{3, upcast_data_type}) # 24 = 3*8 bytes
+    max = read(io, SVector{3, upcast_data_type}) # 24 = 3*8 bytes
+    scale = read(io, SVector{3, Float64}) # 24 = 3*8 bytes
+    offset = read(io, SVector{3, Float64}) # 24 = 3*8 bytes
+    description = read(io, FixedString{32}) # 32 bytes
+    ExtraBytes(
+        data_type,
+        reserved,
+        data_type_key,
+        options,
+        name,
+        unused,
+        no_data,
+        min,
+        max,
+        scale,
+        offset,
+        description
+    )
+end
diff --git a/src/waveform.jl b/src/waveform.jl
new file mode 100644
index 0000000..e1f1cbb
--- /dev/null
+++ b/src/waveform.jl
@@ -0,0 +1,49 @@
+"""Waveform Packet Descriptor User Defined Record.
+User ID: LASF_Spec, Record ID: n, where n > 99 and n < 355."""
+@gen_io struct waveform_descriptor
+    bits_sample::UInt8 # size of sample in bits
+    compression_type::UInt8
+    n_samples::UInt32
+    temp_spacing::UInt32 # The temporal sample spacing in picoseconds
+    digitizer_gain::Float64
+    digitizer_offset::Float64
+end
+
+waveform_sample_types = Dict(
+    UInt8(8) => UInt8,
+    UInt8(16) => UInt16,
+    UInt8(32) => UInt32
+)
+
+function waveform(p::LasPoint, header::LasHeader)
+    @assert LasIO.waveform_internal(header) "Only internal waveforms are supported."
+    @assert 0xffff in keys(header.variable_length_records) "No internal waveforms found."
+    evlr = header.variable_length_records[0xffff]
+
+    # get waveform descriptor
+    record = UInt16(p.wave_packet_descriptor_index + 99)
+    @assert record in keys(header.variable_length_records) "Waveform descriptor #$record not found."
+    wfd = header.variable_length_records[record]
+    wfdd = wfd.data
+    @assert wfdd.bits_sample in (8, 16, 32) "Samples with #$(wfdd.bits_sample) bits not supported."
+
+    # get raw waveform
+    wf_start = p.waveform_data_offset - 60
+    size = p.waveform_packet_size
+    raw_waveform = evlr.data[(wf_start+1):(wf_start + size)]
+    @assert wfdd.bits_sample * wfdd.n_samples / 8 <= length(raw_waveform)
+    raw_waveform = reinterpret(waveform_sample_types[wfdd.bits_sample], raw_waveform)
+
+    # calculate coordinates and real values for each sample
+    waves = Array{Float32}(undef, length(raw_waveform), 5)
+    distances = Vector{Float32}(undef, length(raw_waveform))
+    for (is, sample) in enumerate(raw_waveform)
+        dist = p.waveform_return_point_location - is * wfdd.temp_spacing
+        x = p.x + dist * p.xt
+        y = p.y + dist * p.yt
+        z = p.z + dist * p.zt
+        v = muladd(wfdd.digitizer_gain, sample, wfdd.digitizer_offset) # Float64
+        waves[is, :] .= dist, x, y, z, v
+    end
+    waves
+end
diff --git a/test/fixedstrings.jl b/test/fixedstrings.jl
new file mode 100644
index 0000000..e67ea7f
--- /dev/null
+++ b/test/fixedstrings.jl
@@ -0,0 +1,16 @@
+import LasIO.FixedString
+
+@testset "FixedString" begin
+    @test read(IOBuffer("asdf\0\0\0\0"), FixedString{8})::FixedString{8} == "asdf"
+    @test read(IOBuffer("asdf\0\0\0\0"), FixedString{4})::FixedString{4} == "asdf"
+    @test read(IOBuffer("\0\0\0\0"), FixedString{4}) == ""
+
+    buf = IOBuffer()
+    write(buf, FixedString{6}("qwer"))
+    @test String(take!(buf)) == "qwer\0\0"
+
+    @test_throws ArgumentError FixedString{4}("asdfasdf")
+    @test FixedString{4}("asdfasdf", truncate=true) == "asd"
+    @test FixedString{4}("asdfasdf", truncate=true, nullterm=false) == "asdf"
+end
+
diff --git a/test/las1.3.las b/test/las1.3.las
new file mode 100644
index 0000000..ff0bed0
Binary files /dev/null and b/test/las1.3.las differ
diff --git a/test/las1.4.las b/test/las1.4.las
new file mode 100644
index 0000000..154234d
Binary files /dev/null and b/test/las1.4.las differ
diff --git a/test/meta.jl b/test/meta.jl
new file mode 100644
index 0000000..2ed97a0
--- /dev/null
+++ b/test/meta.jl
@@ -0,0 +1,10 @@
+using FileIO
+using LasIO
+using Test
+
+@testset "Meta functions" begin
+    newfields = [(:testfield, Float64)]
+
+    nt = LasIO.gen_append_struct(LasPoint0, newfields)
+    @test :testfield in fieldnames(nt)
+end
diff --git a/test/runtests.jl b/test/runtests.jl
index e770937..ddc80e3 100644
--- a/test/runtests.jl
+++ b/test/runtests.jl
@@ -2,13 +2,11 @@ using FileIO
 using LasIO
 using Test
 
+@testset "LasIO" begin
 include("stream.jl")
+include("fixedstrings.jl")
 
 workdir = dirname(@__FILE__)
-# source: http://www.liblas.org/samples/
-filename = "libLAS_1.2.las" # point format 0
-testfile = joinpath(workdir, filename)
-writefile = joinpath(workdir, "libLAS_1.2-out.las")
 
 "Find the centroid of all points in a LAS file"
 function centroid(io, header)
@@ -35,103 +33,123 @@ function centroid(io, header)
     x_avg, y_avg, z_avg
 end
 
-# reading point by point
-open(testfile) do io
-    # magic bytes
-    @test String(read(io, 4)) == "LASF"
-    header = read(io, LasHeader)
-
-    seek(io, header.data_offset)
-    x_avg, y_avg, z_avg = centroid(io, header)
-
-    @test x_avg ≈ 1442694.2739025319
-    @test y_avg ≈ 377449.24373880465
-    @test z_avg ≈ 861.60254888088491
-
-    seek(io, header.data_offset)
-    p = read(io, LasPoint0)
-
-    @test xcoord(p, header) ≈ 1.44013394e6
-    @test xcoord(1.44013394e6, header) ≈ p.x
-    @test ycoord(p, header) ≈ 375000.23
-    @test ycoord(375000.23, header) ≈ p.y
-    @test zcoord(p, header) ≈ 846.66
-    @test zcoord(846.66, header) ≈ p.z
-    @test intensity(p) === 0x00fa
-    @test scan_angle(p) === Int8(0)
-    @test user_data(p) === 0x00
-    @test pt_src_id(p) === 0x001d
-    @test return_number(p) === 0x00
-
@test number_of_returns(p) === 0x00 - @test scan_direction(p) === false - @test edge_of_flight_line(p) === false - @test classification(p) === 0x02 - @test synthetic(p) === false - @test key_point(p) === false - @test withheld(p) === false - - # raw bytes composed of bit fields - @test flag_byte(p) === 0x00 - @test raw_classification(p) === 0x02 - - # recompose bytes with bit fields - @test flag_byte(return_number(p),number_of_returns(p),scan_direction(p),edge_of_flight_line(p)) === p.flag_byte - @test raw_classification(classification(p),synthetic(p),key_point(p),withheld(p)) === p.raw_classification - - # TODO GPS time, colors (not in this test file, is point data format 0) -end +@testset "Single point" begin + # reading point by point + # source: http://www.liblas.org/samples/ + filename = "libLAS_1.2.las" # point format 0 + testfile = joinpath(workdir, filename) + writefile = joinpath(workdir, "libLAS_1.2-out.las") + + open(testfile) do io + # magic bytes + @test String(read(io, 4)) == "LASF" + header = read(io, LasHeader) + + seek(io, header.data_offset) + x_avg, y_avg, z_avg = centroid(io, header) + + @test x_avg ≈ 1442694.2739025319 + @test y_avg ≈ 377449.24373880465 + @test z_avg ≈ 861.60254888088491 -# reading complete file into memory -# test if output file matches input file -header, pointdata = load(testfile) -n = length(pointdata) -save(writefile, header, pointdata) -@test hash(read(testfile)) == hash(read(writefile)) -rm(writefile) - -# testing a las file version 1.0 point format 1 file with VLRs -srsfile = joinpath(workdir, "srs.las") -srsfile_out = joinpath(workdir, "srs-out.las") -srsheader, srspoints = load(srsfile) -for record in srsheader.variable_length_records - @test record.reserved === 0xaabb - @test record.user_id == "LASF_Projection" - @test typeof(record.description) == String - if record.record_id == 34735 - @test record.data.key_directory_version === UInt16(1) - @test record.data.key_reversion === UInt16(1) - @test record.data.minor_revision === UInt16(0) - @test record.data.number_of_keys === UInt16(length((record.data.keys))) - @test typeof(record.data.keys) == Vector{LasIO.KeyEntry} + seek(io, header.data_offset) + p = read(io, LasPoint0) + + @test xcoord(p, header) ≈ 1.44013394e6 + @test xcoord(1.44013394e6, header) ≈ p.x + @test ycoord(p, header) ≈ 375000.23 + @test ycoord(375000.23, header) ≈ p.y + @test zcoord(p, header) ≈ 846.66 + @test zcoord(846.66, header) ≈ p.z + @test intensity(p) === 0x00fa + @test scan_angle(p) === Int8(0) + @test user_data(p) === 0x00 + @test pt_src_id(p) === 0x001d + @test return_number(p) === 0x00 + @test number_of_returns(p) === 0x00 + @test scan_direction(p) === false + @test edge_of_flight_line(p) === false + @test classification(p) === 0x02 + @test synthetic(p) === false + @test key_point(p) === false + @test withheld(p) === false + + # raw bytes composed of bit fields + @test flag_byte(p) === 0x00 + @test raw_classification(p) === 0x02 + + # recompose bytes with bit fields + @test flag_byte(return_number(p),number_of_returns(p),scan_direction(p),edge_of_flight_line(p)) === p.flag_byte + @test raw_classification(classification(p),synthetic(p),key_point(p),withheld(p)) === p.raw_classification + + # TODO GPS time, colors (not in this test file, is point data format 0) end end -@test srsheader.version_major == 1 -@test srsheader.version_minor == 0 -@test srsheader.data_format_id == 1 -@test srsheader.n_vlr == 3 -@test isa(srsheader.variable_length_records, Vector{LasVariableLengthRecord}) -for vlr in 
srsheader.variable_length_records - @test vlr.reserved === 0xaabb - @test vlr.user_id == "LASF_Projection" - @test vlr.description == "" + +@testset "Complete file" begin + # reading complete file into memory + # source: http://www.liblas.org/samples/ + filename = "libLAS_1.2.las" # point format 0 + testfile = joinpath(workdir, filename) + writefile = joinpath(workdir, "libLAS_1.2-out.las") + + # test if output file matches input file + header, pointdata = load(testfile) + @test LasIO.epsg_code(header) === nothing + n = length(pointdata) + save(writefile, header, pointdata) + @test hash(read(testfile)) == hash(read(writefile)) + rm(writefile) +end + +@testset "File Format 1 with VLRS" begin + # test + # testing a las file version 1.0 point format 1 file with VLRs + srsfile = joinpath(workdir, "srs.las") + srsfile_out = joinpath(workdir, "srs-out.las") + srsheader, srspoints = load(srsfile) + for (_, record) in srsheader.variable_length_records + @test record.reserved === 0xaabb + @test record.user_id == "LASF_Projection" + @test typeof(record.description) == LasIO.FixedString{32} + if record.record_id == 34735 + @test record.data.key_directory_version === UInt16(1) + @test record.data.key_reversion === UInt16(1) + @test record.data.minor_revision === UInt16(0) + @test record.data.number_of_keys === UInt16(length((record.data.keys))) + @test typeof(record.data.keys) == Vector{LasIO.KeyEntry} + end + end + + @test srsheader.version_major == 1 + @test srsheader.version_minor == 0 + @test srsheader.data_format_id == 1 + @test srsheader.n_vlr == 3 + @test isa(srsheader.variable_length_records, Dict{UInt16, Union{LasVariableLengthRecord, ExtendedLasVariableLengthRecord}}) + for (_, vlr) in srsheader.variable_length_records + @test vlr.reserved === 0xaabb + @test vlr.user_id == "LASF_Projection" + @test vlr.description == "" + end + + @test srsheader.variable_length_records[LasIO.id_geokeydirectorytag].record_id == LasIO.id_geokeydirectorytag + @test srsheader.variable_length_records[LasIO.id_geodoubleparamstag].record_id == LasIO.id_geodoubleparamstag + @test srsheader.variable_length_records[LasIO.id_geoasciiparamstag].record_id == LasIO.id_geoasciiparamstag + @test typeof(srsheader.variable_length_records[LasIO.id_geokeydirectorytag].data) == LasIO.GeoKeys + @test typeof(srsheader.variable_length_records[LasIO.id_geodoubleparamstag].data) == LasIO.GeoDoubleParamsTag + @test typeof(srsheader.variable_length_records[LasIO.id_geoasciiparamstag].data) == FixedString{0x0100} + + @test LasIO.epsg_code(srsheader) === UInt16(32617) + # set the SRS. Note: this will not change points, but merely set SRS-metadata. 
+ epsgheader = deepcopy(srsheader) + LasIO.epsg_code!(epsgheader, 32633) # set to WGS 84 / UTM zone 33N, not the actual SRS + @test epsgheader.variable_length_records[LasIO.id_geokeydirectorytag].record_id == LasIO.id_geokeydirectorytag + @test count(LasIO.is_srs, srsheader.variable_length_records) == 3 + @test count(LasIO.is_srs, epsgheader.variable_length_records) == 1 + + save(srsfile_out, srsheader, srspoints) + @test hash(read(srsfile)) == hash(read(srsfile_out)) + rm(srsfile_out) end -@test srsheader.variable_length_records[1].record_id == LasIO.id_geokeydirectorytag -@test srsheader.variable_length_records[2].record_id == LasIO.id_geodoubleparamstag -@test srsheader.variable_length_records[3].record_id == LasIO.id_geoasciiparamstag -@test typeof(srsheader.variable_length_records[1].data) == LasIO.GeoKeys -@test typeof(srsheader.variable_length_records[2].data) == LasIO.GeoDoubleParamsTag -@test typeof(srsheader.variable_length_records[3].data) == LasIO.GeoAsciiParamsTag - -@test LasIO.epsg_code(header) === nothing -@test LasIO.epsg_code(srsheader) === UInt16(32617) -# set the SRS. Note: this will not change points, but merely set SRS-metadata. -epsgheader = deepcopy(srsheader) -LasIO.epsg_code!(epsgheader, 32633) # set to WGS 84 / UTM zone 33N, not the actual SRS -@test epsgheader.variable_length_records[1].record_id == LasIO.id_geokeydirectorytag -@test count(LasIO.is_srs, srsheader.variable_length_records) == 3 -@test count(LasIO.is_srs, epsgheader.variable_length_records) == 1 - -save(srsfile_out, srsheader, srspoints) -@test hash(read(srsfile)) == hash(read(srsfile_out)) -rm(srsfile_out) +end diff --git a/test/stream.jl b/test/stream.jl index 0cf58cb..4daa1ab 100644 --- a/test/stream.jl +++ b/test/stream.jl @@ -2,29 +2,30 @@ using FileIO using LasIO using Test -workdir = dirname(@__FILE__) -# source: http://www.liblas.org/samples/ -filename = "libLAS_1.2.las" # point format 0 -testfile = joinpath(workdir, filename) -writefile = joinpath(workdir, "libLAS_1.2-out.las") +@testset "Streaming LAS files" begin + workdir = dirname(@__FILE__) + # source: http://www.liblas.org/samples/ + filename = "libLAS_1.2.las" # point format 0 + testfile = joinpath(workdir, filename) + writefile = joinpath(workdir, "libLAS_1.2-out.las") -# test if output file matches input file -header, pointdata = load(testfile, mmap=true) -n = length(pointdata) -save(writefile, header, pointdata) -@test hash(read(testfile)) == hash(read(writefile)) -rm(writefile) + # test if output file matches input file + header, pointdata = load(testfile, mmap=true) + save(writefile, header, pointdata) + @test hash(read(testfile)) == hash(read(writefile)) + rm(writefile) -# testing a las file version 1.0 point format 1 file with VLRs -srsfile = joinpath(workdir, "srs.las") -srsfile_out = joinpath(workdir, "srs-out.las") -srsheader, srspoints = load(srsfile, mmap=true) -save(srsfile_out, srsheader, srspoints) -@test hash(read(srsfile)) == hash(read(srsfile_out)) -rm(srsfile_out) + # testing a las file version 1.0 point format 1 file with VLRs + srsfile = joinpath(workdir, "srs.las") + srsfile_out = joinpath(workdir, "srs-out.las") + srsheader, srspoints = load(srsfile, mmap=true) + save(srsfile_out, srsheader, srspoints) + @test hash(read(srsfile)) == hash(read(srsfile_out)) + # rm(srsfile_out) -# Test editing stream file -srsfile = joinpath(workdir, "srs.las") -srsfile_out = joinpath(workdir, "srs-out.las") -srsheader, srspoints = load(srsfile, mmap=true) -@test_throws ErrorException srspoints[5] = 
LasPoint1(1,1,1,1,1,1,1,1,1,1.0)
+    # Test editing stream file
+    srsfile = joinpath(workdir, "srs.las")
+    srsfile_out = joinpath(workdir, "srs-out.las")
+    srsheader, srspoints = load(srsfile, mmap=true)
+    @test_throws ErrorException srspoints[5] = LasPoint1(1,1,1,1,1,1,1,1,1,1.0)
+end
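
A minimal usage sketch of the reading API changed above (Dict-backed VLRs, the 64-bit records_count_new field, point formats 4-10). The file name is hypothetical and pointformat is internal to the module, so it is qualified with LasIO:

    using FileIO, LasIO

    # load() returns the header and the point records; for LAS 1.4 files the
    # point count comes from the 64-bit records_count_new header field.
    header, points = load("example_1.4.las")    # hypothetical file
    pointtype = LasIO.pointformat(header)       # e.g. LasPoint6 for point format 6

    # VLRs and EVLRs are now stored together in one Dict keyed by record_id.
    for (record_id, vlr) in header.variable_length_records
        println(record_id, " => ", vlr.description)
    end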
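The new waveform helper in src/waveform.jl decodes internal waveform packets into per-sample values. A sketch under the assumption that the file stores its waveforms internally (global encoding bit set) and that the points carry wave packets (formats 4, 5, 9 or 10); the file name is hypothetical:

    using FileIO, LasIO

    header, points = load("fullwaveform_1.3.las")   # hypothetical file
    p = first(points)                                # e.g. a LasPoint4 record
    # Each row of the returned n_samples x 5 Float32 matrix holds the distance
    # along the pulse, the x/y/z location of the sample and the digitizer value.
    w = LasIO.waveform(p, header)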