From 0c32deeafd8317a8c000eb92c0abc069f45cd86a Mon Sep 17 00:00:00 2001 From: Tom Clune Date: Fri, 13 Mar 2020 17:59:50 -0400 Subject: [PATCH] Added examples. --- ChangeLog.md | 11 +- Examples/CMakeLists.txt | 4 + Examples/Iterators/CMakeLists.txt | 6 + Examples/Iterators/iterator.F90 | 65 ++++++ Examples/Iterators/iterator.yaml | 15 ++ Examples/Simple/CMakeLists.txt | 6 + Examples/Simple/simple.F90 | 107 +++++++++ Examples/Simple/simple.yaml | 25 +++ Examples/Trivial/CMakeLists.txt | 6 + Examples/Trivial/trivial.F90 | 19 ++ Examples/Trivial/trivial.yaml | 3 + tests/Test_Lexer.pf | 358 +++++++++++++++--------------- 12 files changed, 449 insertions(+), 176 deletions(-) create mode 100644 Examples/CMakeLists.txt create mode 100644 Examples/Iterators/CMakeLists.txt create mode 100644 Examples/Iterators/iterator.F90 create mode 100644 Examples/Iterators/iterator.yaml create mode 100644 Examples/Simple/CMakeLists.txt create mode 100644 Examples/Simple/simple.F90 create mode 100644 Examples/Simple/simple.yaml create mode 100644 Examples/Trivial/CMakeLists.txt create mode 100644 Examples/Trivial/trivial.F90 create mode 100644 Examples/Trivial/trivial.yaml diff --git a/ChangeLog.md b/ChangeLog.md index e4bc6a6..7d638c0 100644 --- a/ChangeLog.md +++ b/ChangeLog.md @@ -9,10 +9,17 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## Added ## Fixed +## [0.2.1] - 2020-03-13 + +### Added + +- Added examples - apparently failed to commit these previously. Build + with 'make Examples', and go to the build directory to run. + ## [0.2.0] - 2020-03-11 -## Added +### Added - New interfaces for Configuration::get() . allow for default values and testing if present @@ -22,7 +29,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Improved error handling throughout . still needs more work. -## Fixed +### Fixed - Some errors in lexing were exposed with pFlogger use cases. 
diff --git a/Examples/CMakeLists.txt b/Examples/CMakeLists.txt new file mode 100644 index 0000000..c88b167 --- /dev/null +++ b/Examples/CMakeLists.txt @@ -0,0 +1,4 @@ +add_subdirectory (Trivial) +add_subdirectory (Simple) +add_subdirectory (Iterators) + diff --git a/Examples/Iterators/CMakeLists.txt b/Examples/Iterators/CMakeLists.txt new file mode 100644 index 0000000..7eabb66 --- /dev/null +++ b/Examples/Iterators/CMakeLists.txt @@ -0,0 +1,6 @@ +add_executable (iterator.x iterator.F90) +target_link_libraries (iterator.x yafyaml gftl-shared) +add_dependencies (examples iterator.x) + +FILE (COPY iterator.yaml DESTINATION ${CMAKE_CURRENT_BINARY_DIR}) + diff --git a/Examples/Iterators/iterator.F90 b/Examples/Iterators/iterator.F90 new file mode 100644 index 0000000..fd74dd1 --- /dev/null +++ b/Examples/Iterators/iterator.F90 @@ -0,0 +1,65 @@ +! This example does minimal error checking. The point is to +! demonstrate how to use iterators. In the absence of exceptions, +! error checking becomes verbose and obscures the main features. + +program main + use yafyaml + use gFTL_IntegerVector + use gftl_StringIntegerMap + implicit none + + type(Parser) :: p + type(Configuration) :: config + type(Configuration) :: subcfg + type(Configuration) :: shape_cfg + + type(ConfigurationIterator) :: iter, iter2 + + integer :: status + character(:), pointer :: key + integer :: prime + character(:), pointer :: shape + integer :: n_edges + + + p = Parser('core') + config = p%load(FileStream('iterator.yaml')) + + ! Outer loop over mapping + iter = config%begin() + do while (iter /= config%end()) + ! Access mapping values with key()/value() methods on iterator. + key => iter%key() + subcfg = iter%value() + + select case (key) + case ('primes') + + ! loop over primes (a yaml sequence) + iter2 = subcfg%begin() + do while (iter2 /= subcfg%end()) + ! Access sequence values with get() method on iterator + prime = iter2%get() ! 
cast as integer + print*,'another prime: ', prime + call iter2%next() + end do + + case ('shapes') + + ! loop over shapes (a yaml mapping) + iter2 = subcfg%begin() + do while (iter2 /= subcfg%end()) + shape => iter2%key() + shape_cfg = iter2%value() + n_edges = shape_cfg%at('num_edges') + print*,'shape: ',shape,' has ',n_edges, 'sides' + call iter2%next() + end do + + end select + + + call iter%next() + end do + +end program main diff --git a/Examples/Iterators/iterator.yaml b/Examples/Iterators/iterator.yaml new file mode 100644 index 0000000..4b46b24 --- /dev/null +++ b/Examples/Iterators/iterator.yaml @@ -0,0 +1,15 @@ +primes: + - 2 + - 3 + - 5 + - 7 +shapes: + triangle: + num_edges: 3 + edge_length: 3.12 + square: + num_edges: 4 + edge_length: 6.8 + + + diff --git a/Examples/Simple/CMakeLists.txt b/Examples/Simple/CMakeLists.txt new file mode 100644 index 0000000..0a44f42 --- /dev/null +++ b/Examples/Simple/CMakeLists.txt @@ -0,0 +1,6 @@ +add_executable (simple.x simple.F90) +target_link_libraries (simple.x yafyaml gftl-shared) +add_dependencies (examples simple.x) + +FILE (COPY simple.yaml DESTINATION ${CMAKE_CURRENT_BINARY_DIR}) + diff --git a/Examples/Simple/simple.F90 b/Examples/Simple/simple.F90 new file mode 100644 index 0000000..228df33 --- /dev/null +++ b/Examples/Simple/simple.F90 @@ -0,0 +1,107 @@ +program main + use yafyaml + use gFTL_IntegerVector + use gftl_StringIntegerMap + implicit none + + type(Parser) :: p + type(Configuration) :: config + type(Configuration) :: subconfig + + real :: x + character(:), allocatable :: name + logical :: flag + + integer, allocatable :: sequence_a(:) + integer, allocatable :: sequence_b(:) + + integer :: v1, v2, v3 + + logical :: is_present + integer :: status + + + p = Parser('core') + config = p%load(FileStream('simple.yaml')) + + x = config%at('x') + + if (x == 1.234) then + print*,'success' + else + print*,'failure; expected 1.234 but found ', x + end if + + + flag = .false. 
+ flag = config%at('flag') + + if (flag) then + print*,'success' + else + print*,'failure; expected .true.' + end if + + sequence_a = config%at('sequence_a') + + if (all (sequence_a == [1,2,3,4])) then + print*,'success' + else + print*,'failure in handling flow sequence; expected .true.' + end if + + sequence_b = config%at('sequence_b') + + if (all (sequence_b == [1,2,3,4])) then + print*,'success' + else + print*,'failure in handling block sequence; expected .true.' + end if + + ! Flow mapping + v1 = config%at('mapping_a', 'v1') + v2 = config%at('mapping_a', 'v2') + + if (v1 == 7 .and. v2 == 8) then + print*,'success' + else + print*,'failure in handling flow mapping', v1, v2 + end if + + ! Block mapping + v1 = config%at('mapping_b', 'v1') + v2 = config%at('mapping_b', 'v2') + + if (v1 == 7 .and. v2 == 8) then + print*,'success' + else + print*,'failure in handling flow mapping', v1, v2 + end if + + call config%get(subconfig, 'mapping_b') + + v1 = -1 + call config%get(v1, 'mapping_b', 'v1', is_present=is_present, rc=status) + if (v1 == 7 .and. is_present .and. status == SUCCESS) then + print*,'success' + else + print*,'failure in handling flow mapping', v1, v2 + end if + + ! Handle missing values + call config%get(v3, 'mapping_b', 'v3', default=17, is_present=is_present, rc=status) + if (v3 == 17 .and. (.not. is_present) .and. status == SUCCESS) then + print*,'success' + else + print*,'failure in handling flow mapping' + end if + + ! error if wrong type: + call config%get(flag, 'mapping_b', 'v2', is_present=is_present, rc=status) + if (is_present .and. 
status == INCONSISTENT_TYPE) then + print*,'expected failure' + else + print*,'should have failed, but did not' + end if + +end program main diff --git a/Examples/Simple/simple.yaml b/Examples/Simple/simple.yaml new file mode 100644 index 0000000..db62b54 --- /dev/null +++ b/Examples/Simple/simple.yaml @@ -0,0 +1,25 @@ +--- +# exercising the basics and non-nested sequences/mappings +x: 1.234 +name: Fred +flag: True + +# flow sequence +sequence_a: [ 1 , 2 , 3 , 4 ] + +# block sequence +sequence_b: + - 1 + - 2 + - 3 + - 4 + +# block mapping +mapping_b: + v1: 7 + v2: 8 + +# flow mapping +mapping_a: { v1: 7 , v2: 8 } + +... diff --git a/Examples/Trivial/CMakeLists.txt b/Examples/Trivial/CMakeLists.txt new file mode 100644 index 0000000..04ed18c --- /dev/null +++ b/Examples/Trivial/CMakeLists.txt @@ -0,0 +1,6 @@ +add_executable (trivial.x trivial.F90) +target_link_libraries (trivial.x yafyaml) +add_dependencies (examples trivial.x) + +FILE (COPY trivial.yaml DESTINATION ${CMAKE_CURRENT_BINARY_DIR}) + diff --git a/Examples/Trivial/trivial.F90 b/Examples/Trivial/trivial.F90 new file mode 100644 index 0000000..14b7d7c --- /dev/null +++ b/Examples/Trivial/trivial.F90 @@ -0,0 +1,19 @@ +program main + use yafyaml + + type(Parser) :: p + type(Configuration) :: config + integer :: prime + + + p = Parser('core') + config = p%load(FileStream('trivial.yaml')) + + prime = config%at('prime') + + if (prime == 17) then + print*,'success' + else + print*,'failure; expected 17 but found ', prime + end if +end program main diff --git a/Examples/Trivial/trivial.yaml b/Examples/Trivial/trivial.yaml new file mode 100644 index 0000000..2691f1f --- /dev/null +++ b/Examples/Trivial/trivial.yaml @@ -0,0 +1,3 @@ +--- +prime: 17 +... 
diff --git a/tests/Test_Lexer.pf b/tests/Test_Lexer.pf index d3458d6..1a21640 100644 --- a/tests/Test_Lexer.pf +++ b/tests/Test_Lexer.pf @@ -1,269 +1,279 @@ module Test_Lexer - use, intrinsic :: iso_c_binding, only: NL => C_NEW_LINE - use yaFyaml - use funit - use fy_TokenVector - implicit none + use, intrinsic :: iso_c_binding, only: NL => C_NEW_LINE + use yaFyaml + use funit + use fy_TokenVector + implicit none contains - @test - subroutine test_is_at_document_start() - type(Lexer) :: lexr + @test + subroutine test_is_at_document_start() + type(Lexer) :: lexr - lexr = Lexer(Reader(TextStream("---"))) - @assertTrue(lexr%is_at_document_boundary('---')) + lexr = Lexer(Reader(TextStream("---"))) + @assertTrue(lexr%is_at_document_boundary('---')) - lexr = Lexer(Reader(TextStream("- -"))) - @assertFalse(lexr%is_at_document_boundary('---')) + lexr = Lexer(Reader(TextStream("- -"))) + @assertFalse(lexr%is_at_document_boundary('---')) - ! Must start in 0th column - lexr = Lexer(Reader(TextStream(" ---"))) - @assertFalse(lexr%is_at_document_boundary('---')) + ! Must start in 0th column + lexr = Lexer(Reader(TextStream(" ---"))) + @assertFalse(lexr%is_at_document_boundary('---')) - ! cannot have regular character immediately next - lexr = Lexer(Reader(TextStream("---a"))) - @assertFalse(lexr%is_at_document_boundary('---')) - - end subroutine test_is_at_document_start - - - @test - subroutine test_is_at_document_end() - type(Lexer) :: lexr + ! cannot have regular character immediately next + lexr = Lexer(Reader(TextStream("---a"))) + @assertFalse(lexr%is_at_document_boundary('---')) - lexr = Lexer(Reader(TextStream("..."))) - @assertTrue(lexr%is_at_document_boundary('...')) + end subroutine test_is_at_document_start - lexr = Lexer(Reader(TextStream(". ."))) - @assertFalse(lexr%is_at_document_boundary('...')) - ! 
Must end in 0th column - lexr = Lexer(Reader(TextStream(" ..."))) - @assertFalse(lexr%is_at_document_boundary('...')) + @test + subroutine test_is_at_document_end() + type(Lexer) :: lexr - ! cannot have regular character immediately next - lexr = Lexer(Reader(TextStream("...a"))) - @assertFalse(lexr%is_at_document_boundary('...')) - - end subroutine test_is_at_document_end + lexr = Lexer(Reader(TextStream("..."))) + @assertTrue(lexr%is_at_document_boundary('...')) - @test - subroutine test_scan_to_next_token() - type(Lexer) :: lexr + lexr = Lexer(Reader(TextStream(". ."))) + @assertFalse(lexr%is_at_document_boundary('...')) - lexr = Lexer(Reader(TextStream("a"))) - call lexr%scan_to_next_token() + ! Must end in 0th column + lexr = Lexer(Reader(TextStream(" ..."))) + @assertFalse(lexr%is_at_document_boundary('...')) + + ! cannot have regular character immediately next + lexr = Lexer(Reader(TextStream("...a"))) + @assertFalse(lexr%is_at_document_boundary('...')) + + end subroutine test_is_at_document_end + + @test + subroutine test_scan_to_next_token() + type(Lexer) :: lexr + + lexr = Lexer(Reader(TextStream("a"))) + call lexr%scan_to_next_token() #ifdef __GFORTRAN__ #else - @assert_that(lexr%peek(), is('a')) + @assert_that(lexr%peek(), is('a')) #endif - lexr = Lexer(Reader(TextStream("b"))) - call lexr%scan_to_next_token() + lexr = Lexer(Reader(TextStream("b"))) + call lexr%scan_to_next_token() #ifdef __GFORTRAN__ #else - @assert_that(lexr%peek(), is('b')) + @assert_that(lexr%peek(), is('b')) #endif - lexr = Lexer(Reader(TextStream(" b"))) - call lexr%scan_to_next_token() + lexr = Lexer(Reader(TextStream(" b"))) + call lexr%scan_to_next_token() #ifdef __GFORTRAN__ #else - @assert_that(lexr%peek(), is('b')) + @assert_that(lexr%peek(), is('b')) #endif - lexr = Lexer(Reader(EscapedTextStream(" \n b"))) - call lexr%scan_to_next_token() + lexr = Lexer(Reader(EscapedTextStream(" \n b"))) + call lexr%scan_to_next_token() #ifdef __GFORTRAN__ #else - 
@assert_that(lexr%peek(), is('b')) + @assert_that(lexr%peek(), is('b')) #endif - lexr = Lexer(Reader(EscapedTextStream(" # comment \n :"))) - call lexr%scan_to_next_token() + lexr = Lexer(Reader(EscapedTextStream(" # comment \n :"))) + call lexr%scan_to_next_token() #ifdef __GFORTRAN__ #else - @assert_that(lexr%peek(), is(':')) + @assert_that(lexr%peek(), is(':')) #endif - lexr = Lexer(Reader(EscapedTextStream(" # comment \n \n# foo #\n -"))) - call lexr%scan_to_next_token() + lexr = Lexer(Reader(EscapedTextStream(" # comment \n \n# foo #\n -"))) + call lexr%scan_to_next_token() #ifdef __GFORTRAN__ #else - @assert_that(lexr%peek(), is('-')) + @assert_that(lexr%peek(), is('-')) #endif - end subroutine test_scan_to_next_token + end subroutine test_scan_to_next_token - @test - subroutine test_flow_sequence_end_token() - type(Lexer) :: lexr - class(AbstractToken), allocatable :: token - character(:), allocatable :: id + @test + subroutine test_flow_sequence_end_token() + type(Lexer) :: lexr + class(AbstractToken), allocatable :: token + character(:), allocatable :: id - lexr = Lexer(Reader(TextStream("]"))) + lexr = Lexer(Reader(TextStream("]"))) - token = lexr%get_token() ! skip stream start - token = lexr%get_token() - id = token%get_id() + token = lexr%get_token() ! skip stream start + token = lexr%get_token() + id = token%get_id() #ifdef __GFORTRAN__ #else - @assert_that(id, is(equal_to(']'))) + @assert_that(id, is(equal_to(']'))) #endif - - end subroutine test_flow_sequence_end_token - @test - subroutine test_flow_mapping_end_token() - type(Lexer) :: lexr - class(AbstractToken), allocatable :: token - character(:), allocatable :: id + end subroutine test_flow_sequence_end_token + + @test + subroutine test_flow_mapping_end_token() + type(Lexer) :: lexr + class(AbstractToken), allocatable :: token + character(:), allocatable :: id - lexr = Lexer(Reader(TextStream("}"))) + lexr = Lexer(Reader(TextStream("}"))) - token = lexr%get_token() ! 
skip stream start - token = lexr%get_token() - id = token%get_id() + token = lexr%get_token() ! skip stream start + token = lexr%get_token() + id = token%get_id() #ifdef __GFORTRAN__ #else - @assert_that(id, is(equal_to(FLOW_MAPPING_END_INDICATOR))) + @assert_that(id, is(equal_to(FLOW_MAPPING_END_INDICATOR))) #endif - end subroutine test_flow_mapping_end_token + end subroutine test_flow_mapping_end_token - @test - subroutine test_flow_next_entry() - type(Lexer) :: lexr - class(AbstractToken), allocatable :: token - character(:), allocatable :: id + @test + subroutine test_flow_next_entry() + type(Lexer) :: lexr + class(AbstractToken), allocatable :: token + character(:), allocatable :: id - lexr = Lexer(Reader(TextStream(","))) + lexr = Lexer(Reader(TextStream(","))) - token = lexr%get_token() ! skip stream start - token = lexr%get_token() - id = token%get_id() + token = lexr%get_token() ! skip stream start + token = lexr%get_token() + id = token%get_id() #ifdef __GFORTRAN__ #else - @assert_that(id, is(equal_to(FLOW_NEXT_ENTRY_INDICATOR))) + @assert_that(id, is(equal_to(FLOW_NEXT_ENTRY_INDICATOR))) #endif - - end subroutine test_flow_next_entry - - @test - subroutine test_block_next_entry() - type(Lexer) :: lexr - class(AbstractToken), allocatable :: token - character(:), allocatable :: id - - lexr = Lexer(Reader(TextStream("- a"))) - - token = lexr%get_token() ! skip stream start - token = lexr%get_token() - id = token%get_id() - @assertEqual(id, "") - token = lexr%get_token() - id = token%get_id() + + end subroutine test_flow_next_entry + + @test + subroutine test_block_next_entry() + type(Lexer) :: lexr + class(AbstractToken), allocatable :: token + character(:), allocatable :: id + + lexr = Lexer(Reader(TextStream("- a"))) + + token = lexr%get_token() ! 
skip stream start + token = lexr%get_token() + id = token%get_id() + @assertEqual(id, "") + token = lexr%get_token() + id = token%get_id() #ifdef __GFORTRAN__ #else - @assert_that(id, is(equal_to(BLOCK_NEXT_ENTRY_INDICATOR))) + @assert_that(id, is(equal_to(BLOCK_NEXT_ENTRY_INDICATOR))) #endif - end subroutine test_block_next_entry + end subroutine test_block_next_entry - @test - subroutine test_value() - type(Lexer) :: lexr - class(AbstractToken), allocatable :: token - character(:), allocatable :: id + @test + subroutine test_value() + type(Lexer) :: lexr + class(AbstractToken), allocatable :: token + character(:), allocatable :: id - lexr = Lexer(Reader(TextStream("a:"))) + lexr = Lexer(Reader(TextStream("a:"))) - token = lexr%get_token() ! skip stream start - token = lexr%get_token() ! document start - token = lexr%get_token() - id = token%get_id() + token = lexr%get_token() ! skip stream start + token = lexr%get_token() ! document start + token = lexr%get_token() + id = token%get_id() #ifdef __GFORTRAN__ #else - @assert_that(id, is(equal_to(KEY_INDICATOR))) + @assert_that(id, is(equal_to(KEY_INDICATOR))) #endif - token = lexr%get_token() ! scalar token - token = lexr%get_token() - id = token%get_id() + token = lexr%get_token() ! scalar token + token = lexr%get_token() + id = token%get_id() #ifdef __GFORTRAN__ #else - @assert_that(id, is(equal_to(VALUE_INDICATOR))) + @assert_that(id, is(equal_to(VALUE_INDICATOR))) #endif - end subroutine test_value + end subroutine test_value - @test - subroutine test_plain_scalar() - type(Lexer) :: lexr - class(AbstractToken), allocatable :: token - character(:), allocatable :: id + @test + subroutine test_plain_scalar() + type(Lexer) :: lexr + class(AbstractToken), allocatable :: token + character(:), allocatable :: id - lexr = Lexer(Reader(TextStream("abc d "))) + lexr = Lexer(Reader(TextStream("abc d "))) - token = lexr%get_token() ! 
skip stream start - token = lexr%get_token() - id = token%get_id() - @assert_that(id, is(equal_to(''))) + token = lexr%get_token() ! skip stream start + token = lexr%get_token() + id = token%get_id() + @assert_that(id, is(equal_to(''))) - select type (token) - type is (ScalarToken) - @assert_that(token%is_plain, is(true())) + select type (token) + type is (ScalarToken) + @assert_that(token%is_plain, is(true())) #ifdef __GFORTRAN__ #else - @assert_that(token%value, is(equal_to('abc d'))) + @assert_that(token%value, is(equal_to('abc d'))) #endif - end select + end select - end subroutine test_plain_scalar + end subroutine test_plain_scalar - @test - subroutine test_is_value() - type(Lexer) :: lexr - class(AbstractToken), allocatable :: token - character(:), allocatable :: id + @test + subroutine test_is_value() + type(Lexer) :: lexr + class(AbstractToken), allocatable :: token + character(:), allocatable :: id - ! flow context - lexr = Lexer(Reader(TextStream(" a : b"))) - token = lexr%get_token() ! skip stream start - token = lexr%get_token() ! skip block mapping start - token = lexr%get_token() ! skip key - token = lexr%get_token() ! skip scalar "a" - token = lexr%get_token() - id = token%get_id() + ! flow context + lexr = Lexer(Reader(TextStream(" a : b"))) + token = lexr%get_token() ! skip stream start + token = lexr%get_token() ! skip block mapping start + token = lexr%get_token() ! skip key + token = lexr%get_token() ! skip scalar "a" + token = lexr%get_token() + id = token%get_id() #ifdef __GFORTRAN__ #else - @assert_that(id, is(equal_to(':'))) + @assert_that(id, is(equal_to(':'))) #endif - ! block context - no space after ":" - lexr = Lexer(Reader(TextStream("{? a : b}"))) - token = lexr%get_token() ! skip stream start - token = lexr%get_token() ! "{" - id = token%get_id() + ! block context - no space after ":" + lexr = Lexer(Reader(TextStream("{? a : b}"))) + token = lexr%get_token() ! skip stream start + token = lexr%get_token() ! 
"{" + id = token%get_id() #ifdef __GFORTRAN__ #else - @assert_that(id, is(equal_to('{'))) + @assert_that(id, is(equal_to('{'))) #endif - token = lexr%get_token() ! key marker - id = token%get_id() + token = lexr%get_token() ! key marker + id = token%get_id() #ifdef __GFORTRAN__ #else - @assert_that(id, is(equal_to('?'))) + @assert_that(id, is(equal_to('?'))) #endif - token = lexr%get_token() ! a - id = token%get_id() + token = lexr%get_token() ! a + id = token%get_id() #ifdef __GFORTRAN__ #else - @assert_that(id, is(equal_to(''))) + @assert_that(id, is(equal_to(''))) #endif - token = lexr%get_token() - id = token%get_id() + token = lexr%get_token() + id = token%get_id() #ifdef __GFORTRAN__ #else - @assert_that(id, is(equal_to(':'))) + @assert_that(id, is(equal_to(':'))) #endif - - end subroutine test_is_value + + end subroutine test_is_value + + @ + subroutine test_anchor() + type(Lexer) :: lexr + class(AbstractToken), allocatable :: token + character(:), allocatable :: id + + lexr = Lexer(Reader(EscapedTextStream(" a : b"))) + token = lexr%get_token() ! skip stream start + end module Test_Lexer