4
0

lexer.go 4.0 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235
  1. package ssh_config
import (
	"io"
	"strings"

	buffruneio "github.com/pelletier/go-buffruneio"
)
// Define state functions.
// sshLexStateFn is one state of the lexer: each invocation consumes some
// input, emits zero or more tokens, and returns the next state to run
// (nil when lexing is complete).
type sshLexStateFn func() sshLexStateFn

// sshLexer turns ssh_config text into a stream of tokens on the tokens
// channel. Position bookkeeping is split in two: line/col mark where the
// token currently being built started, while endbufferLine/endbufferCol
// track the read cursor just past the buffered runes.
type sshLexer struct {
	input  *buffruneio.Reader // Textual source
	buffer []rune             // Runes composing the current token
	tokens chan token         // emitted tokens; closed by run() when done
	line   uint32             // line where the current token starts (1-based)
	col    uint16             // column where the current token starts (1-based)
	endbufferLine uint32      // read-cursor line, one past the buffered runes
	endbufferCol  uint16      // read-cursor column, one past the buffered runes
}
  17. func (s *sshLexer) lexComment(previousState sshLexStateFn) sshLexStateFn {
  18. return func() sshLexStateFn {
  19. growingString := ""
  20. for next := s.peek(); next != '\n' && next != eof; next = s.peek() {
  21. if next == '\r' && s.follow("\r\n") {
  22. break
  23. }
  24. growingString += string(next)
  25. s.next()
  26. }
  27. s.emitWithValue(tokenComment, growingString)
  28. s.skip()
  29. return previousState
  30. }
  31. }
  32. // lex the space after an equals sign in a function
  33. func (s *sshLexer) lexRspace() sshLexStateFn {
  34. for {
  35. next := s.peek()
  36. if !isSpace(next) {
  37. break
  38. }
  39. s.skip()
  40. }
  41. return s.lexRvalue
  42. }
  43. func (s *sshLexer) lexEquals() sshLexStateFn {
  44. for {
  45. next := s.peek()
  46. if next == '=' {
  47. s.emit(tokenEquals)
  48. s.skip()
  49. return s.lexRspace
  50. }
  51. // TODO error handling here; newline eof etc.
  52. if !isSpace(next) {
  53. break
  54. }
  55. s.skip()
  56. }
  57. return s.lexRvalue
  58. }
  59. func (s *sshLexer) lexKey() sshLexStateFn {
  60. growingString := ""
  61. for r := s.peek(); isKeyChar(r); r = s.peek() {
  62. // simplified a lot here
  63. if isSpace(r) || r == '=' {
  64. s.emitWithValue(tokenKey, growingString)
  65. s.skip()
  66. return s.lexEquals
  67. }
  68. growingString += string(r)
  69. s.next()
  70. }
  71. s.emitWithValue(tokenKey, growingString)
  72. return s.lexEquals
  73. }
  74. func (s *sshLexer) lexRvalue() sshLexStateFn {
  75. growingString := ""
  76. for {
  77. next := s.peek()
  78. switch next {
  79. case '\n':
  80. s.emitWithValue(tokenString, growingString)
  81. s.skip()
  82. return s.lexVoid
  83. case '#':
  84. s.emitWithValue(tokenString, growingString)
  85. s.skip()
  86. return s.lexComment(s.lexVoid)
  87. case eof:
  88. s.next()
  89. }
  90. if next == eof {
  91. break
  92. }
  93. growingString += string(next)
  94. s.next()
  95. }
  96. s.emit(tokenEOF)
  97. return nil
  98. }
// read consumes one rune from the input and advances the read-cursor
// position counters (newline resets the column and bumps the line).
// Panics on any read error.
// NOTE(review): this assumes the buffruneio reader reports end-of-input
// as the eof rune rather than an error — confirm, since an error here
// panics.
func (s *sshLexer) read() rune {
	r, _, err := s.input.ReadRune()
	if err != nil {
		panic(err)
	}
	if r == '\n' {
		s.endbufferLine++
		s.endbufferCol = 1
	} else {
		s.endbufferCol++
	}
	return r
}
  112. func (s *sshLexer) next() rune {
  113. r := s.read()
  114. if r != eof {
  115. s.buffer = append(s.buffer, r)
  116. }
  117. return r
  118. }
  119. func (s *sshLexer) lexVoid() sshLexStateFn {
  120. for {
  121. next := s.peek()
  122. switch next {
  123. case '#':
  124. s.skip()
  125. return s.lexComment(s.lexVoid)
  126. case '\r':
  127. fallthrough
  128. case '\n':
  129. s.emit(tokenEmptyLine)
  130. s.skip()
  131. continue
  132. }
  133. if isSpace(next) {
  134. s.skip()
  135. }
  136. if isKeyStartChar(next) {
  137. return s.lexKey
  138. }
  139. // removed IsKeyStartChar and lexKey. probably will need to readd
  140. if next == eof {
  141. s.next()
  142. break
  143. }
  144. }
  145. s.emit(tokenEOF)
  146. return nil
  147. }
  148. func (s *sshLexer) ignore() {
  149. s.buffer = make([]rune, 0)
  150. s.line = s.endbufferLine
  151. s.col = s.endbufferCol
  152. }
// skip consumes the next rune and immediately discards it, so it is not
// included in any token.
func (s *sshLexer) skip() {
	s.next()
	s.ignore()
}
// emit sends a token of type t whose value is the currently buffered runes.
func (s *sshLexer) emit(t tokenType) {
	s.emitWithValue(t, string(s.buffer))
}
  160. func (s *sshLexer) emitWithValue(t tokenType, value string) {
  161. tok := token{
  162. Position: Position{s.line, s.col},
  163. typ: t,
  164. val: value,
  165. }
  166. s.tokens <- tok
  167. s.ignore()
  168. }
// peek returns the next rune without consuming it: it reads one rune and
// immediately unreads it. Panics on read error.
// NOTE(review): the UnreadRune error is ignored; after a successful
// ReadRune it should not fail, but this is unverified here.
func (s *sshLexer) peek() rune {
	r, _, err := s.input.ReadRune()
	if err != nil {
		panic(err)
	}
	s.input.UnreadRune()
	return r
}
// follow reports whether the upcoming input matches next exactly, without
// consuming it: every rune read during the comparison is pushed back when
// the function returns.
func (s *sshLexer) follow(next string) bool {
	for _, expectedRune := range next {
		r, _, err := s.input.ReadRune()
		// Deliberate defer-in-loop: one deferred UnreadRune is stacked
		// per rune read, so all of them are unread at return and the
		// reader position is fully restored.
		defer s.input.UnreadRune()
		if err != nil {
			panic(err)
		}
		if expectedRune != r {
			return false
		}
	}
	return true
}
  190. func (s *sshLexer) run() {
  191. for state := s.lexVoid; state != nil; {
  192. state = state()
  193. }
  194. close(s.tokens)
  195. }
  196. func lexSSH(input io.Reader) chan token {
  197. bufferedInput := buffruneio.NewReader(input)
  198. l := &sshLexer{
  199. input: bufferedInput,
  200. tokens: make(chan token),
  201. line: 1,
  202. col: 1,
  203. endbufferLine: 1,
  204. endbufferCol: 1,
  205. }
  206. go l.run()
  207. return l.tokens
  208. }