You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.

240 lines
4.0 KiB

package ssh_config

import (
	"bytes"
	"strings"
)
// Define state functions
//
// sshLexStateFn is one state of the lexer's state machine: it performs
// some lexing work and returns the next state to run, or nil to stop.
type sshLexStateFn func() sshLexStateFn

// sshLexer scans an ssh_config source rune by rune and delivers tokens
// on the tokens channel (driven by run, consumed via lexSSH).
type sshLexer struct {
	inputIdx int    // index into input of the next rune to read
	input    []rune // Textual source
	buffer   []rune // Runes composing the current token
	tokens   chan token
	// line/col mark where the current token starts; endbufferLine/
	// endbufferCol track the position just past the last rune read.
	line          int
	col           int
	endbufferLine int
	endbufferCol  int
}
  17. func (s *sshLexer) lexComment(previousState sshLexStateFn) sshLexStateFn {
  18. return func() sshLexStateFn {
  19. growingString := ""
  20. for next := s.peek(); next != '\n' && next != eof; next = s.peek() {
  21. if next == '\r' && s.follow("\r\n") {
  22. break
  23. }
  24. growingString += string(next)
  25. s.next()
  26. }
  27. s.emitWithValue(tokenComment, growingString)
  28. s.skip()
  29. return previousState
  30. }
  31. }
  32. // lex the space after an equals sign in a function
  33. func (s *sshLexer) lexRspace() sshLexStateFn {
  34. for {
  35. next := s.peek()
  36. if !isSpace(next) {
  37. break
  38. }
  39. s.skip()
  40. }
  41. return s.lexRvalue
  42. }
  43. func (s *sshLexer) lexEquals() sshLexStateFn {
  44. for {
  45. next := s.peek()
  46. if next == '=' {
  47. s.emit(tokenEquals)
  48. s.skip()
  49. return s.lexRspace
  50. }
  51. // TODO error handling here; newline eof etc.
  52. if !isSpace(next) {
  53. break
  54. }
  55. s.skip()
  56. }
  57. return s.lexRvalue
  58. }
  59. func (s *sshLexer) lexKey() sshLexStateFn {
  60. growingString := ""
  61. for r := s.peek(); isKeyChar(r); r = s.peek() {
  62. // simplified a lot here
  63. if isSpace(r) || r == '=' {
  64. s.emitWithValue(tokenKey, growingString)
  65. s.skip()
  66. return s.lexEquals
  67. }
  68. growingString += string(r)
  69. s.next()
  70. }
  71. s.emitWithValue(tokenKey, growingString)
  72. return s.lexEquals
  73. }
  74. func (s *sshLexer) lexRvalue() sshLexStateFn {
  75. growingString := ""
  76. for {
  77. next := s.peek()
  78. switch next {
  79. case '\r':
  80. if s.follow("\r\n") {
  81. s.emitWithValue(tokenString, growingString)
  82. s.skip()
  83. return s.lexVoid
  84. }
  85. case '\n':
  86. s.emitWithValue(tokenString, growingString)
  87. s.skip()
  88. return s.lexVoid
  89. case '#':
  90. s.emitWithValue(tokenString, growingString)
  91. s.skip()
  92. return s.lexComment(s.lexVoid)
  93. case eof:
  94. s.next()
  95. }
  96. if next == eof {
  97. break
  98. }
  99. growingString += string(next)
  100. s.next()
  101. }
  102. s.emit(tokenEOF)
  103. return nil
  104. }
  105. func (s *sshLexer) read() rune {
  106. r := s.peek()
  107. if r == '\n' {
  108. s.endbufferLine++
  109. s.endbufferCol = 1
  110. } else {
  111. s.endbufferCol++
  112. }
  113. s.inputIdx++
  114. return r
  115. }
  116. func (s *sshLexer) next() rune {
  117. r := s.read()
  118. if r != eof {
  119. s.buffer = append(s.buffer, r)
  120. }
  121. return r
  122. }
  123. func (s *sshLexer) lexVoid() sshLexStateFn {
  124. for {
  125. next := s.peek()
  126. switch next {
  127. case '#':
  128. s.skip()
  129. return s.lexComment(s.lexVoid)
  130. case '\r':
  131. fallthrough
  132. case '\n':
  133. s.emit(tokenEmptyLine)
  134. s.skip()
  135. continue
  136. }
  137. if isSpace(next) {
  138. s.skip()
  139. }
  140. if isKeyStartChar(next) {
  141. return s.lexKey
  142. }
  143. // removed IsKeyStartChar and lexKey. probably will need to readd
  144. if next == eof {
  145. s.next()
  146. break
  147. }
  148. }
  149. s.emit(tokenEOF)
  150. return nil
  151. }
  152. func (s *sshLexer) ignore() {
  153. s.buffer = make([]rune, 0)
  154. s.line = s.endbufferLine
  155. s.col = s.endbufferCol
  156. }
// skip consumes the next rune and immediately discards it, so it will
// not appear in any emitted token.
func (s *sshLexer) skip() {
	s.next()
	s.ignore()
}
// emit sends a token of type t whose value is the currently buffered text.
func (s *sshLexer) emit(t tokenType) {
	s.emitWithValue(t, string(s.buffer))
}
  164. func (s *sshLexer) emitWithValue(t tokenType, value string) {
  165. tok := token{
  166. Position: Position{s.line, s.col},
  167. typ: t,
  168. val: value,
  169. }
  170. s.tokens <- tok
  171. s.ignore()
  172. }
  173. func (s *sshLexer) peek() rune {
  174. if s.inputIdx >= len(s.input) {
  175. return eof
  176. }
  177. r := s.input[s.inputIdx]
  178. return r
  179. }
  180. func (s *sshLexer) follow(next string) bool {
  181. inputIdx := s.inputIdx
  182. for _, expectedRune := range next {
  183. if inputIdx >= len(s.input) {
  184. return false
  185. }
  186. r := s.input[inputIdx]
  187. inputIdx++
  188. if expectedRune != r {
  189. return false
  190. }
  191. }
  192. return true
  193. }
  194. func (s *sshLexer) run() {
  195. for state := s.lexVoid; state != nil; {
  196. state = state()
  197. }
  198. close(s.tokens)
  199. }
  200. func lexSSH(input []byte) chan token {
  201. runes := bytes.Runes(input)
  202. l := &sshLexer{
  203. input: runes,
  204. tokens: make(chan token),
  205. line: 1,
  206. col: 1,
  207. endbufferLine: 1,
  208. endbufferCol: 1,
  209. }
  210. go l.run()
  211. return l.tokens
  212. }