// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package bufio

import (
	"bytes"
	"errors"
	"io"
	"unicode/utf8"
)

// Scanner provides a convenient interface for reading data such as
// a file of newline-delimited lines of text. Successive calls to
// the Scan method will step through the 'tokens' of a file, skipping
// the bytes between the tokens. The specification of a token is
// defined by a split function of type SplitFunc; the default split
// function breaks the input into lines with line termination stripped. Split
// functions are defined in this package for scanning a file into
// lines, bytes, UTF-8-encoded runes, and space-delimited words. The
// client may instead provide a custom split function.
//
// Scanning stops unrecoverably at EOF, the first I/O error, or a token too
// large to fit in the buffer. When a scan stops, the reader may have
// advanced arbitrarily far past the last token. Programs that need more
// control over error handling or large tokens, or must run sequential scans
// on a reader, should use bufio.Reader instead.
//
type Scanner struct {
	r            io.Reader // The reader provided by the client.
	split        SplitFunc // The function to split the tokens.
	maxTokenSize int       // Maximum size of a token; modified by tests.
	token        []byte    // Last token returned by split.
	buf          []byte    // Buffer used as argument to split.
	start        int       // First non-processed byte in buf.
	end          int       // End of data in buf.
	err          error     // Sticky error.
	empties      int       // Count of successive empty tokens.
	scanCalled   bool      // Scan has been called; buffer is in use.
	done         bool      // Scan has finished.
}
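
// The following is an informal usage sketch written from a client's point of
// view (it is not part of this file's code): reading os.Stdin line by line
// with the default ScanLines split function. The use of the os and fmt
// packages here is purely illustrative.
//
//	scanner := bufio.NewScanner(os.Stdin)
//	for scanner.Scan() {
//		fmt.Println(scanner.Text()) // the token, with the line terminator stripped
//	}
//	if err := scanner.Err(); err != nil {
//		fmt.Fprintln(os.Stderr, "reading standard input:", err)
//	}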

// SplitFunc is the signature of the split function used to tokenize the
// input. The arguments are an initial substring of the remaining unprocessed
// data and a flag, atEOF, that reports whether the Reader has no more data
// to give. The return values are the number of bytes to advance the input
// and the next token to return to the user, if any, plus an error, if any.
//
// Scanning stops if the function returns an error, in which case some of
// the input may be discarded.
//
// Otherwise, the Scanner advances the input. If the token is not nil,
// the Scanner returns it to the user. If the token is nil, the
// Scanner reads more data and continues scanning; if there is no more
// data--if atEOF was true--the Scanner returns. If the data does not
// yet hold a complete token, for instance if it has no newline while
// scanning lines, a SplitFunc can return (0, nil, nil) to signal the
// Scanner to read more data into the slice and try again with a
// longer slice starting at the same point in the input.
//
// The function is never called with an empty data slice unless atEOF
// is true. If atEOF is true, however, data may be non-empty and,
// as always, holds unprocessed text.
type SplitFunc func(data []byte, atEOF bool) (advance int, token []byte, err error)
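
// A minimal sketch of a custom SplitFunc, using a hypothetical scanComma
// function that yields comma-separated fields. It follows the contract above:
// return (0, nil, nil) to request more data, and deliver the trailing field
// when atEOF is true.
//
//	func scanComma(data []byte, atEOF bool) (advance int, token []byte, err error) {
//		if atEOF && len(data) == 0 {
//			return 0, nil, nil
//		}
//		if i := bytes.IndexByte(data, ','); i >= 0 {
//			return i + 1, data[:i], nil // consume the field plus its trailing comma
//		}
//		if atEOF {
//			return len(data), data, nil // final, unterminated field
//		}
//		return 0, nil, nil // request more data
//	}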

// Errors returned by Scanner.
var (
	ErrTooLong         = errors.New("bufio.Scanner: token too long")
	ErrNegativeAdvance = errors.New("bufio.Scanner: SplitFunc returns negative advance count")
	ErrAdvanceTooFar   = errors.New("bufio.Scanner: SplitFunc returns advance count beyond input")
)

const (
	// MaxScanTokenSize is the maximum size used to buffer a token
	// unless the user provides an explicit buffer with Scanner.Buffer.
	// The actual maximum token size may be smaller as the buffer
	// may need to include, for instance, a newline.
	MaxScanTokenSize = 64 * 1024

	startBufSize = 4096 // Size of initial allocation for buffer.
)

// NewScanner returns a new Scanner to read from r.
// The split function defaults to ScanLines.
func NewScanner(r io.Reader) *Scanner {
	return &Scanner{
		r:            r,
		split:        ScanLines,
		maxTokenSize: MaxScanTokenSize,
	}
}

// Err returns the first non-EOF error that was encountered by the Scanner.
func (s *Scanner) Err() error {
	if s.err == io.EOF {
		return nil
	}
	return s.err
}

// Bytes returns the most recent token generated by a call to Scan.
// The underlying array may point to data that will be overwritten
// by a subsequent call to Scan. It does no allocation.
func (s *Scanner) Bytes() []byte {
	return s.token
}

// Text returns the most recent token generated by a call to Scan
// as a newly allocated string holding its bytes.
func (s *Scanner) Text() string {
	return string(s.token)
}
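
// An illustrative note in code form: the slice returned by Bytes aliases the
// Scanner's internal buffer, so copy it if it must survive the next call to
// Scan. The kept variable below is hypothetical client code.
//
//	var kept [][]byte
//	for scanner.Scan() {
//		kept = append(kept, append([]byte(nil), scanner.Bytes()...))
//	}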

// ErrFinalToken is a special sentinel error value. It is intended to be
// returned by a Split function to indicate that the token being delivered
// with the error is the last token and scanning should stop after this one.
// After ErrFinalToken is received by Scan, scanning stops with no error.
// The value is useful to stop processing early or when it is necessary to
// deliver a final empty token. One could achieve the same behavior
// with a custom error value but providing one here is tidier.
// See the emptyFinalToken example for a use of this value.
var ErrFinalToken = errors.New("final token")
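
// A sketch of ErrFinalToken in client code, adapted from the emptyFinalToken
// idea mentioned above: a split function that yields comma-separated sections
// and delivers the final (possibly empty) token with ErrFinalToken so that
// Scan stops cleanly afterwards. The scanner variable is hypothetical.
//
//	onComma := func(data []byte, atEOF bool) (advance int, token []byte, err error) {
//		if i := bytes.IndexByte(data, ','); i >= 0 {
//			return i + 1, data[:i], nil
//		}
//		if !atEOF {
//			return 0, nil, nil
//		}
//		// Deliver the last section, even if it is empty, and stop scanning.
//		return 0, data, ErrFinalToken
//	}
//	scanner.Split(onComma)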

// Scan advances the Scanner to the next token, which will then be
// available through the Bytes or Text method. It returns false when the
// scan stops, either by reaching the end of the input or an error.
// After Scan returns false, the Err method will return any error that
// occurred during scanning, except that if it was io.EOF, Err
// will return nil.
// Scan panics if the split function returns too many empty
// tokens without advancing the input. This is a common error mode for
// scanners.
func (s *Scanner) Scan() bool {
	if s.done {
		return false
	}
	s.scanCalled = true
	// Loop until we have a token.
	for {
		// See if we can get a token with what we already have.
		// If we've run out of data but have an error, give the split function
		// a chance to recover any remaining, possibly empty token.
		if s.end > s.start || s.err != nil {
			advance, token, err := s.split(s.buf[s.start:s.end], s.err != nil)
			if err != nil {
				if err == ErrFinalToken {
					s.token = token
					s.done = true
					return true
				}
				s.setErr(err)
				return false
			}
			if !s.advance(advance) {
				return false
			}
			s.token = token
			if token != nil {
				if s.err == nil || advance > 0 {
					s.empties = 0
				} else {
					// Returning tokens not advancing input at EOF.
					s.empties++
					if s.empties > maxConsecutiveEmptyReads {
						panic("bufio.Scan: too many empty tokens without progressing")
					}
				}
				return true
			}
		}
		// We cannot generate a token with what we are holding.
		// If we've already hit EOF or an I/O error, we are done.
		if s.err != nil {
			// Shut it down.
			s.start = 0
			s.end = 0
			return false
		}
		// Must read more data.
		// First, shift data to beginning of buffer if there's lots of empty space
		// or space is needed.
		if s.start > 0 && (s.end == len(s.buf) || s.start > len(s.buf)/2) {
			copy(s.buf, s.buf[s.start:s.end])
			s.end -= s.start
			s.start = 0
		}
		// Is the buffer full? If so, resize.
		if s.end == len(s.buf) {
			// Guarantee no overflow in the multiplication below.
			const maxInt = int(^uint(0) >> 1)
			if len(s.buf) >= s.maxTokenSize || len(s.buf) > maxInt/2 {
				s.setErr(ErrTooLong)
				return false
			}
			newSize := len(s.buf) * 2
			if newSize == 0 {
				newSize = startBufSize
			}
			if newSize > s.maxTokenSize {
				newSize = s.maxTokenSize
			}
			newBuf := make([]byte, newSize)
			copy(newBuf, s.buf[s.start:s.end])
			s.buf = newBuf
			s.end -= s.start
			s.start = 0
		}
		// Finally we can read some input. Make sure we don't get stuck with
		// a misbehaving Reader. Officially we don't need to do this, but let's
		// be extra careful: Scanner is for safe, simple jobs.
		for loop := 0; ; {
			n, err := s.r.Read(s.buf[s.end:len(s.buf)])
			s.end += n
			if err != nil {
				s.setErr(err)
				break
			}
			if n > 0 {
				s.empties = 0
				break
			}
			loop++
			if loop > maxConsecutiveEmptyReads {
				s.setErr(io.ErrNoProgress)
				break
			}
		}
	}
}
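
// A small sketch of the contract documented above: when the input is simply
// exhausted, Scan returns false and Err returns nil, because io.EOF is not
// reported as an error. The strings.NewReader input is illustrative only.
//
//	scanner := bufio.NewScanner(strings.NewReader("one\ntwo\n"))
//	for scanner.Scan() {
//		_ = scanner.Text() // "one", then "two"
//	}
//	fmt.Println(scanner.Err()) // <nil>: reaching EOF is not reported as an error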

// advance consumes n bytes of the buffer. It reports whether the advance was legal.
func (s *Scanner) advance(n int) bool {
	if n < 0 {
		s.setErr(ErrNegativeAdvance)
		return false
	}
	if n > s.end-s.start {
		s.setErr(ErrAdvanceTooFar)
		return false
	}
	s.start += n
	return true
}

// setErr records the first error encountered.
func (s *Scanner) setErr(err error) {
	if s.err == nil || s.err == io.EOF {
		s.err = err
	}
}

// Buffer sets the initial buffer to use when scanning and the maximum
// size of buffer that may be allocated during scanning. The maximum
// token size is the larger of max and cap(buf). If max <= cap(buf),
// Scan will use this buffer only and do no allocation.
//
// By default, Scan uses an internal buffer and sets the
// maximum token size to MaxScanTokenSize.
//
// Buffer panics if it is called after scanning has started.
func (s *Scanner) Buffer(buf []byte, max int) {
	if s.scanCalled {
		panic("Buffer called after Scan")
	}
	s.buf = buf[0:cap(buf)]
	s.maxTokenSize = max
}
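
// A hedged sketch of Buffer in client code: starting with a 64 KiB buffer and
// allowing tokens up to 1 MiB, for inputs whose lines may exceed
// MaxScanTokenSize. The sizes and the reader r are arbitrary.
//
//	scanner := bufio.NewScanner(r)
//	scanner.Buffer(make([]byte, 0, 64*1024), 1024*1024)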

// Split sets the split function for the Scanner.
// The default split function is ScanLines.
//
// Split panics if it is called after scanning has started.
func (s *Scanner) Split(split SplitFunc) {
	if s.scanCalled {
		panic("Split called after Scan")
	}
	s.split = split
}
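
// A sketch of Split in client code: selecting ScanWords before the first call
// to Scan in order to count space-separated words. The input string is
// illustrative only.
//
//	scanner := bufio.NewScanner(strings.NewReader("one two three four"))
//	scanner.Split(bufio.ScanWords)
//	count := 0
//	for scanner.Scan() {
//		count++
//	}
//	fmt.Println(count) // 4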

// Split functions

// ScanBytes is a split function for a Scanner that returns each byte as a token.
func ScanBytes(data []byte, atEOF bool) (advance int, token []byte, err error) {
	if atEOF && len(data) == 0 {
		return 0, nil, nil
	}
	return 1, data[0:1], nil
}

var errorRune = []byte(string(utf8.RuneError))

// ScanRunes is a split function for a Scanner that returns each
// UTF-8-encoded rune as a token. The sequence of runes returned is
// equivalent to that from a range loop over the input as a string, which
// means that erroneous UTF-8 encodings translate to U+FFFD = "\xef\xbf\xbd".
// Because of the Scan interface, this makes it impossible for the client to
// distinguish correctly encoded replacement runes from encoding errors.
func ScanRunes(data []byte, atEOF bool) (advance int, token []byte, err error) {
	if atEOF && len(data) == 0 {
		return 0, nil, nil
	}

	// Fast path 1: ASCII.
	if data[0] < utf8.RuneSelf {
		return 1, data[0:1], nil
	}

	// Fast path 2: Correct UTF-8 decode without error.
	_, width := utf8.DecodeRune(data)
	if width > 1 {
		// It's a valid encoding. Width cannot be one for a correctly encoded
		// non-ASCII rune.
		return width, data[0:width], nil
	}

	// We know it's an error: we have width==1 and implicitly r==utf8.RuneError.
	// Is the error because there wasn't a full rune to be decoded?
	// FullRune distinguishes correctly between erroneous and incomplete encodings.
	if !atEOF && !utf8.FullRune(data) {
		// Incomplete; get more bytes.
		return 0, nil, nil
	}

	// We have a real UTF-8 encoding error. Return a properly encoded error rune
	// but advance only one byte. This matches the behavior of a range loop over
	// an incorrectly encoded string.
	return 1, errorRune, nil
}
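
// An illustrative use of ScanRunes from client code: each token is one
// UTF-8-encoded rune, with invalid encodings surfacing as U+FFFD.
//
//	scanner := bufio.NewScanner(strings.NewReader("héllo"))
//	scanner.Split(bufio.ScanRunes)
//	for scanner.Scan() {
//		fmt.Printf("%q ", scanner.Text()) // "h" "é" "l" "l" "o"
//	}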

// dropCR drops a terminal \r from the data.
func dropCR(data []byte) []byte {
	if len(data) > 0 && data[len(data)-1] == '\r' {
		return data[0 : len(data)-1]
	}
	return data
}

// ScanLines is a split function for a Scanner that returns each line of
// text, stripped of any trailing end-of-line marker. The returned line may
// be empty. The end-of-line marker is one optional carriage return followed
// by one mandatory newline. In regular expression notation, it is `\r?\n`.
// The last non-empty line of input will be returned even if it has no
// newline.
func ScanLines(data []byte, atEOF bool) (advance int, token []byte, err error) {
	if atEOF && len(data) == 0 {
		return 0, nil, nil
	}
	if i := bytes.IndexByte(data, '\n'); i >= 0 {
		// We have a full newline-terminated line.
		return i + 1, dropCR(data[0:i]), nil
	}
	// If we're at EOF, we have a final, non-terminated line. Return it.
	if atEOF {
		return len(data), dropCR(data), nil
	}
	// Request more data.
	return 0, nil, nil
}
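
// A brief sketch of the documented end-of-line handling: both "\n" and "\r\n"
// terminators are stripped from the returned lines. The input is illustrative.
//
//	scanner := bufio.NewScanner(strings.NewReader("gopher\r\nburrow\n"))
//	for scanner.Scan() { // ScanLines is the default split function
//		_ = scanner.Text() // "gopher", then "burrow"
//	}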

// isSpace reports whether the character is a Unicode white space character.
// We avoid dependency on the unicode package, but check validity of the implementation
// in the tests.
func isSpace(r rune) bool {
	if r <= '\u00FF' {
		// Obvious ASCII ones: \t through \r plus space. Plus two Latin-1 oddballs.
		switch r {
		case ' ', '\t', '\n', '\v', '\f', '\r':
			return true
		case '\u0085', '\u00A0':
			return true
		}
		return false
	}
	// High-valued ones.
	if '\u2000' <= r && r <= '\u200a' {
		return true
	}
	switch r {
	case '\u1680', '\u2028', '\u2029', '\u202f', '\u205f', '\u3000':
		return true
	}
	return false
}

// ScanWords is a split function for a Scanner that returns each
// space-separated word of text, with surrounding spaces deleted. It will
// never return an empty string. The definition of space is set by
// unicode.IsSpace.
func ScanWords(data []byte, atEOF bool) (advance int, token []byte, err error) {
	// Skip leading spaces.
	start := 0
	for width := 0; start < len(data); start += width {
		var r rune
		r, width = utf8.DecodeRune(data[start:])
		if !isSpace(r) {
			break
		}
	}
	// Scan until space, marking end of word.
	for width, i := 0, start; i < len(data); i += width {
		var r rune
		r, width = utf8.DecodeRune(data[i:])
		if isSpace(r) {
			return i + width, data[start:i], nil
		}
	}
	// If we're at EOF, we have a final, non-empty, non-terminated word. Return it.
	if atEOF && len(data) > start {
		return len(data), data[start:], nil
	}
	// Request more data.
	return start, nil, nil
}
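
// A final sketch: ScanWords skips runs of leading and trailing space, so the
// illustrative input below yields exactly three tokens and never an empty one.
//
//	scanner := bufio.NewScanner(strings.NewReader("  one\ttwo  \n three "))
//	scanner.Split(bufio.ScanWords)
//	for scanner.Scan() {
//		_ = scanner.Text() // "one", "two", "three"
//	}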