diff --git a/src/os.cpp b/src/os.cpp
index 7638ec5685..fd87cef082 100644
--- a/src/os.cpp
+++ b/src/os.cpp
@@ -43,7 +43,7 @@ static int read_all_fd_stream(int fd, Buf *out_buf) {
     buf_resize(out_buf, buf_size);
     ssize_t actual_buf_len = 0;
     for (;;) {
-        ssize_t amt_read = read(fd, buf_ptr(out_buf), buf_len(out_buf));
+        ssize_t amt_read = read(fd, buf_ptr(out_buf) + actual_buf_len, buf_size);
         if (amt_read < 0) {
             return ErrorFileSystem;
         }
diff --git a/src/tokenizer.cpp b/src/tokenizer.cpp
index eb7c294629..13728835d2 100644
--- a/src/tokenizer.cpp
+++ b/src/tokenizer.cpp
@@ -878,6 +878,7 @@ void tokenize(Buf *buf, Tokenization *out) {
                 if (digit_value >= 0) {
                     if (digit_value >= t.cur_tok->radix) {
                         tokenize_error(&t, "invalid character: '%c'", c);
+                        break;
                     }
                     // normal digit
                 } else {
@@ -904,6 +905,7 @@ void tokenize(Buf *buf, Tokenization *out) {
                 if (digit_value >= 0) {
                     if (digit_value >= t.cur_tok->radix) {
                         tokenize_error(&t, "invalid character: '%c'", c);
+                        break;
                     }
                     // normal digit
                 } else {
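
Note on the os.cpp hunk: the loop now appends each read() at the current end of the buffer (buf_ptr(out_buf) + actual_buf_len) instead of overwriting the start, and requests buf_size bytes rather than the whole buffer length. The tokenizer hunks add a break after reporting an invalid digit, presumably so the enclosing switch/loop stops treating that character as part of the number. Below is a minimal standalone sketch of the corrected read-loop pattern; it uses std::vector and a hypothetical read_all_fd helper rather than the project's Buf API (buf_ptr, buf_resize, buf_len, ErrorFileSystem appear only in the diff above), so treat it as an illustration of the fix, not the actual function.

// Illustrative sketch only: accumulate read() results at an offset, mirroring
// the corrected call read(fd, buf_ptr(out_buf) + actual_buf_len, buf_size).
#include <unistd.h>
#include <vector>
#include <cstddef>

static int read_all_fd(int fd, std::vector<char> &out) {
    const size_t chunk = 4096;   // assumed chunk size (stand-in for buf_size)
    size_t used = 0;             // bytes read so far (stand-in for actual_buf_len)
    out.resize(chunk);
    for (;;) {
        // Read into the unused tail of the buffer, never back at offset 0,
        // and request a fixed chunk rather than the whole buffer length.
        ssize_t amt = read(fd, out.data() + used, chunk);
        if (amt < 0)
            return -1;           // caller maps this to its own error code
        if (amt == 0) {          // EOF: shrink to the bytes actually read
            out.resize(used);
            return 0;
        }
        used += (size_t)amt;
        if (out.size() < used + chunk)
            out.resize(used + chunk);  // keep room for the next read
    }
}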