@@ -475,6 +475,12 @@ impl<'a> Parser<'a> {
                     if expecting_statement_delimiter && word.keyword == Keyword::END {
                         break;
                     }
+                    // Treat batch delimiter as an end of statement
+                    if expecting_statement_delimiter && dialect_of!(self is MsSqlDialect) {
+                        if let Some(Statement::Go(GoStatement { count: _ })) = stmts.last() {
+                            expecting_statement_delimiter = false;
+                        }
+                    }
                 }
                 _ => {}
             }
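
Not part of the diff, just for context: a minimal sketch of how this hunk is expected to behave, assuming the crate's usual `Parser::parse_sql` entry point and that `Statement::Go` / `GoStatement` (introduced elsewhere in this PR) are exported from `sqlparser::ast`. With the MS SQL dialect, a statement that follows a parsed `GO` no longer needs its own leading delimiter:

```rust
use sqlparser::ast::Statement;
use sqlparser::dialect::MsSqlDialect;
use sqlparser::parser::Parser;

fn main() {
    // The GO batch delimiter sits on its own line; the SELECT after it
    // does not need to be separated from GO by a semicolon.
    let sql = "SELECT 1;\nGO\nSELECT 2";
    let stmts = Parser::parse_sql(&MsSqlDialect {}, sql).unwrap();
    // Expected: SELECT 1, GO, SELECT 2 (the batch delimiter is kept as its own statement).
    assert_eq!(stmts.len(), 3);
    assert!(matches!(stmts[1], Statement::Go(_)));
}
```
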
@@ -617,6 +623,7 @@ impl<'a> Parser<'a> {
                 }
                 // `COMMENT` is snowflake specific https://docs.snowflake.com/en/sql-reference/sql/comment
                 Keyword::COMMENT if self.dialect.supports_comment_on() => self.parse_comment(),
+                Keyword::GO => self.parse_go(),
                 _ => self.expected("an SQL statement", next_token),
             },
             Token::LParen => {
@@ -15058,6 +15065,57 @@ impl<'a> Parser<'a> {
         }
     }
 
+    /// Parse [Statement::Go]
+    fn parse_go(&mut self) -> Result<Statement, ParserError> {
+        // The previous token should be a newline (skipping any non-newline whitespace);
+        // see also `previous_token`.
+        let mut look_back_count = 2;
+        loop {
+            let prev_token = self.token_at(self.index.saturating_sub(look_back_count));
+            match prev_token.token {
+                Token::Whitespace(ref w) => match w {
+                    Whitespace::Newline => break,
+                    _ => look_back_count += 1,
+                },
+                _ => {
+                    if prev_token == self.get_current_token() {
+                        // We are at the start of the statement, so this check can be skipped.
+                        break;
+                    }
+
+                    self.expected("newline before GO", prev_token.clone())?
+                }
+            };
+        }
+
+        let count = loop {
+            // Peek without skipping whitespace, so that parsing of this statement halts at a newline.
+            let next_token = self.peek_token_no_skip();
+            match next_token.token {
+                Token::EOF => break None::<u64>,
+                Token::Whitespace(ref w) => match w {
+                    Whitespace::Newline => break None,
+                    _ => _ = self.next_token_no_skip(),
+                },
+                Token::Number(s, _) => {
+                    let value = Some(Self::parse::<u64>(s, next_token.span.start)?);
+                    self.advance_token();
+                    break value;
+                }
+                _ => self.expected("literal int or newline", next_token)?,
+            };
+        };
+
+        if self.peek_token().token == Token::SemiColon {
+            parser_err!(
+                "GO may not end with a semicolon",
+                self.peek_token().span.start
+            )?;
+        }
+
+        Ok(Statement::Go(GoStatement { count }))
+    }
+
     /// Consume the parser and return its underlying token buffer
     pub fn into_tokens(self) -> Vec<TokenWithSpan> {
         self.tokens
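
Again not part of the diff: a hedged sketch of how `parse_go` handles the optional repeat count, the newline requirement, and the trailing-semicolon check, under the same assumptions about the public paths of `Statement::Go` and `GoStatement`:

```rust
use sqlparser::ast::{GoStatement, Statement};
use sqlparser::dialect::MsSqlDialect;
use sqlparser::parser::Parser;

fn main() {
    // `GO 5` carries an optional count; the parser records it on the statement.
    let stmts = Parser::parse_sql(&MsSqlDialect {}, "SELECT 1;\nGO 5").unwrap();
    assert!(matches!(
        stmts.last(),
        Some(Statement::Go(GoStatement { count: Some(5) }))
    ));

    // GO must start its own line and may not be terminated by a semicolon.
    assert!(Parser::parse_sql(&MsSqlDialect {}, "SELECT 1; GO").is_err());
    assert!(Parser::parse_sql(&MsSqlDialect {}, "SELECT 1;\nGO;").is_err());
}
```
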