Columns: language (string, 0-24 chars); filename (string, 9-214 chars); code (string, 99-9.93M chars)
Rust
hhvm/hphp/hack/src/parser/core/lexer.rs
// Copyright (c) 2019, Facebook, Inc. // All rights reserved. // // This source code is licensed under the MIT license found in the // LICENSE file in the "hack" directory of this source tree. use std::cell::RefCell; use std::ops::DerefMut; use std::rc::Rc; use parser_core_types::lexable_token::LexableToken; use parser_core_types::lexable_trivia::LexableTrivia; use parser_core_types::lexable_trivia::LexableTrivium; use parser_core_types::source_text::SourceText; use parser_core_types::source_text::INVALID; use parser_core_types::syntax_error::Error; use parser_core_types::syntax_error::SyntaxError; use parser_core_types::syntax_error::{self as Errors}; use parser_core_types::token_factory::TokenFactory; use parser_core_types::token_factory::Trivia; use parser_core_types::token_factory::Trivium; use parser_core_types::token_kind::TokenKind; use parser_core_types::trivia_factory::TriviaFactory; use parser_core_types::trivia_kind::TriviaKind; use static_assertions::*; #[derive(Debug)] struct LexerPreSnapshot { start: usize, offset: usize, in_type: bool, } #[derive(Debug)] struct LexerPostSnapshot { start: usize, offset: usize, in_type: bool, errors: Vec<SyntaxError>, } impl<'a, TF> PartialEq<Lexer<'a, TF>> for LexerPreSnapshot where TF: TokenFactory, { fn eq(&self, other: &Lexer<'a, TF>) -> bool { self.start == other.start && self.offset == other.offset && self.in_type == other.in_type } } /* Lexer Caching One-token lookahead in the parser is implemented by `parser.peek_token()` ... `parser.next_token()`. Re-scanning in `next_token` can be avoided by caching the result of `peek_token`; consecutive `peek_token`s are sped up as well. `Lexer.peek_next_token()` checks the cache first; on a cache miss it clones the current lexer and calls `next_token` on the clone. To cache the result, it takes snapshots of the lexer state before and after calling `next_token` and stores them in the current lexer. The Clone trait of Lexer is derived automatically, so `cache: Rc<...>` is cloned as well. `Rc` ensures that the cloned lexer and the original lexer share the same cache; this is intentional! Lookahead of more than one token still clones the parser, and therefore the lexer; sharing the cache lets a cloned lexer use the cache of the original lexer and vice versa. Measurements show this is 2% faster than not sharing the cache. NOTE: This caching mechanism assumes an invariant: `errors` in `Lexer` may only gain new errors and must never lose one while scanning forward! `Lexer.peek_next_token()` clones a new `Lexer` and resets its `errors` to empty; the lookahead may accumulate new errors, and these errors are appended to the original `Lexer`. We need this invariant because between `peek_next_token` and `next_token` we cannot prove that no new error was added; in fact, it is observed that new errors are added between these two calls. */ #[derive(Debug)] struct LexerCache<Token>(LexerPreSnapshot, Token, LexerPostSnapshot); #[derive(Debug, Clone)] pub struct Lexer<'a, TF> where TF: TokenFactory, { source: SourceText<'a>, start: usize, offset: usize, errors: Vec<SyntaxError>, in_type: bool, token_factory: TF, cache: Rc<RefCell<Option<LexerCache<TF::Token>>>>, } #[derive(Debug, PartialEq)] pub enum StringLiteralKind { LiteralDoubleQuoted, LiteralHeredoc { heredoc: Vec<u8> }, } #[derive(Debug, Copy, Clone)] pub enum KwSet { AllKeywords, NonReservedKeywords, NoKeywords, } macro_rules!
as_case_insensitive_keyword { ($size:tt $(, $keyword:tt)+) => { fn as_case_insensitive_keyword(&self, text: &str) -> Option<(&'static str, bool)> { // - The $size should be greater than or equal to the each length of keyword // - The $size should be equal to at least one of the length of a keyword // Therefore, $size is equal to the length of the longest keyword. $( const_assert!($size >= $keyword.len()); )* const_assert!( $( $size == $keyword.len() || )* false ); if text.len() > $size { None } else { let mut t: heapless::String<$size> = text.into(); let t: &mut str = t.as_mut_str(); t.make_ascii_lowercase(); let has_upper = t != text; let t: &str = t as &str; match t { $( $keyword => Some(($keyword, has_upper)), )* _ => None, } } } } } impl<'a, TF> Lexer<'a, TF> where TF: TokenFactory, { fn to_lexer_pre_snapshot(&self) -> LexerPreSnapshot { LexerPreSnapshot { start: self.start, offset: self.offset, in_type: self.in_type, } } fn into_lexer_post_snapshot(self) -> LexerPostSnapshot { LexerPostSnapshot { start: self.start, offset: self.offset, in_type: self.in_type, errors: self.errors, } } pub fn make_at(source: &SourceText<'a>, offset: usize, token_factory: TF) -> Self { Self { source: source.clone(), start: offset, offset, errors: vec![], in_type: false, cache: Rc::new(RefCell::new(None)), token_factory, } } pub fn make(source: &SourceText<'a>, token_factory: TF) -> Self { Self::make_at(source, 0, token_factory) } fn continue_from(&mut self, l: Lexer<'a, TF>) { self.start = l.start; self.offset = l.offset; self.errors = l.errors } pub fn start(&self) -> usize { self.start } pub fn offset(&self) -> usize { self.offset } pub fn errors(&self) -> &[SyntaxError] { &self.errors } fn with_error(&mut self, error: Error) { let error = SyntaxError::make(self.start(), self.offset(), error, vec![]); self.errors.push(error) } fn with_offset(&mut self, offset: usize) { self.offset = offset } fn with_start_offset(&mut self, start: usize, offset: usize) { self.start = start; self.offset = offset; } fn start_new_lexeme(&mut self) { self.start = self.offset } pub fn advance(&mut self, i: usize) { self.offset += i } pub fn set_in_type(&mut self, in_type: bool) { self.in_type = in_type } pub fn source(&self) -> &SourceText<'a> { &self.source } fn source_text_string(&self) -> &[u8] { self.source.text() } // Housekeeping pub fn peek_char(&self, index: usize) -> char { self.source.get(self.offset() + index) } fn peek_string(&self, size: usize) -> &[u8] { self.source.sub(self.offset, size) } fn match_string(&self, s: &[u8]) -> bool { s == self.peek_string(s.len()) } fn width(&self) -> usize { self.offset - self.start } fn current_text(&self) -> &[u8] { self.source.sub(self.start, self.width()) } fn current_text_as_str(&self) -> &str { unsafe { std::str::from_utf8_unchecked(self.current_text()) } } fn at_end(&self) -> bool { self.offset() >= self.source.length() } fn remaining(&self) -> usize { let r = (self.source.length() as isize) - (self.offset as isize); if r < 0 { 0 } else { r as usize } } fn peek(&self, i: usize) -> char { self.source.get(i) } fn peek_back(&self, index: usize) -> char { self.source.get(self.offset() - index) } fn peek_def(&self, index: usize, default: char) -> char { if index >= self.source.length() { default } else { self.source.get(index) } } // Character classification fn is_whitespace_no_newline(c: char) -> bool { match c { ' ' | '\t' => true, _ => false, } } fn is_newline(ch: char) -> bool { match ch { '\r' | '\n' => true, _ => false, } } fn is_binary_digit(ch: char) -> bool { match ch 
{ '0' | '1' => true, _ => false, } } fn is_octal_digit(c: char) -> bool { ('0'..='7').contains(&c) } fn is_decimal_digit(ch: char) -> bool { ('0'..='9').contains(&ch) } fn is_hexadecimal_digit(c: char) -> bool { ('0'..='9').contains(&c) || ('a'..='f').contains(&c) || ('A'..='F').contains(&c) } fn is_name_nondigit(c: char) -> bool { (c == '_') || ('a'..='z').contains(&c) || ('A'..='Z').contains(&c) || ('\x7f' <= c) } fn is_name_letter(c: char) -> bool { (c == '_') || ('0'..='9').contains(&c) || ('a'..='z').contains(&c) || ('A'..='Z').contains(&c) || ('\x7f' <= c) } // Lexing fn skip_while_to_offset(&self, p: impl Fn(char) -> bool) -> usize { let n = self.source.length(); let mut i = self.offset(); while i < n && p(self.peek(i)) { i += 1; } i } // advance offset as long as the predicate is true fn skip_while(&mut self, p: impl Fn(char) -> bool) { self.with_offset(self.skip_while_to_offset(p)) } fn str_skip_while(s: &[u8], mut i: usize, p: impl Fn(char) -> bool) -> usize { let n = s.len(); loop { if i < n && p(s[i] as char) { i += 1 } else { return i; } } } fn skip_whitespace(&mut self) { self.skip_while(Self::is_whitespace_no_newline); } fn str_skip_whitespace(s: &[u8], i: usize) -> usize { Self::str_skip_while(s, i, Self::is_whitespace_no_newline) } fn not_newline(ch: char) -> bool { !(Self::is_newline(ch)) } fn skip_to_end_of_line(&mut self) { self.skip_while(Self::not_newline) } fn skip_name_end(&mut self) { self.skip_while(Self::is_name_letter) } fn skip_end_of_line(&mut self) { match self.peek_char(0) { '\n' => self.advance(1), '\r' => { if self.peek_char(1) == '\n' { self.advance(2) } else { self.advance(1) } } _ => {} } } fn scan_name_impl(&mut self) { assert!(Self::is_name_nondigit(self.peek_char(0))); self.advance(1); self.skip_name_end(); } fn scan_name(&mut self) -> TokenKind { self.scan_name_impl(); TokenKind::Name } fn scan_variable(&mut self) -> TokenKind { assert_eq!('$', self.peek_char(0)); self.advance(1); self.scan_name_impl(); TokenKind::Variable } fn scan_with_underscores(&mut self, accepted_char: impl Fn(char) -> bool) { let n = self.source.length(); let peek_def = |i| if i < n { self.peek(i) } else { INVALID }; let mut i = self.offset(); while i < n { let ch = self.peek(i); if accepted_char(ch) { i += 1 } else if ch == '_' && accepted_char(peek_def(i + 1)) { i += 2; } else { break; } } self.with_offset(i); } fn scan_decimal_digits_with_underscores(&mut self) { self.scan_with_underscores(Self::is_decimal_digit); } fn scan_octal_digits_with_underscores(&mut self) { self.scan_with_underscores(Self::is_octal_digit) } fn scan_binary_digits_with_underscores(&mut self) { self.scan_with_underscores(Self::is_binary_digit) } fn scan_hexadecimal_digits(&mut self) { self.skip_while(Self::is_hexadecimal_digit) } fn scan_hexadecimal_digits_with_underscores(&mut self) { self.scan_with_underscores(Self::is_hexadecimal_digit) } fn scan_hex_literal(&mut self) -> TokenKind { let ch = self.peek_char(0); if !Self::is_hexadecimal_digit(ch) { self.with_error(Errors::error0001); TokenKind::HexadecimalLiteral } else { self.scan_hexadecimal_digits_with_underscores(); TokenKind::HexadecimalLiteral } } fn scan_binary_literal(&mut self) -> TokenKind { let ch = self.peek_char(0); if !Self::is_binary_digit(ch) { self.with_error(Errors::error0002); TokenKind::BinaryLiteral } else { self.scan_binary_digits_with_underscores(); TokenKind::BinaryLiteral } } fn scan_exponent_with_underscores(&mut self) -> TokenKind { let ch = self.peek_char(1); if ch == '+' || ch == '-' { self.advance(2) } else { 
self.advance(1) } let ch = self.peek_char(0); if !Self::is_decimal_digit(ch) { self.with_error(Errors::error0003); TokenKind::FloatingLiteral } else { self.scan_decimal_digits_with_underscores(); TokenKind::FloatingLiteral } } fn scan_after_decimal_point_with_underscores(&mut self) -> TokenKind { self.advance(1); let ch = self.peek_char(0); if ch == '_' { TokenKind::FloatingLiteral } else { self.scan_decimal_digits_with_underscores(); let ch = self.peek_char(0); if ch == 'e' || ch == 'E' { self.scan_exponent_with_underscores() } else { TokenKind::FloatingLiteral } } } fn scan_octal_or_float(&mut self) -> TokenKind { // We've scanned a leading zero. // We have an irritating ambiguity here. 09 is not a legal octal or // floating literal, but 09e1 and 09.1 are. self.advance(1); let ch = self.peek_char(0); match ch { '.' => // 0. { self.scan_after_decimal_point_with_underscores() } 'e' | 'E' => // 0e { self.scan_exponent_with_underscores() } _ if ('0'..='9').contains(&ch) => { // 05 let mut lexer_oct = self.clone(); lexer_oct.scan_octal_digits_with_underscores(); let mut lexer_dec = self.clone(); lexer_dec.scan_decimal_digits_with_underscores(); if (lexer_oct.width()) == (lexer_dec.width()) { // Only octal digits. Could be an octal literal, or could // be a float. let ch = lexer_oct.peek_char(0); if ch == 'e' || ch == 'E' { self.continue_from(lexer_oct); self.scan_exponent_with_underscores() } else if ch == '.' { self.continue_from(lexer_oct); self.scan_after_decimal_point_with_underscores() } else { self.continue_from(lexer_oct); TokenKind::OctalLiteral } } else { // We had decimal digits following a leading zero; this is either a // float literal or an octal to be truncated at the first non-octal // digit. let ch = lexer_dec.peek_char(0); if ch == 'e' || ch == 'E' { self.continue_from(lexer_dec); self.scan_exponent_with_underscores() } else if ch == '.' { self.continue_from(lexer_dec); self.scan_after_decimal_point_with_underscores() } else { // an octal to be truncated at the first non-octal digit self.scan_decimal_digits_with_underscores(); TokenKind::OctalLiteral } } } _ => // 0 is a decimal literal { TokenKind::DecimalLiteral } } } fn scan_decimal_or_float(&mut self) -> TokenKind { // We've scanned a leading non-zero digit. self.scan_decimal_digits_with_underscores(); let ch = self.peek_char(0); match ch { '.' => // 123. { self.scan_after_decimal_point_with_underscores() } 'e' | 'E' => // 123e { self.scan_exponent_with_underscores() } _ => // 123 { TokenKind::DecimalLiteral } } } fn scan_single_quote_string_literal(&mut self) -> TokenKind { // TODO: What about newlines embedded? // SPEC: // single-quoted-string-literal:: // b-opt ' sq-char-sequence-opt ' // // TODO: What is this b-opt? We don't lex an optional 'b' before a literal. 
// // sq-char-sequence:: // sq-char // sq-char-sequence sq-char // // sq-char:: // sq-escape-sequence // \opt any character except single-quote (') or backslash (\) // // sq-escape-sequence:: one of // \' \\ let n = self.source.length(); let peek = |x| self.source.get(x); let mut has_error0012 = false; let mut has_error0006 = false; let mut i = 1 + self.offset(); let new_offset = loop { if i >= n { has_error0012 = true; break n; } else { let ch = peek(i); match ch { INVALID => { has_error0006 = true; i += 1 } '\\' => i += 2, '\'' => break (1 + i), _ => i += 1, } } }; if has_error0006 { self.with_error(Errors::error0006) } if has_error0012 { self.with_error(Errors::error0012) } self.with_offset(new_offset); TokenKind::SingleQuotedStringLiteral } fn scan_hexadecimal_escape(&mut self) { let ch2 = self.peek_char(2); let ch3 = self.peek_char(3); if !(Self::is_hexadecimal_digit(ch2)) { // TODO: Consider producing an error for a malformed hex escape // let lexer = with_error lexer SyntaxError.error0005 in self.advance(2); } else if !(Self::is_hexadecimal_digit(ch3)) { // let lexer = with_error lexer SyntaxError.error0005 in self.advance(3) } else { self.advance(4) } } fn scan_unicode_escape(&mut self) { // At present the lexer is pointing at \u if self.peek_char(2) == '{' { if self.peek_char(3) == '$' { // We have a malformed unicode escape that contains a possible embedded // expression. Eat the \u and keep on processing the embedded expression. // TODO: Consider producing a warning for a malformed unicode escape. self.advance(2) } else { // We have a possibly well-formed escape sequence, and at least we know // that it is not an embedded expression. // TODO: Consider producing an error if the digits are out of range // of legal Unicode characters. // TODO: Consider producing an error if there are no digits. // Skip over the slash, u and brace, and start lexing the number. self.advance(3); self.scan_hexadecimal_digits(); let ch = self.peek_char(0); if ch != '}' { // TODO: Consider producing a warning for a malformed unicode escape. {} } else { self.advance(1) } } } else { // We have a malformed unicode escape sequence. Bail out. // TODO: Consider producing a warning for a malformed unicode escape. self.advance(2) } } fn skip_uninteresting_double_quote_like_string_characters(&mut self) { let is_uninteresting = |ch| match ch { INVALID | '\\' | '$' | '{' | '[' | ']' | '-' => false, ch if ('0'..='9').contains(&ch) => false, ch => ch != '"' && !Self::is_name_nondigit(ch), }; self.skip_while(is_uninteresting); } fn scan_integer_literal_in_string(&mut self) -> TokenKind { if self.peek_char(0) == '0' { match self.peek_char(1) { 'x' | 'X' => { self.advance(2); self.scan_hex_literal() } 'b' | 'B' => { self.advance(2); self.scan_binary_literal() } _ => { // An integer literal starting with 0 in a string will actually // always be treated as a string index in HHVM, and not as an octal. // In such a case, HHVM actually scans all decimal digits to create the // token. 
TODO: (kasper) T40381519 we may want to change this behavior to something more // sensible self.scan_decimal_digits_with_underscores(); TokenKind::DecimalLiteral } } } else { self.scan_decimal_digits_with_underscores(); TokenKind::DecimalLiteral } } fn scan_double_quote_like_string_literal_from_start(&mut self) -> TokenKind { let literal_token_kind = TokenKind::DoubleQuotedStringLiteral; let head_token_kind = TokenKind::DoubleQuotedStringLiteralHead; self.advance(1); loop { // If there's nothing interesting in this double-quoted string then // we can just hand it back as-is. self.skip_uninteresting_double_quote_like_string_characters(); match self.peek_char(0) { INVALID => { // If the string is unterminated then give an error; if this is an // embedded zero character then give an error and recurse; we might // be able to make more progress. if self.at_end() { self.with_error(Errors::error0012); break literal_token_kind; } else { self.with_error(Errors::error0006); self.advance(1) } } '"' => { // We made it to the end without finding a special character. self.advance(1); break literal_token_kind; } _ => // We've found a backslash, dollar or brace. { break head_token_kind; } } } } fn is_heredoc_tail(&self, name: &[u8]) -> bool { // A heredoc tail is the identifier immediately preceded by a newline // and immediately followed by an optional semi and then a newline. // // Note that the newline and optional semi are not part of the literal; // the literal's lexeme ends at the end of the name. Either there is // no trivia and the next token is a semi-with-trailing-newline, or // the trailing trivia is a newline. // // This odd rule is to ensure that both // $x = <<<HERE // something // HERE; // // and // // $x = <<<HERE // something // HERE // . "something else"; // // are legal. if !(Self::is_newline(self.peek_back(1))) { false } else { let len = name.len(); let ch0 = self.peek_char(len); let ch1 = self.peek_char(len + 1); ((Self::is_newline(ch0)) || ch0 == ';' && (Self::is_newline(ch1))) && self.peek_string(len) == name } } fn get_tail_token_kind(&self, literal_kind: &StringLiteralKind) -> TokenKind { match literal_kind { StringLiteralKind::LiteralHeredoc { .. 
} => TokenKind::HeredocStringLiteralTail, StringLiteralKind::LiteralDoubleQuoted => TokenKind::DoubleQuotedStringLiteralTail, } } fn get_string_literal_body_or_double_quoted_tail( &self, literal_kind: &StringLiteralKind, ) -> TokenKind { if literal_kind == &StringLiteralKind::LiteralDoubleQuoted { TokenKind::DoubleQuotedStringLiteralTail } else { TokenKind::StringLiteralBody } } fn scan_string_literal_in_progress(&mut self, literal_kind: &StringLiteralKind) -> TokenKind { let (is_heredoc, name): (bool, &[u8]) = match literal_kind { StringLiteralKind::LiteralHeredoc { heredoc } => (true, heredoc), _ => (false, b""), }; let ch0 = self.peek_char(0); if Self::is_name_nondigit(ch0) { if is_heredoc && (self.is_heredoc_tail(name)) { self.scan_name_impl(); TokenKind::HeredocStringLiteralTail } else { self.scan_name_impl(); TokenKind::Name } } else { match ch0 { INVALID => { if self.at_end() { self.with_error(Errors::error0012); self.get_tail_token_kind(literal_kind) } else { self.with_error(Errors::error0006); self.advance(1); self.skip_uninteresting_double_quote_like_string_characters(); TokenKind::StringLiteralBody } } '"' => { let kind = self.get_string_literal_body_or_double_quoted_tail(literal_kind); self.advance(1); kind } '$' => { if Self::is_name_nondigit(self.peek_char(1)) { self.scan_variable() } else { self.advance(1); TokenKind::Dollar } } '{' => { self.advance(1); TokenKind::LeftBrace } '\\' => { match self.peek_char(1) { // In these cases we just skip the escape sequence and // keep on scanning for special characters. | '\\' | '"' | '$' | 'e' | 'f' | 'n' | 'r' | 't' | 'v' | '`' // Same in these cases; there might be more octal characters following but // if there are, we'll just eat them as normal characters. | '0' | '1' | '2' | '3' | '4' | '5' | '6' | '7' => { self.advance(2); self.skip_uninteresting_double_quote_like_string_characters(); TokenKind::StringLiteralBody} | 'x' => { self.scan_hexadecimal_escape(); self.skip_uninteresting_double_quote_like_string_characters(); TokenKind::StringLiteralBody } | 'u' => { self.scan_unicode_escape(); self.skip_uninteresting_double_quote_like_string_characters(); TokenKind::StringLiteralBody } | '{' => { // The rules for escaping open braces in Hack are bizarre. Suppose we // have // $x = 123; // $y = 456; // $z = "\{$x,$y\}"; // What is the value of $z? Naively you would think that the backslash // escapes the braces, and the variables are embedded, so {123,456}. But // that's not what happens. Yes, the backslash makes the brace no longer // the opening brace of an expression. But the backslash is still part // of the string! This is the string \{123,456\}. // TODO: We might want to fix this because this is very strange. // Eat the backslash and the brace. self.advance(2); TokenKind::StringLiteralBody } | _ => { // TODO: A backslash followed by something other than an escape sequence // is legal in hack, and treated as though it was just the backslash // and the character. However we might consider making this a warning. // It is particularly egregious when we have something like: // $x = "abcdef \ // ghi"; // The author of the code likely means the backslash to mean line // continuation but in fact it just means to put a backslash and newline // in the string. 
self.advance(1); self.skip_uninteresting_double_quote_like_string_characters(); TokenKind::StringLiteralBody } } } '[' => { self.advance(1); TokenKind::LeftBracket } ']' => { self.advance(1); TokenKind::RightBracket } '-' => { if (self.peek_char(1)) == '>' { self.advance(2); TokenKind::MinusGreaterThan } else { // Nothing interesting here. Skip it and find the next // interesting character. self.advance(1); self.skip_uninteresting_double_quote_like_string_characters(); TokenKind::StringLiteralBody } } ch if ('0'..='9').contains(&ch) => { let mut lexer1 = self.clone(); let literal = lexer1.scan_integer_literal_in_string(); if self.errors.len() == lexer1.errors.len() { self.continue_from(lexer1); literal } else { // If we failed to scan a literal, do not interpret the literal self.with_offset(lexer1.offset()); TokenKind::StringLiteralBody } } _ => { // Nothing interesting here. Skip it and find the next // interesting character. self.advance(1); self.skip_uninteresting_double_quote_like_string_characters(); TokenKind::StringLiteralBody } } } } // A heredoc string literal has the form // // header // optional body // trailer // // The header is: // // <<< (optional whitespace) name (no whitespace) (newline) // // The optional body is: // // any characters whatsoever including newlines (newline) // // The trailer is: // // (no whitespace) name (no whitespace) (optional semi) (no whitespace) (newline) // // The names must be identical. The trailing semi and newline must be present. // // The body is any and all characters, up to the first line that exactly matches // the trailer. // // The body may contain embedded expressions. // // A nowdoc string literal has the same form except that the first name is // enclosed in single quotes, and it may not contain embedded expressions. 
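// To make the shapes above concrete, a hedged illustration (hypothetical
// Hack source, not taken from this repository):
//
//   $x = <<<NAME
//   heredoc body; an embedded $expr is allowed here
//   NAME;
//
//   $y = <<<'NAME'
//   nowdoc body; $expr stays literal here
//   NAME;
//
// The header is `<<<NAME` (single-quoted for a nowdoc), the body runs until
// the first line that exactly matches the trailer, and the trailer is the
// same name followed by an optional semi and a newline.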
fn scan_docstring_name_actual(&mut self) -> &'a [u8] { let ch = self.peek_char(0); if Self::is_name_nondigit(ch) { let start_offset = self.offset(); self.advance(1); self.skip_name_end(); self.source.sub(start_offset, self.offset() - start_offset) } else { self.with_error(Errors::error0008); b"" } } fn scan_docstring_name(&mut self) -> (&'a [u8], TokenKind) { self.skip_whitespace(); let ch = self.peek_char(0); let kind = if ch == '\'' { TokenKind::NowdocStringLiteral } else { TokenKind::HeredocStringLiteral }; let name = if ch == '\'' { self.advance(1); let name = self.scan_docstring_name_actual(); if (self.peek_char(0)) == '\'' { self.advance(1); name } else { self.with_error(Errors::error0010); name } } else { // Starting with PHP 5.3.0, the opening Heredoc identifier // may optionally be enclosed in double quotes: if ch == '"' { self.advance(1) }; let name = self.scan_docstring_name_actual(); if ch == '"' { // same logic as above, just for double quote if self.peek_char(0) == '\"' { self.advance(1); } else { self.with_error(Errors::missing_double_quote) } } name }; (name, kind) } fn scan_docstring_header(&mut self) -> (&'a [u8], TokenKind) { let ch = self.peek_char(0); // Skip 3 for <<< or 4 for b<<< let skip_count = if ch == 'b' { 4 } else { 3 }; self.advance(skip_count); let (name, kind) = self.scan_docstring_name(); let ch = self.peek_char(0); if !Self::is_newline(ch) { self.with_error(Errors::error0011) } self.skip_to_end_of_line(); self.skip_end_of_line(); (name, kind) } fn scan_docstring_remainder(&mut self, name: &[u8]) { let len = name.len(); loop { let ch0 = self.peek_char(len); let ch1 = self.peek_char(len + 1); if ((Self::is_newline(ch0)) || ch0 == ';' && (Self::is_newline(ch1))) && self.peek_string(len) == name { self.advance(len); break; } else { self.skip_to_end_of_line(); let ch = self.peek_char(0); if Self::is_newline(ch) { self.skip_end_of_line() } else { // If we got here then we ran off the end of the file without // finding a newline. Just bail. self.with_error(Errors::error0011); break; } } } } fn scan_docstring_literal(&mut self) -> TokenKind { let (name, kind) = self.scan_docstring_header(); self.scan_docstring_remainder(name); kind } fn scan_xhp_label(&mut self) { self.advance(1); self.skip_name_end(); } fn scan_xhp_element_name(&mut self, attribute: bool) -> TokenKind { // An XHP element name is a sequence of one or more XHP labels each separated // by a single : or -. Note that it is possible for an XHP element name to be // followed immediately by a : or - that is the next token, so if we find // a : or - not followed by a label, we need to terminate the token. self.scan_xhp_label(); let ch0 = self.peek_char(0); let ch1 = self.peek_char(1); if (!attribute && ch0 == ':' || ch0 == '-') && Self::is_name_nondigit(ch1) { self.advance(1); self.scan_xhp_element_name(false) } else { TokenKind::XHPElementName } } fn scan_xhp_class_no_dash(&mut self) -> TokenKind { self.scan_xhp_label(); let ch0 = self.peek_char(0); let ch1 = self.peek_char(1); if ch0 == ':' && Self::is_name_nondigit(ch1) { self.advance(1); self.scan_xhp_class_no_dash() } else { TokenKind::XHPElementName } } // Is the next token we're going to lex a possible xhp class name? fn is_xhp_class_name(&self) -> bool { (self.peek_char(0) == ':') && (Self::is_name_nondigit(self.peek_char(1))) } fn scan_xhp_class_name(&mut self) -> TokenKind { // An XHP class name is a colon followed by an xhp name. 
if self.is_xhp_class_name() { self.advance(1); self.scan_xhp_element_name(false); TokenKind::XHPClassName } else { self.with_error(Errors::error0008); self.advance(1); TokenKind::ErrorToken } } // To support xhp class style class definitions we don't require a : prefix fn scan_xhp_modifier_class_name(&mut self) -> TokenKind { // we don't want to allow xhp names with a : prefix here if self.peek_char(0) == ':' { self.with_error(Errors::error0008); TokenKind::ErrorToken } else { self.scan_xhp_class_no_dash(); TokenKind::XHPClassName } } fn scan_xhp_string_literal(&mut self) -> TokenKind { // XHP string literals are just straight up "find the closing quote" // strings. Embedded newlines are legal. let mut offset: usize = 1; loop { match self.peek_char(offset) { INVALID => { self.advance(offset); if self.at_end() { self.with_error(Errors::error0012); return TokenKind::XHPStringLiteral; } else { self.with_error(Errors::error0006); offset = 1 } } '"' => { self.advance(offset + 1); return TokenKind::XHPStringLiteral; } _ => offset += 1, } } } // Note that this does not scan an XHP body fn scan_xhp_token(&mut self) -> TokenKind { // TODO: HHVM requires that there be no trivia between < and name in an // opening tag, but does allow trivia between </ and name in a closing tag. // Consider allowing trivia in an opening tag. let ch0 = self.peek_char(0); if ch0 == INVALID && self.at_end() { TokenKind::EndOfFile } else if self.is_xhp_class_name() || Self::is_name_nondigit(ch0) { self.scan_xhp_element_name(false) } else { match ch0 { '{' => { self.advance(1); TokenKind::LeftBrace } '}' => { self.advance(1); TokenKind::RightBrace } '=' => { self.advance(1); TokenKind::Equal } '<' => { if (self.peek_char(1)) == '/' { self.advance(2); TokenKind::LessThanSlash } else { self.advance(1); TokenKind::LessThan } } '"' => self.scan_xhp_string_literal(), '/' => { if (self.peek_char(1)) == '>' { self.advance(2); TokenKind::SlashGreaterThan } else { self.with_error(Errors::error0006); self.advance(1); TokenKind::ErrorToken } } '>' => { self.advance(1); TokenKind::GreaterThan } _ => { self.with_error(Errors::error0006); self.advance(1); TokenKind::ErrorToken } } } } fn scan_xhp_comment(&mut self) { let mut offset = 4; loop { let ch0 = self.peek_char(offset); let ch1 = self.peek_char(offset + 1); let ch2 = self.peek_char(offset + 2); match (ch0, ch1, ch2) { (INVALID, _, _) => { self.advance(offset); return self.with_error(Errors::error0014); } ('-', '-', '>') => return self.advance(offset + 3), _ => offset += 1, } } } fn scan_xhp_body(&mut self) -> TokenKind { // Naively you might think that an XHP body is just a bunch of characters, // terminated by an embedded { } expression or a tag. However, whitespace // and newlines are relevant in XHP bodies because they are "soft". // That is, any section of contiguous trivia has the same semantics as a // single space or newline -- just as in HTML. // // Obviously this is of relevance to code formatters. // // Therefore we detect whitespace and newlines within XHP bodies and treat // it as trivia surrounding the tokens within the body. // // TODO: Is this also true of whitespace within XHP comments? If so then // we need to make XHP comments a sequence of tokens, rather than a // single token as they are now. 
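// As a hypothetical illustration of the "soft" trivia rule described above,
// in an XHP body such as:
//
//   <p>   hello
//     {$name}</p>
//
// the run of spaces before `hello` and the newline-plus-indent before
// `{$name}` are scanned as whitespace / end-of-line trivia surrounding the
// XHPBody tokens, carrying the same meaning as a single space or newline.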
let ch0 = self.peek_char(0); match ch0 { INVALID if self.at_end() => TokenKind::EndOfFile, '{' => { self.advance(1); TokenKind::LeftBrace } '}' => { self.advance(1); TokenKind::RightBrace } '<' => { let ch1 = self.peek_char(1); let ch2 = self.peek_char(2); let ch3 = self.peek_char(3); match (ch1, ch2, ch3) { ('!', '-', '-') => { self.scan_xhp_comment(); TokenKind::XHPComment } ('/', _, _) => { self.advance(2); TokenKind::LessThanSlash } _ => { self.advance(1); TokenKind::LessThan } } } _ => { let mut offset = 0; loop { let ch = self.peek_char(offset); match ch { INVALID => { self.advance(offset); if self.at_end() { self.with_error(Errors::error0013); break; } else { self.with_error(Errors::error0006); offset = 1 } } '\t' | ' ' | '\r' | '\n' | '{' | '}' | '<' => { self.advance(offset); break; } _ => offset += 1, } } TokenKind::XHPBody } } } fn scan_dollar_token(&mut self) -> TokenKind { // We have a problem here. We wish to be able to lexically analyze both // PHP and Hack, but the introduction of $$ to Hack makes them incompatible. // "$$x" and "$$ $x" are legal in PHP, but illegal in Hack. // The rule in PHP seems to be that $ is a prefix operator, it is a token, // it can be followed by trivia, but the next token has to be another $ // operator, a variable $x, or a {. // // Here's a reasonable compromise. (TODO: Review this decision.) // // $$x lexes as $ $x // $$$x lexes as $ $ $x // and so on. // // $$ followed by anything other than a name or a $ lexes as $$. // // This means that lexing a PHP program which contains "$$ $x" is different // will fail at parse time, but I'm willing to live with that. // // This means that lexing a Hack program which contains // "$x |> $$instanceof Foo" produces an error as well. // // If these decisions are unacceptable then we will need to make the lexer // be aware of whether it is lexing PHP or Hack; thus far we have not had // to make this distinction. // We are already at $. let ch1 = self.peek_char(1); match ch1 { '$' => { let ch2 = self.peek_char(2); if ch2 == '$' || ch2 == '{' || Self::is_name_nondigit(ch2) { self.advance(1); TokenKind::Dollar // $$x or $$$ } else { self.advance(2); TokenKind::DollarDollar // $$ } } _ => { if Self::is_name_nondigit(ch1) { self.scan_variable() // $x } else { self.advance(1); TokenKind::Dollar // $ } } } } fn scan_token(&mut self, in_type: bool) -> TokenKind { let ch0 = self.peek_char(0); match ch0 { '[' => { self.advance(1); TokenKind::LeftBracket } ']' => { self.advance(1); TokenKind::RightBracket } '(' => { self.advance(1); TokenKind::LeftParen } ')' => { self.advance(1); TokenKind::RightParen } '{' => { self.advance(1); TokenKind::LeftBrace } '}' => { self.advance(1); TokenKind::RightBrace } '.' => match self.peek_char(1) { '=' => { self.advance(2); TokenKind::DotEqual } ch if ('0'..='9').contains(&ch) => self.scan_after_decimal_point_with_underscores(), '.' => { if (self.peek_char(2)) == '.' 
{ self.advance(3); TokenKind::DotDotDot } else { self.advance(1); TokenKind::Dot } } _ => { self.advance(1); TokenKind::Dot } }, '-' => match self.peek_char(1) { '=' => { self.advance(2); TokenKind::MinusEqual } '-' => { self.advance(2); TokenKind::MinusMinus } '>' => { self.advance(2); TokenKind::MinusGreaterThan } _ => { self.advance(1); TokenKind::Minus } }, '+' => match self.peek_char(1) { '=' => { self.advance(2); TokenKind::PlusEqual } '+' => { self.advance(2); TokenKind::PlusPlus } _ => { self.advance(1); TokenKind::Plus } }, '*' => match (self.peek_char(1), self.peek_char(2)) { ('=', _) => { self.advance(2); TokenKind::StarEqual } ('*', '=') => { self.advance(3); TokenKind::StarStarEqual } ('*', _) => { self.advance(2); TokenKind::StarStar } _ => { self.advance(1); TokenKind::Star } }, '~' => { self.advance(1); TokenKind::Tilde } '!' => match (self.peek_char(1), self.peek_char(2)) { ('=', '=') => { self.advance(3); TokenKind::ExclamationEqualEqual } ('=', _) => { self.advance(2); TokenKind::ExclamationEqual } _ => { self.advance(1); TokenKind::Exclamation } }, '$' => self.scan_dollar_token(), '/' => { if (self.peek_char(1)) == '=' { self.advance(2); TokenKind::SlashEqual } else { self.advance(1); TokenKind::Slash } } '%' => { if (self.peek_char(1)) == '=' { self.advance(2); TokenKind::PercentEqual } else { self.advance(1); TokenKind::Percent } } '<' => { match (self.peek_char(1), self.peek_char(2)) { ('<', '<') => self.scan_docstring_literal(), ('<', '=') => { self.advance(3); TokenKind::LessThanLessThanEqual } // TODO: We lex and parse the spaceship operator. // TODO: This is not in the spec at present. We should either make it an // TODO: error, or add it to the specification. ('=', '>') => { self.advance(3); TokenKind::LessThanEqualGreaterThan } ('=', _) => { self.advance(2); TokenKind::LessThanEqual } ('<', _) => { self.advance(2); TokenKind::LessThanLessThan } _ => { self.advance(1); TokenKind::LessThan } } } '>' => { match (self.peek_char(1), self.peek_char(2)) { // If we are parsing a generic type argument list then we might be at the >> // in `List<List<int>>``, or at the >= of `let x:vec<int>=...`. In that case // we want to lex two >'s instead of >> / one > and one = instead of >=. (ch, _) if (ch == '>' || ch == '=') && in_type => { self.advance(1); TokenKind::GreaterThan } ('>', '=') => { self.advance(3); TokenKind::GreaterThanGreaterThanEqual } ('>', _) => { self.advance(2); TokenKind::GreaterThanGreaterThan } ('=', _) => { self.advance(2); TokenKind::GreaterThanEqual } _ => { self.advance(1); TokenKind::GreaterThan } } } '=' => match (self.peek_char(1), self.peek_char(2)) { ('=', '=') => { self.advance(3); TokenKind::EqualEqualEqual } ('=', '>') => { self.advance(3); TokenKind::EqualEqualGreaterThan } ('=', _) => { self.advance(2); TokenKind::EqualEqual } ('>', _) => { self.advance(2); TokenKind::EqualGreaterThan } _ => { self.advance(1); TokenKind::Equal } }, '^' => { if (self.peek_char(1)) == '=' { self.advance(2); TokenKind::CaratEqual } else { self.advance(1); TokenKind::Carat } } '|' => match self.peek_char(1) { '=' => { self.advance(2); TokenKind::BarEqual } '>' => { self.advance(2); TokenKind::BarGreaterThan } '|' => { self.advance(2); TokenKind::BarBar } _ => { self.advance(1); TokenKind::Bar } }, '&' => match self.peek_char(1) { '=' => { self.advance(2); TokenKind::AmpersandEqual } '&' => { self.advance(2); TokenKind::AmpersandAmpersand } _ => { self.advance(1); TokenKind::Ampersand } }, '?' 
=> match (self.peek_char(1), self.peek_char(2)) { (':', _) if !in_type => { self.advance(2); TokenKind::QuestionColon } ('-', '>') => { self.advance(3); TokenKind::QuestionMinusGreaterThan } ('?', '=') => { self.advance(3); TokenKind::QuestionQuestionEqual } ('?', _) => { self.advance(2); TokenKind::QuestionQuestion } ('a', 's') if !Self::is_name_nondigit(self.peek_char(3)) => { self.advance(3); TokenKind::QuestionAs } _ => { self.advance(1); TokenKind::Question } }, ':' => { let ch1 = self.peek_char(1); if ch1 == ':' { self.advance(2); TokenKind::ColonColon } else { self.advance(1); TokenKind::Colon } } ';' => { self.advance(1); TokenKind::Semicolon } ',' => { self.advance(1); TokenKind::Comma } '@' => { self.advance(1); TokenKind::At } '0' => match self.peek_char(1) { 'x' | 'X' => { self.advance(2); self.scan_hex_literal() } 'b' | 'B' => { self.advance(2); self.scan_binary_literal() } _ => self.scan_octal_or_float(), }, ch if ('1'..='9').contains(&ch) => self.scan_decimal_or_float(), '\'' => self.scan_single_quote_string_literal(), '"' => self.scan_double_quote_like_string_literal_from_start(), '`' => { self.advance(1); TokenKind::Backtick } '\\' => { self.advance(1); TokenKind::Backslash } '#' => { self.advance(1); TokenKind::Hash } 'b' if { let c1 = self.peek_char(1); let c2 = self.peek_char(2); let c3 = self.peek_char(3); c1 == '"' || c1 == '\'' || (c1 == '<' && c2 == '<' && c3 == '<') } => { self.advance(1); self.scan_token(in_type) } // Names _ => { if ch0 == INVALID && self.at_end() { TokenKind::EndOfFile } else if Self::is_name_nondigit(ch0) { self.scan_name() } else { self.with_error(Errors::error0006); self.advance(1); TokenKind::ErrorToken } } } } fn scan_token_outside_type(&mut self) -> TokenKind { self.scan_token(false) } fn scan_token_inside_type(&mut self) -> TokenKind { self.scan_token(true) } // Lexing trivia // SPEC: // // white-space-character:: // new-line // Space character (U+0020) // Horizontal-tab character (U+0009) // // single-line-comment:: // // input-characters-opt // # input-characters-opt // // new-line:: // Carriage-return character (U+000D) // Line-feed character (U+000A) // Carriage-return character followed by line-feed character fn str_scan_end_of_line(s: &[u8], i: usize) -> usize { match s.get(i).map(|x| *x as char) { None => i + 1, Some('\r') => match s.get(i + 1).map(|x| *x as char) { Some('\n') => 2 + i, _ => i + 1, }, Some('\n') => i + 1, _ => panic!("str_scan_end_of_line called while not on end of line!"), } } fn scan_end_of_line(&mut self) -> Trivium<TF> { match self.peek_char(0) { '\r' => { let w = if self.peek_char(1) == '\n' { 2 } else { 1 }; self.advance(w); Trivia::<TF>::make_eol(self.start, w) } '\n' => { self.advance(1); Trivia::<TF>::make_eol(self.start, 1) } _ => panic!("scan_end_of_line called while not on end of line!"), } } fn scan_single_line_comment(&mut self) -> Trivium<TF> { // A fallthrough comment is two slashes, any amount of whitespace, // FALLTHROUGH, and any characters may follow. // TODO: Consider allowing lowercase fallthrough. 
self.advance(2); self.skip_whitespace(); let lexer_ws = self.clone(); self.skip_to_end_of_line(); let w = self.width(); let remainder = self.offset - lexer_ws.offset; if remainder >= 11 && lexer_ws.peek_string(11) == b"FALLTHROUGH" { Trivia::<TF>::make_fallthrough(self.start, w) } else { Trivia::<TF>::make_single_line_comment(self.start, w) } } fn skip_to_end_of_delimited_comment(&mut self) { let mut offset = 0; loop { let ch0 = self.peek_char(offset); if ch0 == INVALID { self.advance(offset); if self.at_end() { return self.with_error(Errors::error0007); } else { // TODO: Do we want to give a warning for an embedded zero char // inside a comment? offset = 1; } } else if ch0 == '*' && (self.peek_char(offset + 1)) == '/' { return self.advance(offset + 2); } else { offset += 1 } } } fn scan_delimited_comment(&mut self) -> Trivium<TF> { // The original lexer lexes a fixme / ignore error as: // // slash star [whitespace]* HH_FIXME [whitespace or newline]* leftbracket // [whitespace or newline]* integer [any text]* star slash // // Notice that the original lexer oddly enough does not verify that there // is a right bracket. // // For our purposes we will just check for HH_FIXME / HH_IGNORE_ERROR; // a later pass can try to parse out the integer if there is one, // give a warning if there is not, and so on. self.advance(2); self.skip_whitespace(); let lexer_ws = self.clone(); self.skip_to_end_of_delimited_comment(); let w = self.width(); if lexer_ws.match_string(b"HH_FIXME") { Trivia::<TF>::make_fix_me(self.start, w) } else if lexer_ws.match_string(b"HH_IGNORE_ERROR") { Trivia::<TF>::make_ignore_error(self.start, w) } else { Trivia::<TF>::make_delimited_comment(self.start, w) } } fn scan_php_trivium(&mut self) -> Option<Trivium<TF>> { match self.peek_char(0) { '#' => { self.start_new_lexeme(); // Not trivia None } '/' => { self.start_new_lexeme(); match self.peek_char(1) { '/' => Some(self.scan_single_line_comment()), '*' => Some(self.scan_delimited_comment()), _ => None, } } ' ' | '\t' => { let new_end = Self::str_skip_whitespace(self.source_text_string(), self.offset); let new_start = self.offset; let new_trivia = Trivia::<TF>::make_whitespace(new_start, new_end - new_start); self.with_start_offset(new_start, new_end); Some(new_trivia) } '\r' | '\n' => { self.start_new_lexeme(); Some(self.scan_end_of_line()) } _ => { self.start_new_lexeme(); // Not trivia None } } } fn scan_xhp_trivium(&mut self) -> Option<Trivium<TF>> { // TODO: Should XHP comments <!-- --> be their own thing, or a kind of // trivia associated with a token? Right now they are the former. let i = self.offset; let ch = self.peek_char(0); match ch { ' ' | '\t' => { let j = Self::str_skip_whitespace(self.source_text_string(), i); self.with_start_offset(i, j); Some(Trivia::<TF>::make_whitespace(i, j - i)) } '\r' | '\n' => { let j = Self::str_scan_end_of_line(self.source_text_string(), i); self.with_start_offset(i, j); Some(Trivia::<TF>::make_eol(i, j - i)) } _ => // Not trivia { self.start_new_lexeme(); None } } } // We divide trivia into "leading" and "trailing" trivia of an associated // token. This means that we must find a dividing line between the trailing trivia // following one token and the leading trivia of the following token. Plainly // we need only find this line while scanning trailing trivia. The heuristics // we use are: // * The first newline trivia encountered is the last trailing trivia. // * The newline which follows a // or # comment is not part of the comment // but does terminate the trailing trivia. 
// * A pragma to turn checks off (HH_FIXME and HH_IGNORE_ERROR) is // always a leading trivia. fn scan_leading_trivia( &mut self, scanner: impl Fn(&mut Self) -> Option<Trivium<TF>>, ) -> Trivia<TF> { let mut acc = self.token_factory.trivia_factory_mut().make(); while let Some(t) = scanner(self) { acc.push(t) } acc } fn scan_leading_trivia_with_width( &mut self, scanner: impl Fn(&mut Self) -> Option<Trivium<TF>>, mut width: usize, ) -> Trivia<TF> { let mut acc = self.token_factory.trivia_factory_mut().make(); let mut extra_token_error_width = 0; let mut extra_token_error_offset = self.offset(); loop { if width == 0 { if extra_token_error_width > 0 { acc.push(Trivia::<TF>::make_extra_token_error( extra_token_error_offset, extra_token_error_width, )); } break acc; } if let Some(t) = scanner(self) { if extra_token_error_width > 0 { acc.push(Trivia::<TF>::make_extra_token_error( extra_token_error_offset, extra_token_error_width, )); extra_token_error_width = 0; extra_token_error_offset = self.start(); } width -= t.width(); acc.push(t); } else { self.advance(1); width -= 1; extra_token_error_width += 1; } } } pub fn scan_leading_php_trivia_with_width( &mut self, width: usize, ) -> <TF::Token as LexableToken>::Trivia { self.scan_leading_trivia_with_width(Self::scan_php_trivium, width) } pub fn scan_leading_xhp_trivia_with_width( &mut self, width: usize, ) -> <TF::Token as LexableToken>::Trivia { self.scan_leading_trivia_with_width(Self::scan_xhp_trivium, width) } pub(crate) fn scan_leading_php_trivia(&mut self) -> <TF::Token as LexableToken>::Trivia { self.scan_leading_trivia(Self::scan_php_trivium) } pub(crate) fn scan_leading_xhp_trivia(&mut self) -> <TF::Token as LexableToken>::Trivia { self.scan_leading_trivia(Self::scan_xhp_trivium) } fn scan_trailing_trivia( &mut self, scanner: impl Fn(&mut Self) -> Option<Trivium<TF>>, ) -> <TF::Token as LexableToken>::Trivia { let mut acc = self.token_factory.trivia_factory_mut().make(); loop { let mut lexer1 = self.clone(); match scanner(&mut lexer1) { None => { self.continue_from(lexer1); return acc; } Some(t) => match t.kind() { TriviaKind::EndOfLine => { self.continue_from(lexer1); acc.push(t); return acc; } TriviaKind::FixMe | TriviaKind::IgnoreError => { return acc; } _ => { self.continue_from(lexer1); acc.push(t) } }, } } } pub fn scan_trailing_php_trivia(&mut self) -> <TF::Token as LexableToken>::Trivia { self.scan_trailing_trivia(Self::scan_php_trivium) } pub fn scan_trailing_xhp_trivia(&mut self) -> <TF::Token as LexableToken>::Trivia { self.scan_trailing_trivia(Self::scan_xhp_trivium) } pub fn is_next_name(&self) -> bool { let mut lexer = self.clone(); lexer.scan_leading_php_trivia(); Self::is_name_nondigit(lexer.peek_char(0)) } pub fn is_next_xhp_class_name(&self) -> bool { let mut lexer = self.clone(); lexer.scan_leading_php_trivia(); lexer.is_xhp_class_name() } as_case_insensitive_keyword!( 12, "abstract", "as", "bool", "boolean", "break", "case", "catch", "class", "clone", "const", "continue", "default", "do", "echo", "else", "elseif", "empty", "endif", "eval", "exports", "extends", "false", "final", "finally", "for", "foreach", "function", "global", "if", "implements", "imports", "include", "include_once", "inout", "instanceof", "insteadof", "int", "integer", "interface", "isset", "list", "namespace", "new", "null", "parent", "print", "private", "protected", "public", "require", "require_once", "return", "self", "static", "string", "switch", "throw", "trait", "try", "true", "unset", "use", "using", "var", "void", "while", "yield" ); fn 
as_keyword(&mut self, only_reserved: bool, kind: TokenKind) -> TokenKind { if kind == TokenKind::Name { let original_text = self.current_text_as_str(); let (text, has_upper) = self .as_case_insensitive_keyword(original_text) .unwrap_or((original_text, false)); match TokenKind::from_string(text.as_bytes(), only_reserved) { Some(keyword) => { if has_upper && text != "true" && text != "false" && text != "null" { let err = Errors::uppercase_kw(original_text); self.with_error(err); } keyword } _ => TokenKind::Name, } } else { kind } } fn scan_token_and_leading_trivia( &mut self, scanner: impl Fn(&mut Self) -> TokenKind, as_name: KwSet, ) -> (TokenKind, usize, <TF::Token as LexableToken>::Trivia) { // Get past the leading trivia let leading = self.scan_leading_php_trivia(); // Remember where we were when we started this token self.start_new_lexeme(); let kind = scanner(self); let kind = match as_name { KwSet::AllKeywords => kind, KwSet::NonReservedKeywords => self.as_keyword(true, kind), KwSet::NoKeywords => self.as_keyword(false, kind), }; let w = self.width(); (kind, w, leading) } fn scan_token_and_trivia( &mut self, scanner: &impl Fn(&mut Self) -> TokenKind, as_name: KwSet, ) -> TF::Token { let token_start = self.offset; let (kind, w, leading) = self.scan_token_and_leading_trivia(scanner, as_name); let trailing = match kind { TokenKind::DoubleQuotedStringLiteralHead => { self.token_factory.trivia_factory_mut().make() } _ => self.scan_trailing_php_trivia(), }; self.token_factory .make(kind, token_start, w, leading, trailing) } fn scan_assert_progress(&mut self, tokenizer: impl Fn(&mut Self) -> TF::Token) -> TF::Token { let original_remaining = self.remaining(); let token = tokenizer(self); let new_remaining = self.remaining(); if new_remaining < original_remaining || original_remaining == 0 && new_remaining == 0 && (token.kind()) == TokenKind::EndOfFile { token } else { panic!( "failed to make progress at {} {} {} {:?}\n", self.offset, original_remaining, new_remaining, token.kind() ) } } fn scan_next_token( &mut self, scanner: impl Fn(&mut Self) -> TokenKind, as_name: KwSet, ) -> TF::Token { let tokenizer = |x: &mut Self| x.scan_token_and_trivia(&scanner, as_name); self.scan_assert_progress(tokenizer) } fn scan_next_token_as_name(&mut self, scanner: impl Fn(&mut Self) -> TokenKind) -> TF::Token { self.scan_next_token(scanner, KwSet::AllKeywords) } fn scan_next_token_as_keyword( &mut self, scanner: impl Fn(&mut Self) -> TokenKind, ) -> TF::Token { self.scan_next_token(scanner, KwSet::NoKeywords) } fn scan_next_token_nonreserved_as_name( &mut self, scanner: impl Fn(&mut Self) -> TokenKind, ) -> TF::Token { self.scan_next_token(scanner, KwSet::NonReservedKeywords) } fn next_token_impl(&mut self) -> TF::Token { if self.in_type { self.scan_next_token_as_keyword(Self::scan_token_inside_type) } else { self.scan_next_token_as_keyword(Self::scan_token_outside_type) } } // Entrypoints pub fn peek_next_token(&self) -> TF::Token { { let cache = self.cache.borrow(); if let Some(cache) = cache.as_ref() { if cache.0 == *self { return cache.1.clone(); } } } let mut lexer = self.clone(); lexer.errors = vec![]; let before = lexer.to_lexer_pre_snapshot(); let token = lexer.next_token_impl(); let after = lexer.into_lexer_post_snapshot(); self.cache .replace(Some(LexerCache(before, token.clone(), after))); token } pub fn next_token(&mut self) -> TF::Token { { let mut cache = self.cache.borrow_mut(); if let Some(ref mut cache) = cache.deref_mut() { if cache.0 == *self { self.start = (cache.2).start; self.offset = 
(cache.2).offset; self.in_type = (cache.2).in_type; if !(cache.2).errors.is_empty() { self.errors.append(&mut (cache.2).errors.clone()); } return cache.1.clone(); } } } self.next_token_impl() } pub fn next_token_no_trailing(&mut self) -> TF::Token { let tokenizer = |x: &mut Self| { let token_start = x.offset; let (kind, w, leading) = x.scan_token_and_leading_trivia(Self::scan_token_outside_type, KwSet::NoKeywords); let trailing = x.token_factory.trivia_factory_mut().make(); x.token_factory .make(kind, token_start, w, leading, trailing) }; self.scan_assert_progress(tokenizer) } pub fn next_token_in_string(&mut self, literal_kind: &StringLiteralKind) -> TF::Token { let token_start = self.offset; self.start_new_lexeme(); // We're inside a string. Do not scan leading trivia. let kind = self.scan_string_literal_in_progress(literal_kind); let w = self.width(); // Only scan trailing trivia if we've finished the string. let trailing = match kind { TokenKind::DoubleQuotedStringLiteralTail | TokenKind::HeredocStringLiteralTail => { self.scan_trailing_php_trivia() } _ => self.token_factory.trivia_factory_mut().make(), }; let leading = self.token_factory.trivia_factory_mut().make(); self.token_factory .make(kind, token_start, w, leading, trailing) } pub fn next_docstring_header(&mut self) -> (TF::Token, &'a [u8]) { // We're at the beginning of a heredoc string literal. Scan leading // trivia but not trailing trivia. let token_start = self.offset; let leading = self.scan_leading_php_trivia(); self.start_new_lexeme(); let (name, _) = self.scan_docstring_header(); let w = self.width(); let trailing = self.token_factory.trivia_factory_mut().make(); let token = self.token_factory.make( TokenKind::HeredocStringLiteralHead, token_start, w, leading, trailing, ); (token, name) } pub fn next_token_as_name(&mut self) -> TF::Token { self.scan_next_token_as_name(Self::scan_token_outside_type) } pub fn next_token_non_reserved_as_name(&mut self) -> TF::Token { self.scan_next_token_nonreserved_as_name(Self::scan_token_outside_type) } pub fn next_xhp_element_token(&mut self, no_trailing: bool) -> (TF::Token, &[u8]) { // XHP elements have whitespace, newlines and Hack comments. let tokenizer = |lexer: &mut Self| { let token_start = lexer.offset; let (kind, w, leading) = lexer.scan_token_and_leading_trivia(Self::scan_xhp_token, KwSet::AllKeywords); // We do not scan trivia after an XHPOpen's >. If that is the beginning of // an XHP body then we want any whitespace or newlines to be leading trivia // of the body token. match kind { TokenKind::GreaterThan | TokenKind::SlashGreaterThan if no_trailing => { let trailing = lexer.token_factory.trivia_factory_mut().make(); lexer .token_factory .make(kind, token_start, w, leading, trailing) } _ => { let trailing = lexer.scan_trailing_php_trivia(); lexer .token_factory .make(kind, token_start, w, leading, trailing) } } }; let token = self.scan_assert_progress(tokenizer); let token_width = token.width(); let trailing_width = token.trailing_width(); let token_start_offset = (self.offset) - trailing_width - token_width; let token_text = self.source.sub(token_start_offset, token_width); (token, token_text) } pub fn next_xhp_body_token(&mut self) -> TF::Token { let scanner = |lexer: &mut Self| { let token_start = lexer.offset; let leading = lexer.scan_leading_xhp_trivia(); lexer.start_new_lexeme(); let kind = lexer.scan_xhp_body(); let w = lexer.width(); let trailing = // Trivia (leading and trailing) is semantically // significant for XHPBody tokens. 
When we find elements or // braced expressions inside the body, the trivia should be // seen as leading the next token, but we should certainly // keep it trailing if this is an XHPBody token. if kind == TokenKind::XHPBody { lexer.scan_trailing_xhp_trivia() } else { lexer.token_factory.trivia_factory_mut().make() }; lexer .token_factory .make(kind, token_start, w, leading, trailing) }; self.scan_assert_progress(scanner) } // // When the xhp modifier is used for declaring xhp classes // we do not allow colon prefixes or dashes. // // This ensures that the syntax is closer to regular classes. // pub fn next_xhp_modifier_class_name(&mut self) -> TF::Token { self.scan_token_and_trivia(&Self::scan_xhp_modifier_class_name, KwSet::NoKeywords) } pub fn next_xhp_class_name(&mut self) -> TF::Token { self.scan_token_and_trivia(&Self::scan_xhp_class_name, KwSet::NoKeywords) } pub fn next_xhp_name(&mut self) -> TF::Token { let scanner = |x: &mut Self| x.scan_xhp_element_name(false); self.scan_token_and_trivia(&scanner, KwSet::NoKeywords) } fn make_hashbang_token(&mut self) -> TF::Token { let leading = self.token_factory.trivia_factory_mut().make(); self.skip_to_end_of_line(); let token_start = self.start; let token_width = self.width(); let trailing = self.scan_trailing_php_trivia(); self.start_new_lexeme(); self.token_factory.make( TokenKind::Hashbang, token_start, token_width, leading, trailing, ) } fn make_long_tag( &mut self, name_token_offset: usize, size: usize, less_than_question_token: TF::Token, ) -> (TF::Token, Option<TF::Token>) { // skip name self.advance(size); // single line comments that follow the language in leading markup_text // determine the file check mode, read the trailing trivia and attach it // to the language token let trailing = self.scan_trailing_php_trivia(); let leading = self.token_factory.trivia_factory_mut().make(); let name = self.token_factory .make(TokenKind::Name, name_token_offset, size, leading, trailing); (less_than_question_token, Some(name)) } fn make_markup_suffix(&mut self) -> (TF::Token, Option<TF::Token>) { let leading = self.token_factory.trivia_factory_mut().make(); let trailing = self.token_factory.trivia_factory_mut().make(); let less_than_question_token = self.token_factory.make( TokenKind::LessThanQuestion, self.offset, 2, leading, trailing, ); // skip <? self.advance(2); let name_token_offset = self.offset; let ch0 = self.peek_char(0).to_ascii_lowercase(); let ch1 = self.peek_char(1).to_ascii_lowercase(); match (ch0, ch1) { ('h', 'h') => self.make_long_tag(name_token_offset, 2, less_than_question_token), _ => (less_than_question_token, (None)), } } fn skip_to_end_of_header( &mut self, ) -> (Option<TF::Token>, Option<(TF::Token, Option<TF::Token>)>) { let start_offset = { // if leading section starts with #! - it should span the entire line if self.offset != 0 { panic!("Should only try to lex header at start of document") }; // this should really just be `self.offset` - but, skip whitespace as the FFP // tests use magic comments in leading markup to set flags, but blank // them out before parsing; the newlines are kept to provide correct line // numbers in errors self.skip_while_to_offset(|x| Self::is_newline(x) || Self::is_whitespace_no_newline(x)) }; let hashbang = if self.peek_def(start_offset, INVALID) == '#' && self.peek_def(start_offset + 1, INVALID) == '!' 
{ self.with_offset(start_offset); Some(self.make_hashbang_token()) } else { None }; let start_offset = self.skip_while_to_offset(|x| Self::is_newline(x) || Self::is_whitespace_no_newline(x)); let suffix = if self.peek_def(start_offset, INVALID) == '<' && self.peek_def(start_offset + 1, INVALID) == '?' { self.with_offset(start_offset); Some(self.make_markup_suffix()) } else { None }; (hashbang, suffix) } pub fn scan_header(&mut self) -> (Option<TF::Token>, Option<(TF::Token, Option<TF::Token>)>) { self.start_new_lexeme(); self.skip_to_end_of_header() } pub fn is_next_xhp_category_name(&self) -> bool { let mut lexer = self.clone(); let _ = lexer.scan_leading_php_trivia(); // An XHP category is an xhp element name preceded by a %. let ch0 = lexer.peek_char(0); let ch1 = lexer.peek_char(1); ch0 == '%' && Self::is_name_nondigit(ch1) } fn scan_xhp_category_name(&mut self) -> TokenKind { if self.is_next_xhp_category_name() { self.advance(1); let _ = self.scan_xhp_element_name(false); TokenKind::XHPCategoryName } else { self.scan_token(false) } } pub fn next_xhp_category_name(&mut self) -> TF::Token { self.scan_token_and_trivia(&Self::scan_xhp_category_name, KwSet::NoKeywords) } }
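// Example (added sketch; not part of this crate): the header scan above first
// skips blank characters, then recognizes an optional `#!` hashbang spanning
// its whole line, then skips blanks again and looks for the `<?` markup
// suffix. The standalone helpers below mirror those two checks on a raw byte
// slice; `skip_blank` and `header_shape` are hypothetical names used only for
// illustration.
fn skip_blank(src: &[u8], mut i: usize) -> usize {
    // Counterpart of skipping is_newline / is_whitespace_no_newline characters.
    while i < src.len() && matches!(src[i], b' ' | b'\t' | b'\r' | b'\n') {
        i += 1;
    }
    i
}

/// Returns (has_hashbang, has_markup_suffix) for a source prefix.
fn header_shape(src: &[u8]) -> (bool, bool) {
    let mut i = skip_blank(src, 0);
    let has_hashbang = src[i..].starts_with(b"#!");
    if has_hashbang {
        // A hashbang token spans the entire first line.
        while i < src.len() && src[i] != b'\n' {
            i += 1;
        }
    }
    i = skip_blank(src, i);
    let has_suffix = src[i..].starts_with(b"<?");
    (has_hashbang, has_suffix)
}

fn main() {
    assert_eq!(header_shape(b"#!/usr/bin/env hhvm\n<?hh\n"), (true, true));
    assert_eq!(header_shape(b"<?hh // strict\n"), (false, true));
    assert_eq!(header_shape(b"function f() {}"), (false, false));
}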
Rust
hhvm/hphp/hack/src/parser/core/lib.rs
// Copyright (c) 2019, Facebook, Inc. // All rights reserved. // // This source code is licensed under the MIT license found in the // LICENSE file in the "hack" directory of this source tree. pub mod lexer; pub use operator; pub use operator::*; pub mod parser; pub use smart_constructors; pub use smart_constructors::*; pub mod parser_trait; pub mod declaration_parser; pub mod expression_parser; pub mod pattern_parser; pub mod statement_parser; pub mod type_parser; // The "parser_core_types" crate contains data definitions for a variety of data structures // and types used by the parser and consumers of the parser. These data types were recently // split out from this crate. In order to keep the same library facade, the relevant modules // are re-exported here so that consumers do not need to be made aware of the data-code split. pub use parser_core_types::compact_token; pub use parser_core_types::compact_trivia; pub use parser_core_types::indexed_source_text; pub use parser_core_types::lexable_token; pub use parser_core_types::lexable_trivia; pub use parser_core_types::minimal_trivia; pub use parser_core_types::parser_env; pub use parser_core_types::positioned_syntax; pub use parser_core_types::positioned_token; pub use parser_core_types::positioned_trivia; pub use parser_core_types::source_text; pub use parser_core_types::syntax; pub use parser_core_types::syntax_by_ref; pub use parser_core_types::syntax_error; pub use parser_core_types::syntax_kind; pub use parser_core_types::syntax_trait; pub use parser_core_types::token_factory; pub use parser_core_types::token_kind; pub use parser_core_types::trivia_factory; pub use parser_core_types::trivia_kind;
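// Consumer-side sketch of the facade described above (the crate name
// `parser_core` below is an assumption for illustration; the actual name
// depends on the build setup). The point is that types defined in
// `parser_core_types` stay importable through this crate's re-exports:
//
//     use parser_core::token_kind::TokenKind;
//     use parser_core::source_text::SourceText;
//
// so existing consumers keep compiling even though the definitions moved.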
Rust
hhvm/hphp/hack/src/parser/core/parser.rs
// Copyright (c) 2019, Facebook, Inc. // All rights reserved. // // This source code is licensed under the MIT license found in the // LICENSE file in the "hack" directory of this source tree. use parser_core_types::source_text::SourceText; use parser_core_types::syntax_error::SyntaxError; use crate::declaration_parser::DeclarationParser; use crate::lexer::Lexer; use crate::parser_env::ParserEnv; use crate::parser_trait::Context; use crate::parser_trait::ParserTrait; use crate::smart_constructors::NodeType; use crate::smart_constructors::SmartConstructors; pub struct Parser<'a, S> where S: SmartConstructors, S::Output: NodeType, { lexer: Lexer<'a, S::Factory>, errors: Vec<SyntaxError>, env: ParserEnv, sc: S, } impl<'a, S> Parser<'a, S> where S: SmartConstructors, S::Output: NodeType, { pub fn new(source: &SourceText<'a>, env: ParserEnv, mut sc: S) -> Self { let source = source.clone(); Self { lexer: Lexer::make(&source, sc.token_factory_mut().clone()), errors: vec![], env, sc, } } pub fn into_parts(self) -> (Lexer<'a, S::Factory>, Vec<SyntaxError>, ParserEnv, S) { (self.lexer, self.errors, self.env, self.sc) } pub fn parse_header_only( env: ParserEnv, text: &'a SourceText<'a>, sc: S, ) -> Option<<S::Output as NodeType>::Output> { let (lexer, errors, env, sc) = Self::new(text, env, sc).into_parts(); let mut decl_parser: DeclarationParser<'_, S> = DeclarationParser::make(lexer, env, Context::empty(), errors, sc); decl_parser .parse_leading_markup_section() .map(|r| r.extract()) } pub fn parse_script(&mut self) -> <S::Output as NodeType>::Output { let mut decl_parser: DeclarationParser<'_, S> = DeclarationParser::make( self.lexer.clone(), self.env.clone(), Context::empty(), vec![], self.sc.clone(), ); let root = decl_parser.parse_script().extract(); let (lexer, _context, errors, sc) = decl_parser.into_parts(); self.errors = errors; self.sc = sc; self.lexer = lexer; root } pub fn errors(&self) -> Vec<SyntaxError> { let mut res = vec![]; res.extend_from_slice(self.lexer.errors()); res.extend(self.errors.clone()); res.reverse(); res } pub fn sc_state(&mut self) -> &S::State { self.sc.state_mut() } pub fn into_sc_state(self) -> S::State { self.sc.into_state() } }
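// A detail of `errors()` above worth calling out: lexer errors are appended
// first, parser errors second, and the combined vector is then reversed, so
// callers see parser errors before lexer errors, each group newest-first.
// Below is a standalone sketch of that ordering with strings in place of
// `SyntaxError` (illustrative only, not this crate's API).
fn merged_errors(lexer_errors: &[&'static str], parser_errors: &[&'static str]) -> Vec<&'static str> {
    let mut res = Vec::new();
    res.extend_from_slice(lexer_errors);
    res.extend(parser_errors.iter().copied());
    res.reverse();
    res
}

fn main() {
    let merged = merged_errors(&["lex1", "lex2"], &["parse1", "parse2"]);
    // Parser errors come out first, each list in reverse order of discovery.
    assert_eq!(merged, vec!["parse2", "parse1", "lex2", "lex1"]);
}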
Rust
hhvm/hphp/hack/src/parser/core/parser_trait.rs
// Copyright (c) 2019, Facebook, Inc. // All rights reserved. // // This source code is licensed under the MIT license found in the // LICENSE file in the "hack" directory of this source tree. use parser_core_types::lexable_token::LexableToken; use parser_core_types::lexable_trivia::LexableTrivia; use parser_core_types::syntax_error::Error; use parser_core_types::syntax_error::SyntaxError; use parser_core_types::syntax_error::{self as Errors}; use parser_core_types::token_factory::TokenFactory; use parser_core_types::token_kind::TokenKind; use parser_core_types::trivia_factory::TriviaFactory; use crate::lexer; use crate::lexer::Lexer; use crate::parser_env::ParserEnv; use crate::smart_constructors::NodeType; use crate::smart_constructors::SmartConstructors; use crate::smart_constructors::Token; use crate::smart_constructors::Trivia; #[derive(PartialEq)] pub enum SeparatedListKind { NoTrailing, TrailingAllowed, ItemsOptional, } // This could be a set of token kinds, but it's part of the parser environment that is often cloned, // so we try to keep it small. #[derive(Debug, Clone, Copy, PartialEq)] pub enum ExpectedTokens { Classish = 0b0001, Semicolon = 0b0010, RightParen = 0b0100, Visibility = 0b1000, } const ET_COUNT: u32 = 4; const ET_MASK: ETMask = (1 << ET_COUNT) - 1; type ETMask = u16; // mask of bits in first ET_COUNT bits impl ExpectedTokens { pub fn contains(mask: ETMask, token: TokenKind) -> bool { use ExpectedTokens::*; let bit: ETMask = match token { TokenKind::Class | TokenKind::Trait | TokenKind::Interface => Classish as ETMask, TokenKind::Semicolon => Semicolon as ETMask, TokenKind::RightParen => RightParen as ETMask, TokenKind::Public | TokenKind::Protected | TokenKind::Private | TokenKind::Internal => { Visibility as ETMask } _ => 0_u16, }; (bit & mask) != 0 } fn from(bit: ETMask) -> ExpectedTokens { // debug_assert!((bit & (!bit+1)) == bit, "unexpected multiple set bits in {:#b}"); use ExpectedTokens::*; match bit { 0b0001 => Classish, 0b0010 => Semicolon, 0b0100 => RightParen, _ => Visibility, } } } #[derive(Debug, Clone)] pub struct ExpectedTokenVec(Vec<ETMask>); impl ExpectedTokenVec { // Each pushed entry keeps this scope's expectation in the high ET_COUNT bits and the union of expectations across all open scopes in the low ET_COUNT bits. fn push(&mut self, et: ExpectedTokens) { let last_mask = *self.0.last().unwrap_or(&0) & ET_MASK; let bit = et as ETMask; self.0.push(bit | last_mask | (bit << ET_COUNT)); } fn pop(&mut self) -> Option<ExpectedTokens> { self.0.pop().map(|x| ExpectedTokens::from(x >> ET_COUNT)) } fn last_mask(&self) -> ETMask { self.0.last().map_or(0, |x| x >> ET_COUNT) } fn any_mask(&self) -> ETMask { self.0.last().map_or(0, |x| x & ET_MASK) } } #[derive(Debug, Clone)] pub struct Context<T> { pub expected: ExpectedTokenVec, pub skipped_tokens: Vec<T>, } impl<T> Context<T> { pub fn empty() -> Self { Self { expected: ExpectedTokenVec(vec![]), skipped_tokens: vec![], } } fn expect_in_new_scope(&mut self, expected: ExpectedTokens) { self.expected.push(expected); } fn pop_scope(&mut self, expected: ExpectedTokens) { let scope = self.expected.pop().unwrap(); assert_eq!(expected, scope) } fn expects(&self, token_kind: TokenKind) -> bool { ExpectedTokens::contains(self.expected.any_mask(), token_kind) } fn expects_here(&self, token_kind: TokenKind) -> bool { ExpectedTokens::contains(self.expected.last_mask(), token_kind) } } pub trait ParserTrait<'a, S>: Clone where S: SmartConstructors, <S as SmartConstructors>::Output: NodeType, { fn make( _: Lexer<'a, S::Factory>, _: ParserEnv, _: Context<Token<S>>, _: Vec<SyntaxError>, _: S, ) -> Self; fn add_error(&mut self, _: SyntaxError); fn into_parts( self, ) -> ( Lexer<'a,
S::Factory>, Context<Token<S>>, Vec<SyntaxError>, S, ); fn lexer(&self) -> &Lexer<'a, S::Factory>; fn lexer_mut(&mut self) -> &mut Lexer<'a, S::Factory>; fn continue_from<P: ParserTrait<'a, S>>(&mut self, _: P); fn env(&self) -> &ParserEnv; fn sc_mut(&mut self) -> &mut S; fn skipped_tokens(&self) -> &[Token<S>]; fn drain_skipped_tokens(&mut self) -> std::vec::Drain<'_, Token<S>>; fn context_mut(&mut self) -> &mut Context<Token<S>>; fn context(&self) -> &Context<Token<S>>; fn pos(&self) -> usize { self.lexer().offset() } fn add_skipped_token(&mut self, token: Token<S>) { self.context_mut().skipped_tokens.push(token) } fn expects(&self, kind: TokenKind) -> bool { self.context().expects(kind) } fn expects_here(&self, kind: TokenKind) -> bool { self.context().expects_here(kind) } fn expect_in_new_scope(&mut self, expected: ExpectedTokens) { self.context_mut().expect_in_new_scope(expected) } fn pop_scope(&mut self, expected: ExpectedTokens) { self.context_mut().pop_scope(expected) } // This function reports an error starting at the current location of the // parser. Setting on_whole_token=false will report the error only on trivia, // which is useful in cases such as when "a semicolon is expected here" before // the current node. However, setting on_whole_token=true will report the error // only on the non-trivia text of the next token parsed, which is useful // in cases like "flagging an entire token as an extra". fn with_error_impl(&mut self, on_whole_token: bool, message: Error) { let (start_offset, end_offset) = self.error_offsets(on_whole_token); let error = SyntaxError::make(start_offset, end_offset, message, vec![]); self.add_error(error) } fn with_error(&mut self, message: Error) { self.with_error_impl(false, message) } fn with_error_on_whole_token(&mut self, message: Error) { self.with_error_impl(true, message) } fn next_token_with_tokenizer<F>(&mut self, tokenizer: F) -> Token<S> where F: Fn(&mut Lexer<'a, S::Factory>) -> Token<S>, { let token = tokenizer(self.lexer_mut()); if !self.skipped_tokens().is_empty() { let start = self.lexer().start(); let mut leading = self .sc_mut() .token_factory_mut() .trivia_factory_mut() .make(); for t in self.drain_skipped_tokens() { let (t_leading, t_width, t_trailing) = t.into_trivia_and_width(); leading.extend(t_leading); leading.push(Trivia::<S>::make_extra_token_error(start, t_width)); leading.extend(t_trailing); } leading.extend(token.clone_leading()); self.sc_mut() .token_factory_mut() .with_leading(token, leading) } else { token } } fn next_token(&mut self) -> Token<S> { self.next_token_with_tokenizer(|x| x.next_token()) } fn next_token_no_trailing(&mut self) -> Token<S> { self.lexer_mut().next_token_no_trailing() } fn next_docstring_header(&mut self) -> (Token<S>, &'a [u8]) { self.lexer_mut().next_docstring_header() } fn next_token_in_string(&mut self, literal_kind: &lexer::StringLiteralKind) -> Token<S> { self.lexer_mut().next_token_in_string(literal_kind) } fn next_xhp_class_name_or_other(&mut self) -> S::Output { let token = self.next_xhp_class_name_or_other_token(); match token.kind() { TokenKind::Namespace | TokenKind::Name => { let name_token = self.sc_mut().make_token(token); self.scan_remaining_qualified_name(name_token) } TokenKind::Backslash => { let pos = self.pos(); let missing = self.sc_mut().make_missing(pos); let backslash = self.sc_mut().make_token(token); self.scan_qualified_name(missing, backslash) } _ => self.sc_mut().make_token(token), } } fn next_xhp_children_name_or_other(&mut self) -> Token<S> { if 
self.is_next_xhp_category_name() { self.next_xhp_category_name() } else if self.env().enable_xhp_class_modifier { self.next_xhp_modifier_class_name_or_other_token() } else { self.next_xhp_class_name_or_other_token() } } // Used in conjunction with the following function. If you call next_token // when the parser is at the <<<, it will scan the entire file looking for an // ending to the heredoc, which could quickly get bad if there are many such // declarations in a file. fn peek_next_partial_token_is_triple_left_angle(&self) -> bool { let mut lexer = self.lexer().clone(); lexer.scan_leading_php_trivia(); let tparam_open = lexer.peek_char(0); let attr1 = lexer.peek_char(1); let attr2 = lexer.peek_char(2); tparam_open == '<' && attr1 == '<' && attr2 == '<' } // Type parameter/argument lists begin with < and can have attributes immediately // afterwards, so this peeks a token kind at the beginning of such a list. fn peek_token_kind_with_possible_attributized_type_list(&self) -> TokenKind { if self.peek_next_partial_token_is_triple_left_angle() { TokenKind::LessThan } else { self.peek_token_kind() } } // In the case of attributes on generics, one could write // function f<<<__Attr>> reify T, ...> or Awaitable<<<__Soft>> int> // The triple left angle is currently lexed as a HeredocStringLiteral, // but we can get around this by manually advancing the lexer one token // and returning a LeftAngle. Then, the next token will be a LeftAngleLeftAngle. fn assert_left_angle_in_type_list_with_possible_attribute(&mut self) -> S::Output { let parser1 = self.clone(); let lexer = self.lexer_mut(); lexer.scan_leading_php_trivia(); let tparam_open = lexer.peek_char(0); let attr1 = lexer.peek_char(1); let attr2 = lexer.peek_char(2); if tparam_open == '<' && attr1 == '<' && attr2 == '<' { lexer.advance(1); let start = lexer.start(); let token_factory = self.sc_mut().token_factory_mut(); let leading = token_factory.trivia_factory_mut().make(); let trailing = token_factory.trivia_factory_mut().make(); let token = token_factory.make(TokenKind::LessThan, start, 1, leading, trailing); self.sc_mut().make_token(token) } else { self.continue_from(parser1); self.assert_token(TokenKind::LessThan) } } fn assert_xhp_body_token(&mut self, kind: TokenKind) -> S::Output { self.assert_token_with_tokenizer(kind, |x: &mut Lexer<'a, S::Factory>| { x.next_xhp_body_token() }) } fn peek_token_with_lookahead(&self, lookahead: usize) -> Token<S> { let mut lexer = self.lexer().clone(); let mut i = 0; loop { if i == lookahead { // call peek_next_token instead of next_token for the last one to leverage // lexer caching return lexer.peek_next_token(); } let _ = lexer.next_token(); i += 1 } } fn peek_token(&self) -> Token<S> { self.lexer().peek_next_token() } fn peek_token_kind(&self) -> TokenKind { self.peek_token().kind() } fn peek_token_kind_with_lookahead(&self, lookahead: usize) -> TokenKind { self.peek_token_with_lookahead(lookahead).kind() } fn fetch_token(&mut self) -> S::Output { let token = self.lexer_mut().next_token(); self.sc_mut().make_token(token) } fn assert_token_with_tokenizer<F>(&mut self, kind: TokenKind, tokenizer: F) -> S::Output where F: Fn(&mut Lexer<'a, S::Factory>) -> Token<S>, { let token = self.next_token_with_tokenizer(tokenizer); if token.kind() != kind { panic!( "Expected {:?}, but got {:?}.
This indicates a bug in the parser, regardless of how broken the input code is.", kind, token.kind() ) } self.sc_mut().make_token(token) } fn assert_token(&mut self, kind: TokenKind) -> S::Output { self.assert_token_with_tokenizer(kind, |x: &mut Lexer<'_, S::Factory>| x.next_token()) } fn token_text(&self, token: &Token<S>) -> &'a str { match token.leading_start_offset() { None => "", // unavailable for minimal tokens Some(leading_start_offset) => unsafe { std::str::from_utf8_unchecked( self.lexer() .source() .sub(leading_start_offset + token.leading_width(), token.width()), ) }, } } fn current_token_text(&self) -> &'a str { self.token_text(&self.peek_token()) } // If the next token is a name or keyword, scan it as a name. fn next_token_as_name(&mut self) -> Token<S> { // TODO: This isn't right. Pass flags to the lexer. self.lexer_mut().next_token_as_name() } fn optional_token(&mut self, kind: TokenKind) -> S::Output { if self.peek_token_kind() == kind { let token = self.next_token(); self.sc_mut().make_token(token) } else { let pos = self.pos(); self.sc_mut().make_missing(pos) } } fn scan_qualified_name_worker( &mut self, mut name_opt: Option<S::Output>, mut parts: Vec<S::Output>, mut has_backslash: bool, ) -> (Vec<S::Output>, Option<S::Output>, bool) { loop { let mut parser1 = self.clone(); let token = if parser1.is_next_xhp_class_name() { parser1.next_xhp_class_name() } else { parser1.next_token_as_name() }; match (name_opt.is_some(), token.kind()) { (true, TokenKind::Backslash) => { // found backslash, create item and recurse self.continue_from(parser1); let token = self.sc_mut().make_token(token); let part = self.sc_mut().make_list_item(name_opt.unwrap(), token); parts.push(part); has_backslash = true; name_opt = None; } (false, TokenKind::Name) => { // found a name, recurse to look for backslash self.continue_from(parser1); let token = self.sc_mut().make_token(token); name_opt = Some(token); has_backslash = false; } (true, _) if parts.is_empty() => { // have not found anything - return [] to indicate failure return (parts, name_opt, false); } (true, _) => { // next token is not part of qualified name but we've consumed some // part of the input - create part for name with missing backslash // and return accumulated result let pos = self.pos(); let missing = self.sc_mut().make_missing(pos); let part = self.sc_mut().make_list_item(name_opt.unwrap(), missing); // TODO(T25649779) parts.push(part); return (parts, None, false); } _ => { // next token is not part of qualified name - return accumulated result return (parts, name_opt, has_backslash); } } } } fn scan_remaining_qualified_name_extended( &mut self, name_token: S::Output, ) -> (S::Output, bool) { let (parts, name_token_opt, is_backslash) = self.scan_qualified_name_worker(Some(name_token), vec![], false); if parts.is_empty() { (name_token_opt.unwrap(), is_backslash) } else { let pos = self.pos(); let list_node = self.sc_mut().make_list(parts, pos); let name = self.sc_mut().make_qualified_name(list_node); (name, is_backslash) } } fn scan_qualified_name_extended( &mut self, missing: S::Output, backslash: S::Output, ) -> (S::Output, bool) { let head = self.sc_mut().make_list_item(missing, backslash); let parts = vec![head]; let (parts, _, is_backslash) = self.scan_qualified_name_worker(None, parts, false); let pos = self.pos(); let list_node = self.sc_mut().make_list(parts, pos); let name = self.sc_mut().make_qualified_name(list_node); (name, is_backslash) } fn scan_qualified_name(&mut self, missing: S::Output, backslash: S::Output) ->
S::Output { let (name, _) = self.scan_qualified_name_extended(missing, backslash); name } // If the next token is a name or a non-reserved keyword, scan it as // a name; otherwise scan it as a keyword. // // NB: A "reserved" keyword is in practice a keyword that cannot be used // as a class name or function name, for example, control flow keywords or // declaration keywords are reserved. fn next_token_non_reserved_as_name(&mut self) -> Token<S> { self.next_token_with_tokenizer(|l| l.next_token_non_reserved_as_name()) } fn scan_header(&mut self) -> (Option<Token<S>>, Option<(Token<S>, Option<Token<S>>)>) { self.lexer_mut().scan_header() } fn error_offsets(&mut self, on_whole_token: bool /* = false */) -> (usize, usize) { if on_whole_token { let token = self.peek_token(); let start_offset = self.lexer().offset() + token.leading_width(); let end_offset = start_offset + token.width(); (start_offset, end_offset) } else { let start_offset = self.lexer().start(); let end_offset = self.lexer().offset(); (start_offset, end_offset) } } fn scan_name_or_qualified_name(&mut self) -> S::Output { let mut parser1 = self.clone(); let token = parser1.next_token_non_reserved_as_name(); match token.kind() { TokenKind::Namespace | TokenKind::Name => { self.continue_from(parser1); let token = self.sc_mut().make_token(token); self.scan_remaining_qualified_name(token) } TokenKind::Backslash => { self.continue_from(parser1); let pos = self.pos(); let missing = self.sc_mut().make_missing(pos); let token = self.sc_mut().make_token(token); self.scan_qualified_name(missing, token) } _ => { let pos = self.pos(); self.sc_mut().make_missing(pos) } } } fn parse_alternate_if_block<F>(&mut self, parse_item: F) -> S::Output where F: Fn(&mut Self) -> S::Output, { let mut parser1 = self.clone(); let block = parser1.parse_list_while(parse_item, |x: &Self| match x.peek_token_kind() { TokenKind::Else | TokenKind::Endif => false, _ => true, }); if block.is_missing() { let pos = self.pos(); let empty1 = self.sc_mut().make_missing(pos); let pos = self.pos(); let empty2 = self.sc_mut().make_missing(pos); let es = self.sc_mut().make_expression_statement(empty1, empty2); let pos = self.pos(); self.sc_mut().make_list(vec![es], pos) } else { self.continue_from(parser1); block } } fn parse_separated_list<F>( &mut self, separator_kind: TokenKind, allow_trailing: SeparatedListKind, close_kind: TokenKind, error: Error, parse_item: F, ) -> (S::Output, bool) where F: Fn(&mut Self) -> S::Output, { let (x, y, _) = self.parse_separated_list_predicate( |x| x == separator_kind, allow_trailing, |x| x == close_kind, error, parse_item, ); (x, y) } fn require_qualified_name(&mut self) -> S::Output { let mut parser1 = self.clone(); let name = if parser1.is_next_xhp_class_name() { parser1.next_xhp_class_name() } else { parser1.next_token_non_reserved_as_name() }; match name.kind() { TokenKind::Namespace | TokenKind::Name | TokenKind::XHPClassName => { self.continue_from(parser1); let token = self.sc_mut().make_token(name); self.scan_remaining_qualified_name(token) } TokenKind::Backslash => { self.continue_from(parser1); let pos = self.pos(); let missing = self.sc_mut().make_missing(pos); let backslash = self.sc_mut().make_token(name); self.scan_qualified_name(missing, backslash) } _ => { self.with_error(Errors::error1004); let pos = self.pos(); self.sc_mut().make_missing(pos) } } } fn require_qualified_module_name(&mut self) -> S::Output { let mut parts = vec![]; loop { let name = self.require_name_allow_non_reserved(); if name.is_missing() { break; } let dot =
self.optional_token(TokenKind::Dot); let dot_is_missing = dot.is_missing(); parts.push(self.sc_mut().make_list_item(name, dot)); if dot_is_missing { break; } } let pos = self.pos(); let list_node = self.sc_mut().make_list(parts, pos); self.sc_mut().make_module_name(list_node) } fn require_qualified_referenced_module_name(&mut self) -> S::Output { let mut parts = vec![]; if self.peek_token_kind() == TokenKind::Global { let global = self.require_token(TokenKind::Global, Errors::error1004); let pos = self.pos(); let missing = self.sc_mut().make_missing(pos); parts.push(self.sc_mut().make_list_item(global, missing)); } else { loop { let next_token_kind = self.peek_token_kind(); if next_token_kind == TokenKind::Star { let star = self.require_token(TokenKind::Star, Errors::error1004); let pos = self.pos(); let missing = self.sc_mut().make_missing(pos); parts.push(self.sc_mut().make_list_item(star, missing)); break; } let name = if next_token_kind == TokenKind::SelfToken && parts.is_empty() { self.require_token(TokenKind::SelfToken, Errors::error1004) } else { self.require_token(TokenKind::Name, Errors::error1004) }; if name.is_missing() { break; } let dot = self.optional_token(TokenKind::Dot); let dot_is_missing = dot.is_missing(); parts.push(self.sc_mut().make_list_item(name, dot)); if dot_is_missing { break; } } } let pos = self.pos(); let list_node = self.sc_mut().make_list(parts, pos); self.sc_mut().make_module_name(list_node) } fn require_name(&mut self) -> S::Output { self.require_token(TokenKind::Name, Errors::error1004) } fn require_xhp_class_name(&mut self) -> S::Output { let token = self.next_xhp_modifier_class_name(); self.sc_mut().make_token(token) } fn require_xhp_class_name_or_name(&mut self) -> S::Output { if self.is_next_xhp_class_name() { let token = self.next_xhp_class_name(); self.sc_mut().make_token(token) } else { self.require_token(TokenKind::Name, Errors::error1004) } } /// Require that the next node is either: /// - A normal class name (`\w+`) /// - An XHP class name (`(:(\w-)+)+`) fn require_maybe_xhp_class_name(&mut self) -> S::Output { if self.is_next_xhp_class_name() { let token = self.next_xhp_class_name(); self.sc_mut().make_token(token) } else { self.require_name_allow_non_reserved() } } fn require_function(&mut self) -> S::Output { self.require_token(TokenKind::Function, Errors::error1003) } fn require_variable(&mut self) -> S::Output { self.require_token(TokenKind::Variable, Errors::error1008) } fn require_colon(&mut self) -> S::Output { self.require_token(TokenKind::Colon, Errors::error1020) } fn require_left_brace(&mut self) -> S::Output { self.require_token(TokenKind::LeftBrace, Errors::error1034) } fn require_slashgt(&mut self) -> S::Output { self.require_token(TokenKind::SlashGreaterThan, Errors::error1029) } fn require_right_brace(&mut self) -> S::Output { self.require_token(TokenKind::RightBrace, Errors::error1006) } fn require_left_paren(&mut self) -> S::Output { self.require_token(TokenKind::LeftParen, Errors::error1019) } fn require_left_angle(&mut self) -> S::Output { self.require_token(TokenKind::LessThan, Errors::error1021) } fn require_right_angle(&mut self) -> S::Output { self.require_token(TokenKind::GreaterThan, Errors::error1013) } fn require_comma(&mut self) -> S::Output { self.require_token(TokenKind::Comma, Errors::error1054) } fn require_right_bracket(&mut self) -> S::Output { self.require_token(TokenKind::RightBracket, Errors::error1032) } fn require_equal(&mut self) -> S::Output { self.require_token(TokenKind::Equal, Errors::error1036) } 
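// Worked example (annotation added for clarity; not in the original source):
// for the input `a.b.c`, `require_qualified_module_name` above yields the
// list items ("a", "."), ("b", "."), ("c", <missing>); the loop stops as
// soon as the optional trailing dot comes back missing, and the items are
// then wrapped in a module name node. `require_qualified_referenced_module_name`
// accepts the same shape plus `global`, a leading `self`, and a terminating
// `*`, as in `self.x.*`.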
fn require_arrow(&mut self) -> S::Output { self.require_token(TokenKind::EqualGreaterThan, Errors::error1028) } fn require_lambda_arrow(&mut self) -> S::Output { self.require_token(TokenKind::EqualEqualGreaterThan, Errors::error1046) } fn require_as(&mut self) -> S::Output { self.require_token(TokenKind::As, Errors::error1023) } fn require_while(&mut self) -> S::Output { self.require_token(TokenKind::While, Errors::error1018) } fn require_coloncolon(&mut self) -> S::Output { self.require_token(TokenKind::ColonColon, Errors::error1047) } fn require_name_or_variable_or_error(&mut self, error: Error) -> S::Output { let mut parser1 = self.clone(); let token = parser1.next_token_as_name(); match token.kind() { TokenKind::Namespace | TokenKind::Name => { self.continue_from(parser1); let token = self.sc_mut().make_token(token); self.scan_remaining_qualified_name(token) } TokenKind::Variable => { self.continue_from(parser1); self.sc_mut().make_token(token) } _ => { // ERROR RECOVERY: Create a missing token for the expected token, // and continue on from the current token. Don't skip it. self.with_error(error); let pos = self.pos(); self.sc_mut().make_missing(pos) } } } fn require_name_or_variable(&mut self) -> S::Output { self.require_name_or_variable_or_error(Errors::error1050) } fn require_xhp_class_name_or_name_or_variable(&mut self) -> S::Output { if self.is_next_xhp_class_name() { let token = self.next_xhp_class_name(); self.sc_mut().make_token(token) } else { self.require_name_or_variable() } } fn require_name_allow_non_reserved(&mut self) -> S::Output { let mut parser1 = self.clone(); let token = parser1.next_token_non_reserved_as_name(); if token.kind() == TokenKind::Name { self.continue_from(parser1); self.sc_mut().make_token(token) } else { // ERROR RECOVERY: Create a missing token for the expected token, // and continue on from the current token. Don't skip it. self.with_error(Errors::error1004); let pos = self.pos(); self.sc_mut().make_missing(pos) } } fn next_xhp_category_name(&mut self) -> Token<S> { self.lexer_mut().next_xhp_category_name() } // We have a number of issues involving xhp class names, which begin with // a colon and may contain internal colons and dashes. These are some // helper methods to deal with them. fn is_next_name(&mut self) -> bool { self.lexer().is_next_name() } fn next_xhp_name(&mut self) -> Token<S> { assert!(self.is_next_name()); self.lexer_mut().next_xhp_name() } fn next_xhp_class_name(&mut self) -> Token<S> { assert!(self.is_next_xhp_class_name()); self.lexer_mut().next_xhp_class_name() } fn next_xhp_modifier_class_name(&mut self) -> Token<S> { self.lexer_mut().next_xhp_modifier_class_name() } fn require_xhp_name(&mut self) -> S::Output { if self.is_next_name() { let token = self.next_xhp_name(); self.sc_mut().make_token(token) } else { // ERROR RECOVERY: Create a missing token for the expected token, // and continue on from the current token. Don't skip it. // TODO: Different error? 
self.with_error(Errors::error1004); let pos = self.pos(); self.sc_mut().make_missing(pos) } } fn is_next_xhp_category_name(&mut self) -> bool { self.lexer().is_next_xhp_category_name() } fn parse_comma_list_allow_trailing<F>( &mut self, close_predicate: TokenKind, error: Error, parse_item: F, ) -> (S::Output, bool) where F: Fn(&mut Self) -> S::Output, { self.parse_separated_list( TokenKind::Comma, SeparatedListKind::TrailingAllowed, close_predicate, error, parse_item, ) } fn parse_comma_list_allow_trailing_opt<F>( &mut self, close_predicate: TokenKind, error: Error, parse_item: F, ) -> S::Output where F: Fn(&mut Self) -> S::Output, { self.parse_separated_list_opt( TokenKind::Comma, SeparatedListKind::TrailingAllowed, close_predicate, error, parse_item, ) } fn parse_separated_list_predicate<P, SP, F>( &mut self, separator_predicate: SP, list_kind: SeparatedListKind, close_predicate: P, error: Error, parse_item: F, ) -> (S::Output, bool, TokenKind) where P: Fn(TokenKind) -> bool, SP: Fn(TokenKind) -> bool, F: Fn(&mut Self) -> S::Output, { let mut items = vec![]; // Set this when we first see a separator let mut separator_kind = TokenKind::Empty; loop { // At this point we are expecting an item followed by a separator, // a close, or, if trailing separators are allowed, both let kind = self.peek_token_kind(); if close_predicate(kind) || kind == TokenKind::EndOfFile { // ERROR RECOVERY: We expected an item but we found a close or // the end of the file. Make the item and separator both // "missing" and give an error. // // If items are optional and we found a close, the last item was // omitted and there was no error. if kind == TokenKind::EndOfFile || list_kind != SeparatedListKind::ItemsOptional { self.with_error(error) }; let pos = self.pos(); let missing1 = self.sc_mut().make_missing(pos); let pos = self.pos(); let missing2 = self.sc_mut().make_missing(pos); let list_item = self.sc_mut().make_list_item(missing1, missing2); // TODO(T25649779) items.push(list_item); break; } else if separator_predicate(kind) { if separator_kind == TokenKind::Empty { separator_kind = kind; } else if separator_kind != kind { self.with_error(Errors::error1063); } // ERROR RECOVERY: We expected an item but we got a separator. // Assume the item was missing, eat the separator, and move on. // // If items are optional, there was no error, so eat the separator and // continue. // // TODO: This could be poor recovery. For example: // // function bar (Foo< , int blah) // // Plainly the type arg is missing, but the comma is not associated with // the type argument list, it's associated with the formal // parameter list. let token = self.next_token(); if list_kind != SeparatedListKind::ItemsOptional { self.with_error(error.clone()) } let pos = self.pos(); let item = self.sc_mut().make_missing(pos); let separator = self.sc_mut().make_token(token); let list_item = self.sc_mut().make_list_item(item, separator); // TODO(T25649779) items.push(list_item) } else { // We got neither a close nor a separator; hopefully we're going // to parse an item followed by a close or separator. 
let item = parse_item(self); let kind = self.peek_token_kind(); if close_predicate(kind) { let pos = self.pos(); let missing = self.sc_mut().make_missing(pos); let list_item = self.sc_mut().make_list_item(item, missing); // TODO(T25649779) items.push(list_item); break; } else if separator_predicate(kind) { if separator_kind == TokenKind::Empty { separator_kind = kind; } else if separator_kind != kind { self.with_error(Errors::error1063); } let token = self.next_token(); let separator = self.sc_mut().make_token(token); let list_item = self.sc_mut().make_list_item(item, separator); // TODO(T25649779) items.push(list_item); let allow_trailing = list_kind != SeparatedListKind::NoTrailing; // We got an item followed by a separator; what if the thing // that comes next is a close? if allow_trailing && close_predicate(self.peek_token_kind()) { break; } } else { // ERROR RECOVERY: We were expecting a close or separator, but // got neither. Bail out. Caller will give an error. let pos = self.pos(); let missing = self.sc_mut().make_missing(pos); let list_item = self.sc_mut().make_list_item(item, missing); // TODO(T25649779) items.push(list_item); break; } } } let no_arg_is_missing = items.iter().all(|x| !x.is_missing()); let pos = self.pos(); let item_list = self.sc_mut().make_list(items, pos); (item_list, no_arg_is_missing, separator_kind) } fn parse_list_until_none<F>(&mut self, parse_item: F) -> S::Output where F: Fn(&mut Self) -> Option<S::Output>, { let mut acc = vec![]; loop { let maybe_item = parse_item(self); match maybe_item { None => break, Some(item) => { let is_missing = item.is_missing(); acc.push(item); if self.peek_token_kind() == TokenKind::EndOfFile || // exit if parser did not make any progress is_missing { break; } } } } let pos = self.pos(); self.sc_mut().make_list(acc, pos) } fn parse_separated_list_opt_predicate<P, F>( &mut self, separator_kind: TokenKind, allow_trailing: SeparatedListKind, close_predicate: P, error: Error, parse_item: F, ) -> S::Output where P: Fn(TokenKind) -> bool, F: Fn(&mut Self) -> S::Output, { let kind = self.peek_token_kind(); if close_predicate(kind) { let pos = self.pos(); self.sc_mut().make_missing(pos) } else { let (items, _, _) = self.parse_separated_list_predicate( |x| x == separator_kind, allow_trailing, close_predicate, error, parse_item, ); items } } fn is_next_xhp_class_name(&self) -> bool { self.lexer().is_next_xhp_class_name() } fn next_xhp_modifier_class_name_or_other_token(&mut self) -> Token<S> { if self.is_next_name() { self.next_xhp_modifier_class_name() } else { self.next_token() } } fn next_xhp_class_name_or_other_token(&mut self) -> Token<S> { if self.is_next_xhp_class_name() { self.next_xhp_class_name() } else { self.next_token() } } fn parse_separated_list_opt<F>( &mut self, separator_kind: TokenKind, allow_trailing: SeparatedListKind, close_kind: TokenKind, error: Error, parse_item: F, ) -> S::Output where F: Fn(&mut Self) -> S::Output, { self.parse_separated_list_opt_predicate( separator_kind, allow_trailing, |x| x == close_kind, error, parse_item, ) } fn parse_comma_list_opt_allow_trailing<F>( &mut self, close_kind: TokenKind, error: Error, parse_item: F, ) -> S::Output where F: Fn(&mut Self) -> S::Output, { self.parse_separated_list_opt( TokenKind::Comma, SeparatedListKind::TrailingAllowed, close_kind, error, parse_item, ) } fn parse_comma_list_opt<F>( &mut self, close_kind: TokenKind, error: Error, parse_item: F, ) -> S::Output where F: Fn(&mut Self) -> S::Output, { self.parse_separated_list_opt( TokenKind::Comma, 
SeparatedListKind::NoTrailing, close_kind, error, parse_item, ) } fn parse_comma_list_opt_items_opt<F>( &mut self, close_kind: TokenKind, error: Error, parse_item: F, ) -> S::Output where F: Fn(&mut Self) -> S::Output, { self.parse_separated_list_opt( TokenKind::Comma, SeparatedListKind::ItemsOptional, close_kind, error, parse_item, ) } fn parse_comma_list_opt_allow_trailing_predicate<P, F>( &mut self, close_kind: P, error: Error, parse_item: F, ) -> S::Output where P: Fn(TokenKind) -> bool, F: Fn(&mut Self) -> S::Output, { self.parse_separated_list_opt_predicate( TokenKind::Comma, SeparatedListKind::TrailingAllowed, close_kind, error, parse_item, ) } fn parse_comma_list<F>( &mut self, close_kind: TokenKind, error: Error, parse_item: F, ) -> S::Output where F: Fn(&mut Self) -> S::Output, { let (items, _) = self.parse_separated_list( TokenKind::Comma, SeparatedListKind::NoTrailing, close_kind, error, parse_item, ); items } fn parse_delimited_list<P>( &mut self, left_kind: TokenKind, left_error: Error, right_kind: TokenKind, right_error: Error, parse_items: P, ) -> (S::Output, S::Output, S::Output) where P: FnOnce(&mut Self) -> S::Output, { let left = self.require_token(left_kind, left_error); let items = parse_items(self); let right = self.require_token(right_kind, right_error); (left, items, right) } fn parse_braced_list<P>(&mut self, parse_items: P) -> (S::Output, S::Output, S::Output) where P: FnOnce(&mut Self) -> S::Output, { self.parse_delimited_list( TokenKind::LeftBrace, Errors::error1034, TokenKind::RightBrace, Errors::error1006, parse_items, ) } fn parse_parenthesized_list<F>(&mut self, parse_items: F) -> (S::Output, S::Output, S::Output) where F: FnOnce(&mut Self) -> S::Output, { self.parse_delimited_list( TokenKind::LeftParen, Errors::error1019, TokenKind::RightParen, Errors::error1011, parse_items, ) } fn parse_parenthesized_comma_list<F>( &mut self, parse_item: F, ) -> (S::Output, S::Output, S::Output) where F: Fn(&mut Self) -> S::Output, { let parse_items = |x: &mut Self| x.parse_comma_list(TokenKind::RightParen, Errors::error1011, parse_item); self.parse_parenthesized_list(parse_items) } fn parse_parenthesized_comma_list_opt_allow_trailing<F>( &mut self, parse_item: F, ) -> (S::Output, S::Output, S::Output) where F: Fn(&mut Self) -> S::Output, { let parse_items = |x: &mut Self| { x.parse_comma_list_opt_allow_trailing( TokenKind::RightParen, Errors::error1011, parse_item, ) }; self.parse_parenthesized_list(parse_items) } fn parse_parenthesized_comma_list_opt_items_opt<F>( &mut self, parse_item: F, ) -> (S::Output, S::Output, S::Output) where F: Fn(&mut Self) -> S::Output, { let parse_items = |x: &mut Self| { x.parse_comma_list_opt_items_opt(TokenKind::RightParen, Errors::error1011, parse_item) }; self.parse_parenthesized_list(parse_items) } fn parse_braced_comma_list_opt_allow_trailing<F>( &mut self, parse_item: F, ) -> (S::Output, S::Output, S::Output) where F: Fn(&mut Self) -> S::Output, { let parse_items = |parser: &mut Self| { parser.parse_comma_list_opt_allow_trailing( TokenKind::RightBrace, Errors::error1006, parse_item, ) }; self.parse_braced_list(parse_items) } fn parse_bracketted_list<F>(&mut self, parse_items: F) -> (S::Output, S::Output, S::Output) where F: FnOnce(&mut Self) -> S::Output, { self.parse_delimited_list( TokenKind::LeftBracket, Errors::error1026, TokenKind::RightBracket, Errors::error1031, parse_items, ) } fn parse_bracketted_comma_list_opt_allow_trailing<F>( &mut self, parse_item: F, ) -> (S::Output, S::Output, S::Output) where F: Fn(&mut Self) -> 
S::Output, { let parse_items = |x: &mut Self| { x.parse_comma_list_opt_allow_trailing( TokenKind::RightBracket, Errors::error1031, parse_item, ) }; self.parse_bracketted_list(parse_items) } fn parse_double_angled_list<F>(&mut self, parse_items: F) -> (S::Output, S::Output, S::Output) where F: FnOnce(&mut Self) -> S::Output, { self.parse_delimited_list( TokenKind::LessThanLessThan, Errors::error1029, TokenKind::GreaterThanGreaterThan, Errors::error1029, parse_items, ) } fn parse_double_angled_comma_list_allow_trailing<F>( &mut self, parse_item: F, ) -> (S::Output, S::Output, S::Output) where F: Fn(&mut Self) -> S::Output, { let parse_items = |x: &mut Self| { let (items, _) = x.parse_comma_list_allow_trailing( TokenKind::GreaterThanGreaterThan, Errors::error1029, parse_item, ); items }; self.parse_double_angled_list(parse_items) } fn scan_remaining_qualified_name(&mut self, name_token: S::Output) -> S::Output { let (name, _) = self.scan_remaining_qualified_name_extended(name_token); name } // Parse with parse_item while a condition is met. fn parse_list_while<F, P>(&mut self, mut parse_item: F, predicate: P) -> S::Output where F: FnMut(&mut Self) -> S::Output, P: Fn(&Self) -> bool, { let mut items = vec![]; loop { if self.peek_token_kind() == TokenKind::EndOfFile || !predicate(self) { break; }; let lexer_before = self.lexer().clone(); let result = parse_item(self); if result.is_missing() { // ERROR RECOVERY: If the item was parsed as 'missing', then it means // the parser bailed out of that scope. So, pass on whatever's been // accumulated so far, but with a 'Missing' SyntaxNode appended. items.push(result); break; } if lexer_before.start() == self.lexer().start() && lexer_before.offset() == self.lexer().offset() { // INFINITE LOOP PREVENTION: If parse_item does not actually make // progress, just bail items.push(result); break; } // Or if nothing's wrong, continue. items.push(result) } let pos = self.pos(); self.sc_mut().make_list(items, pos) } fn parse_terminated_list<F>(&mut self, parse_item: F, terminator: TokenKind) -> S::Output where F: FnMut(&mut Self) -> S::Output, { let predicate = |x: &Self| x.peek_token_kind() != terminator; self.parse_list_while(parse_item, predicate) } fn skip_and_log_unexpected_token(&mut self, generate_error: bool) { if generate_error { let extra_str = &self.current_token_text(); self.with_error_on_whole_token(Errors::error1057(extra_str)) }; let token = self.next_token(); self.add_skipped_token(token) } // Returns true if the strings underlying two tokens are of the same length // and differ in at most one character. fn one_character_different<'b>(str1: &'b [u8], str2: &'b [u8]) -> bool { if str1.len() != str2.len() { false } else { // both strings have same length let str_len = str1.len(); for i in 0..str_len { if str1[i] != str2[i] { // Allow only one mistake return str1[i + 1..] == str2[i + 1..]; } } true } } // Compare the text of the token we have in hand to the text of the // anticipated kind. Note: this automatically returns false for any // TokenKinds of length 1.
fn is_misspelled_kind(kind: TokenKind, token_str: &str) -> bool { let tokenkind_str = kind.to_string().as_bytes(); let token_str = token_str.as_bytes(); if tokenkind_str.len() <= 1 { false } else { Self::one_character_different(tokenkind_str, token_str) } } fn is_misspelled_from<'b>(kind_list: &[TokenKind], token_str: &'b str) -> bool { kind_list .iter() .any(|x| Self::is_misspelled_kind(*x, token_str)) } // If token_str is a misspelling (by our narrow definition of misspelling) // of a TokenKind from kind_list, return the TokenKind that token_str is a // misspelling of. Otherwise, return None. fn suggested_kind_from(kind_list: &[TokenKind], token_str: &str) -> Option<TokenKind> { kind_list.iter().find_map(|x| { if Self::is_misspelled_kind(*x, token_str) { Some(*x) } else { None } }) } fn skip_and_log_misspelled_token(&mut self, required_kind: TokenKind) { let received_str = &self.current_token_text(); let required_str = required_kind.to_string(); self.with_error_on_whole_token(Errors::error1058(received_str, required_str)); self.skip_and_log_unexpected_token(/* generate_error:*/ false) } fn require_token_one_of(&mut self, kinds: &[TokenKind], error: Error) -> S::Output { let token_kind = self.peek_token_kind(); if kinds.iter().any(|x| *x == token_kind) { let token = self.next_token(); self.sc_mut().make_token(token) } else { // ERROR RECOVERY: Look at the next token after this. Is it the one we // require? If so, process the current token as extra and return the next // one. Otherwise, create a missing token for what we required, // and continue on from the current token (don't skip it). let next_kind = self.peek_token_kind_with_lookahead(1); if kinds.iter().any(|x| *x == next_kind) { self.skip_and_log_unexpected_token(true); let token = self.next_token(); self.sc_mut().make_token(token) } else { // ERROR RECOVERY: We know we didn't encounter an extra token. // So, as a second line of defense, check if the current token // is a misspelling, by our existing narrow definition of misspelling. let is_misspelling = |k: &&TokenKind| Self::is_misspelled_kind(**k, self.current_token_text()); let kind = kinds.iter().find(is_misspelling); match kind { Some(kind) => { self.skip_and_log_misspelled_token(*kind); let pos = self.pos(); self.sc_mut().make_missing(pos) } None => { self.with_error(error); let pos = self.pos(); self.sc_mut().make_missing(pos) } } } } } fn require_token(&mut self, kind: TokenKind, error: Error) -> S::Output { // Must behave as `require_token_one_of parser [kind] error` if self.peek_token_kind() == kind { let token = self.next_token(); self.sc_mut().make_token(token) } else { // ERROR RECOVERY: Look at the next token after this. Is it the one we // require? If so, process the current token as extra and return the next // one. Otherwise, create a missing token for what we required, // and continue on from the current token (don't skip it). let next_kind = self.peek_token_kind_with_lookahead(1); if next_kind == kind { self.skip_and_log_unexpected_token(true); let token = self.next_token(); self.sc_mut().make_token(token) } else { // ERROR RECOVERY: We know we didn't encounter an extra token. // So, as a second line of defense, check if the current token // is a misspelling, by our existing narrow definition of misspelling. 
if Self::is_misspelled_kind(kind, self.current_token_text()) { self.skip_and_log_misspelled_token(kind); let pos = self.pos(); self.sc_mut().make_missing(pos) } else { self.with_error(error); let pos = self.pos(); self.sc_mut().make_missing(pos) } } } } fn require_and_return_token(&mut self, kind: TokenKind, error: Error) -> Option<Token<S>> { if self.peek_token_kind() == kind { Some(self.next_token()) } else { // ERROR RECOVERY: Look at the next token after this. Is it the one we // require? If so, process the current token as extra and return the next // one. Otherwise, create a missing token for what we required, // and continue on from the current token (don't skip it). let next_kind = self.peek_token_kind_with_lookahead(1); if next_kind == kind { self.skip_and_log_unexpected_token(true); Some(self.next_token()) } else { // ERROR RECOVERY: We know we didn't encounter an extra token. // So, as a second line of defense, check if the current token // is a misspelling, by our existing narrow definition of misspelling. if Self::is_misspelled_kind(kind, self.current_token_text()) { self.skip_and_log_misspelled_token(kind); None } else { self.with_error(error); None } } } } fn require_name_allow_all_keywords(&mut self) -> S::Output { let mut parser1 = self.clone(); let token = parser1.next_token_as_name(); if token.kind() == TokenKind::Name { self.continue_from(parser1); self.sc_mut().make_token(token) } else { // ERROR RECOVERY: Create a missing token for the expected token, // and continue on from the current token. Don't skip it. self.with_error(Errors::error1004); let pos = self.pos(); self.sc_mut().make_missing(pos) } } fn require_right_paren(&mut self) -> S::Output { self.require_token(TokenKind::RightParen, Errors::error1011) } fn require_semicolon_token(&mut self, saw_type_name: bool) -> Option<Token<S>> { match self.peek_token_kind() { TokenKind::Variable if saw_type_name => self .require_and_return_token(TokenKind::Semicolon, Errors::local_variable_with_type), _ => self.require_and_return_token(TokenKind::Semicolon, Errors::error1010), } } fn require_semicolon(&mut self) -> S::Output { self.require_token(TokenKind::Semicolon, Errors::error1010) } }
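// Standalone rendition (added sketch, not part of the original source) of the
// "narrow misspelling" check that drives the error recovery above: two byte
// strings form a misspelling pair when they have equal length and differ in
// at most one position. A couple of assertions show the boundary cases.
fn one_character_different(str1: &[u8], str2: &[u8]) -> bool {
    if str1.len() != str2.len() {
        return false;
    }
    for i in 0..str1.len() {
        if str1[i] != str2[i] {
            // Allow only one mismatch: the remainder must agree exactly.
            return str1[i + 1..] == str2[i + 1..];
        }
    }
    true
}

fn main() {
    // One byte away from `function`, so recovery can suggest that keyword.
    assert!(one_character_different(b"function", b"funct1on"));
    // Two differences: not considered a misspelling.
    assert!(!one_character_different(b"function", b"funct10n"));
    // Different lengths never match.
    assert!(!one_character_different(b"function", b"functions"));
}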
Rust
hhvm/hphp/hack/src/parser/core/pattern_parser.rs
// Copyright (c) Meta Platforms, Inc. and affiliates. // // This source code is licensed under the MIT license found in the // LICENSE file in the "hack" directory of this source tree. use parser_core_types::syntax_error::SyntaxError; use parser_core_types::syntax_error::{self as Errors}; use parser_core_types::token_kind::TokenKind; use crate::lexer::Lexer; use crate::parser_env::ParserEnv; use crate::parser_trait::Context; use crate::parser_trait::ParserTrait; use crate::smart_constructors::NodeType; use crate::smart_constructors::SmartConstructors; use crate::smart_constructors::Token; use crate::type_parser::TypeParser; #[derive(Clone)] pub struct PatternParser<'a, S> where S: SmartConstructors, S::Output: NodeType, { lexer: Lexer<'a, S::Factory>, env: ParserEnv, context: Context<Token<S>>, errors: Vec<SyntaxError>, sc: S, } impl<'a, S> ParserTrait<'a, S> for PatternParser<'a, S> where S: SmartConstructors, S::Output: NodeType, { fn make( lexer: Lexer<'a, S::Factory>, env: ParserEnv, context: Context<Token<S>>, errors: Vec<SyntaxError>, sc: S, ) -> Self { Self { lexer, env, context, errors, sc, } } fn into_parts( self, ) -> ( Lexer<'a, S::Factory>, Context<Token<S>>, Vec<SyntaxError>, S, ) { (self.lexer, self.context, self.errors, self.sc) } fn lexer(&self) -> &Lexer<'a, S::Factory> { &self.lexer } fn lexer_mut(&mut self) -> &mut Lexer<'a, S::Factory> { &mut self.lexer } fn continue_from<P: ParserTrait<'a, S>>(&mut self, other: P) { let (lexer, context, errors, sc) = other.into_parts(); self.lexer = lexer; self.context = context; self.errors = errors; self.sc = sc; } fn add_error(&mut self, error: SyntaxError) { self.errors.push(error) } fn env(&self) -> &ParserEnv { &self.env } fn sc_mut(&mut self) -> &mut S { &mut self.sc } fn drain_skipped_tokens(&mut self) -> std::vec::Drain<'_, Token<S>> { self.context.skipped_tokens.drain(..) } fn skipped_tokens(&self) -> &[Token<S>] { &self.context.skipped_tokens } fn context_mut(&mut self) -> &mut Context<Token<S>> { &mut self.context } fn context(&self) -> &Context<Token<S>> { &self.context } } impl<'a, S> PatternParser<'a, S> where S: SmartConstructors, S::Output: NodeType, { fn with_type_parser<F, U>(&mut self, f: F) -> U where F: Fn(&mut TypeParser<'a, S>) -> U, { let mut type_parser: TypeParser<'_, S> = TypeParser::make( self.lexer.clone(), self.env.clone(), self.context.clone(), self.errors.clone(), self.sc.clone(), ); let res = f(&mut type_parser); self.continue_from(type_parser); res } fn parse_type_specifier(&mut self) -> S::Output { self.with_type_parser(|x: &mut TypeParser<'a, S>| { let allow_var = false; let allow_attr = false; x.parse_type_specifier(allow_var, allow_attr) }) } pub fn parse_pattern(&mut self) -> S::Output { match self.peek_token_kind() { TokenKind::Variable => self.parse_variable_or_refinement_pattern(), TokenKind::Name => self.parse_constructor_or_refinement_pattern(), _ => { // ERROR RECOVERY: when encountering an invalid token, make the // whole pattern missing and continue on, starting at the // unexpected token. 
self.with_error(Errors::expected_pattern); self.sc.make_missing(self.pos()) } } } fn parse_variable_or_refinement_pattern(&mut self) -> S::Output { let variable = self.assert_token(TokenKind::Variable); match self.peek_token_kind() { TokenKind::Colon => self.parse_refinement_pattern(variable), _ => self.sc.make_variable_pattern(variable), } } fn parse_constructor_or_refinement_pattern(&mut self) -> S::Output { let name = self.assert_token(TokenKind::Name); let name = self.scan_remaining_qualified_name(name); match self.peek_token_kind() { // NB: This is only a valid refinement pattern if `name` is a `Name` // token beginning with an underscore character (i.e., a wildcard). // If it isn't, we emit an error in a later pass. TokenKind::Colon => self.parse_refinement_pattern(name), _ => self.parse_constructor_pattern(name), } } fn parse_constructor_pattern(&mut self, name: S::Output) -> S::Output { // SPEC: // // constructor-pattern: // name args-opt // qualified-name args-opt // // args: // ( pattern-list-opt ) // // Wildcard patterns (e.g., `match $x { _ => ... }`) are parsed as // constructor patterns here. We transform constructor patterns where // the name is a single `Name` token beginning with an underscore // character into wildcard patterns during lowering. let (left, items, right) = if self.peek_token_kind() == TokenKind::LeftParen { self.parse_parenthesized_comma_list_opt_items_opt(Self::parse_pattern) } else { ( self.sc.make_missing(self.pos()), self.sc.make_missing(self.pos()), self.sc.make_missing(self.pos()), ) }; self.sc.make_constructor_pattern(name, left, items, right) } fn parse_refinement_pattern(&mut self, variable: S::Output) -> S::Output { // SPEC: // // refinement-pattern: // variable-name : type-specifier // name : type-specifier // // The `name : type-specifier` form is only legal when the name token // begins with an underscore (i.e., it's a wildcard). We emit the error // in a later pass. let colon = self.assert_token(TokenKind::Colon); let type_specifier = self.parse_type_specifier(); self.sc .make_refinement_pattern(variable, colon, type_specifier) } }
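// The `with_type_parser` pattern used here (and again in `StatementParser`
// below) threads mutable parser state through a temporary sub-parser: clone
// the state fields in, run the closure, then copy the advanced state back
// with `continue_from`. Below is a minimal standalone sketch of that
// hand-off with toy state instead of the real lexer/context; all names are
// illustrative, not this crate's API.
#[derive(Clone)]
struct State {
    offset: usize,
    errors: Vec<String>,
}

struct Outer {
    state: State,
}

struct Sub {
    state: State,
}

impl Outer {
    fn with_sub<U>(&mut self, f: impl Fn(&mut Sub) -> U) -> U {
        let mut sub = Sub {
            state: self.state.clone(), // hand the current state to the sub-parser
        };
        let res = f(&mut sub);
        self.state = sub.state; // the `continue_from` step: adopt the advanced state
        res
    }
}

fn main() {
    let mut outer = Outer {
        state: State { offset: 0, errors: vec![] },
    };
    let parsed = outer.with_sub(|sub| {
        sub.state.offset += 3; // the sub-parser consumes three tokens
        sub.state.errors.push("recovered here".to_string());
        "type-specifier"
    });
    assert_eq!(parsed, "type-specifier");
    assert_eq!(outer.state.offset, 3); // progress survives the hand-off
    assert_eq!(outer.state.errors.len(), 1); // so do accumulated errors
}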
Rust
hhvm/hphp/hack/src/parser/core/statement_parser.rs
// Copyright (c) 2019, Facebook, Inc. // All rights reserved. // // This source code is licensed under the MIT license found in the // LICENSE file in the "hack" directory of this source tree. use parser_core_types::lexable_token::LexableToken; use parser_core_types::syntax_error::SyntaxError; use parser_core_types::syntax_error::{self as Errors}; use parser_core_types::token_kind::TokenKind; use parser_core_types::trivia_kind::TriviaKind; use crate::declaration_parser::DeclarationParser; use crate::expression_parser::ExpressionParser; use crate::lexer::Lexer; use crate::parser_env::ParserEnv; use crate::parser_trait::Context; use crate::parser_trait::ExpectedTokens; use crate::parser_trait::ParserTrait; use crate::pattern_parser::PatternParser; use crate::smart_constructors::NodeType; use crate::smart_constructors::SmartConstructors; use crate::smart_constructors::Token; use crate::type_parser::TypeParser; #[derive(Clone)] pub struct StatementParser<'a, S> where S: SmartConstructors, S::Output: NodeType, { lexer: Lexer<'a, S::Factory>, env: ParserEnv, context: Context<Token<S>>, errors: Vec<SyntaxError>, sc: S, } impl<'a, S> ParserTrait<'a, S> for StatementParser<'a, S> where S: SmartConstructors, S::Output: NodeType, { fn make( lexer: Lexer<'a, S::Factory>, env: ParserEnv, context: Context<Token<S>>, errors: Vec<SyntaxError>, sc: S, ) -> Self { Self { lexer, env, context, errors, sc, } } fn into_parts( self, ) -> ( Lexer<'a, S::Factory>, Context<Token<S>>, Vec<SyntaxError>, S, ) { (self.lexer, self.context, self.errors, self.sc) } fn lexer(&self) -> &Lexer<'a, S::Factory> { &self.lexer } fn lexer_mut(&mut self) -> &mut Lexer<'a, S::Factory> { &mut self.lexer } fn continue_from<P: ParserTrait<'a, S>>(&mut self, other: P) { let (lexer, context, errors, sc) = other.into_parts(); self.lexer = lexer; self.context = context; self.errors = errors; self.sc = sc; } fn add_error(&mut self, error: SyntaxError) { self.errors.push(error) } fn env(&self) -> &ParserEnv { &self.env } fn sc_mut(&mut self) -> &mut S { &mut self.sc } fn drain_skipped_tokens(&mut self) -> std::vec::Drain<'_, Token<S>> { self.context.skipped_tokens.drain(..) 
} fn skipped_tokens(&self) -> &[Token<S>] { &self.context.skipped_tokens } fn context_mut(&mut self) -> &mut Context<Token<S>> { &mut self.context } fn context(&self) -> &Context<Token<S>> { &self.context } } impl<'a, S> StatementParser<'a, S> where S: SmartConstructors, S::Output: NodeType, { fn with_type_parser<F, U>(&mut self, f: F) -> U where F: Fn(&mut TypeParser<'a, S>) -> U, { let mut type_parser: TypeParser<'_, S> = TypeParser::make( self.lexer.clone(), self.env.clone(), self.context.clone(), self.errors.clone(), self.sc.clone(), ); let res = f(&mut type_parser); self.continue_from(type_parser); res } fn parse_type_specifier(&mut self) -> S::Output { self.with_type_parser(|x: &mut TypeParser<'a, S>| x.parse_type_specifier(false, true)) } fn with_expression_parser<F, U>(&mut self, f: F) -> U where F: Fn(&mut ExpressionParser<'a, S>) -> U, { let mut expression_parser: ExpressionParser<'_, S> = ExpressionParser::make( self.lexer.clone(), self.env.clone(), self.context.clone(), self.errors.clone(), self.sc.clone(), ); let res = f(&mut expression_parser); self.continue_from(expression_parser); res } fn with_decl_parser<F, U>(&mut self, f: F) -> U where F: Fn(&mut DeclarationParser<'a, S>) -> U, { let mut decl_parser: DeclarationParser<'_, S> = DeclarationParser::make( self.lexer.clone(), self.env.clone(), self.context.clone(), self.errors.clone(), self.sc.clone(), ); let res = f(&mut decl_parser); self.continue_from(decl_parser); res } fn with_pattern_parser<F, U>(&mut self, f: F) -> U where F: Fn(&mut PatternParser<'a, S>) -> U, { let mut pattern_parser: PatternParser<'_, S> = PatternParser::make( self.lexer.clone(), self.env.clone(), self.context.clone(), self.errors.clone(), self.sc.clone(), ); let res = f(&mut pattern_parser); self.continue_from(pattern_parser); res } pub fn parse_statement(&mut self) -> S::Output { match self.peek_token_kind() { TokenKind::Async | TokenKind::Function => { self.parse_possible_php_function(/* toplevel:*/ false) } TokenKind::Abstract | TokenKind::Final | TokenKind::Interface | TokenKind::Trait | TokenKind::XHP | TokenKind::Class => { self.with_error(Errors::decl_outside_global_scope); self.with_decl_parser(|x| { let pos = x.pos(); let missing = x.sc_mut().make_missing(pos); x.parse_classish_declaration(missing) }) } TokenKind::Fallthrough => self.parse_possible_erroneous_fallthrough(), TokenKind::For => self.parse_for_statement(), TokenKind::Foreach => self.parse_foreach_statement(), TokenKind::Do => self.parse_do_statement(), TokenKind::While => self.parse_while_statement(), TokenKind::Using => { let pos = self.pos(); let missing = self.sc_mut().make_missing(pos); self.parse_using_statement(missing) } TokenKind::Await if self.peek_token_kind_with_lookahead(1) == TokenKind::Using => { let await_kw = self.assert_token(TokenKind::Await); self.parse_using_statement(await_kw) } TokenKind::If => self.parse_if_statement(), TokenKind::Switch => self.parse_switch_statement(), TokenKind::Match => self.parse_match_statement(), TokenKind::Try => self.parse_try_statement(), TokenKind::Break => self.parse_break_statement(), TokenKind::Continue => self.parse_continue_statement(), TokenKind::Return => self.parse_return_statement(), TokenKind::Yield => { match self.peek_token_kind_with_lookahead(1) { // yield break; TokenKind::Break => self.parse_yield_break_statement(), // yield; // yield some_expression; _ => self.parse_expression_statement(), } } TokenKind::Throw => self.parse_throw_statement(), TokenKind::LeftBrace => self.parse_compound_statement(), 
TokenKind::Static => self.parse_expression_statement(), TokenKind::Echo => self.parse_echo_statement(), TokenKind::Concurrent => self.parse_concurrent_statement(), TokenKind::Unset => self.parse_unset_statement(), TokenKind::Let => self.parse_declare_local_statement(), TokenKind::Case => { let result = self.parse_case_label(); // TODO: This puts the error in the wrong place. We should highlight // the entire label, not the trailing colon. self.with_error(Errors::error2003); result } TokenKind::Default => { let result = self.parse_default_label(); // TODO: This puts the error in the wrong place. We should highlight // the entire label, not the trailing colon. self.with_error(Errors::error2004); result } TokenKind::Semicolon => self.parse_expression_statement(), // ERROR RECOVERY: when encountering a token that's invalid now but the // context says is expected later, make the whole statement missing // and continue on, starting at the unexpected token. // TODO T20390825: Make sure this won't cause premature recovery. kind if self.expects(kind) => { let pos = self.pos(); self.sc_mut().make_missing(pos) } _ => self.parse_expression_statement(), } } pub fn parse_header(&mut self) -> (S::Output, bool) { let (markup, suffix_opt) = self.lexer.scan_header(); let (suffix, has_suffix) = match suffix_opt { Some((less_than_question, language_opt)) => { let less_than_question_token = self.sc_mut().make_token(less_than_question); let language = match language_opt { Some(language) => { let token = self.sc_mut().make_token(language); token } None => { let pos = self.pos(); let missing = self.sc_mut().make_missing(pos); missing } }; let suffix = self .sc_mut() .make_markup_suffix(less_than_question_token, language); (suffix, true) } None => { let pos = self.pos(); let missing = self.sc_mut().make_missing(pos); (missing, false) } }; let s = match (markup, has_suffix) { (Some(markup), _) => { let markup = self.sc_mut().make_token(markup); self.sc_mut().make_markup_section(markup, suffix) } (None, false) => { let pos = self.pos(); self.sc_mut().make_missing(pos) } (None, true) => { let pos = self.pos(); let missing = self.sc_mut().make_missing(pos); self.sc_mut().make_markup_section(missing, suffix) } }; (s, has_suffix) } pub fn parse_possible_php_function(&mut self, toplevel: bool) -> S::Output { // ERROR RECOVERY: PHP supports nested named functions, but Hack does not. // (Hack only supports anonymous nested functions as expressions.) // // If we have a statement beginning with function left-paren, then parse it // as a statement expression beginning with an anonymous function; it will // then have to end with a semicolon. // // If it starts with something else, parse it as a function. // // TODO: Give an error for nested nominal functions in a later pass.
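// As an illustration (hypothetical Hack snippets, not taken from the spec):
//   function () { ... };   // function + left-paren: an anonymous function,
//                          // parsed below as an expression statement
//   function foo() { ... } // named: parsed as a declaration; flagged with
//                          // inline_function_def when not at the top level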
let kind0 = self.peek_token_kind_with_lookahead(0); let kind1 = self.peek_token_kind_with_lookahead(1); match (kind0, kind1) { | (TokenKind::Async, TokenKind::Function) if self.peek_token_kind_with_lookahead(2) == TokenKind::LeftParen => self.parse_expression_statement(), | (TokenKind::Function, TokenKind::LeftParen) // Verbose-style lambda // Async, compact-style lambda | (TokenKind::Async, TokenKind::LeftParen) | (TokenKind::Async, TokenKind::LeftBrace) // Async block => self.parse_expression_statement(), | _ => { let missing = self.with_decl_parser(|x: &mut DeclarationParser<'a, S>| { let pos = x.pos(); let missing = x.sc_mut().make_missing(pos); x.parse_function_declaration(missing) }); if !toplevel { self.with_error(Errors::inline_function_def) }; missing } } } // Helper: parses ( expr ) fn parse_paren_expr(&mut self) -> (S::Output, S::Output, S::Output) { let left_paren = self.require_left_paren(); let expr_syntax = self.parse_expression(); let right_paren = self.require_right_paren(); (left_paren, expr_syntax, right_paren) } fn parse_for_statement(&mut self) -> S::Output { // SPEC // for-statement: // for ( for-initializer-opt ; for-control-opt ; \ // for-end-of-loop-opt ) statement // // The initializer and end-of-loop clauses are optional, // comma-separated lists of expressions. The control clause is // an optional single expression. // // Note that unlike most such lists in Hack, it may *not* have a trailing // comma. // // TODO: There is no compelling reason to not allow a trailing comma // from the grammatical point of view. Each clause unambiguously ends in // either a semi or a paren, so we can allow a trailing comma without // difficulty. let for_keyword_token = self.assert_token(TokenKind::For); let for_left_paren = self.require_left_paren(); let for_initializer_expr = self.parse_comma_list_opt(TokenKind::Semicolon, Errors::error1015, |x| { x.parse_expression() }); let for_first_semicolon = self.require_semicolon(); let for_control_expr = match self.peek_token_kind() { TokenKind::Semicolon => { let pos = self.pos(); self.sc_mut().make_missing(pos) } _ => self.parse_expression(), }; let for_second_semicolon = self.require_semicolon(); let for_end_of_loop_expr = self.parse_comma_list_opt(TokenKind::RightParen, Errors::error1015, |x| { x.parse_expression() }); let for_right_paren = self.require_right_paren(); let for_statement = self.parse_statement(); self.sc_mut().make_for_statement( for_keyword_token, for_left_paren, for_initializer_expr, for_first_semicolon, for_control_expr, for_second_semicolon, for_end_of_loop_expr, for_right_paren, for_statement, ) } fn parse_foreach_statement(&mut self) -> S::Output { let foreach_keyword_token = self.assert_token(TokenKind::Foreach); let foreach_left_paren = self.require_left_paren(); self.expect_in_new_scope(ExpectedTokens::RightParen); let foreach_collection_name = self.with_expression_parser(|x: &mut ExpressionParser<'a, S>| { x.with_as_expressions(false, |x| x.parse_expression()) }); let await_token = self.optional_token(TokenKind::Await); let as_token = self.require_as(); let mut parser1 = self.clone(); let after_as = parser1.parse_expression(); let (foreach_key, foreach_arrow, foreach_value) = match parser1.peek_token_kind() { TokenKind::RightParen => { let pos = self.pos(); let missing1 = self.sc_mut().make_missing(pos); let pos = self.pos(); let missing2 = self.sc_mut().make_missing(pos); let value = self.parse_expression(); (missing1, missing2, value) } TokenKind::EqualGreaterThan => { self.continue_from(parser1); let arrow =
self.assert_token(TokenKind::EqualGreaterThan); let value = self.parse_expression(); (after_as, arrow, value) } _ => { self.continue_from(parser1); self.with_error(Errors::invalid_foreach_element); let token = self.fetch_token(); let error = self.sc_mut().make_error(token); let foreach_value = self.parse_expression(); (after_as, error, foreach_value) } }; let right_paren_token = self.require_right_paren(); self.pop_scope(ExpectedTokens::RightParen); let foreach_statement = self.parse_statement(); self.sc_mut().make_foreach_statement( foreach_keyword_token, foreach_left_paren, foreach_collection_name, await_token, as_token, foreach_key, foreach_arrow, foreach_value, right_paren_token, foreach_statement, ) } fn parse_do_statement(&mut self) -> S::Output { let do_keyword_token = self.assert_token(TokenKind::Do); let statement_node = self.parse_statement(); let do_while_keyword_token = self.require_while(); let (left_paren_token, expr_node, right_paren_token) = self.parse_paren_expr(); let do_semicolon_token = self.require_semicolon(); self.sc_mut().make_do_statement( do_keyword_token, statement_node, do_while_keyword_token, left_paren_token, expr_node, right_paren_token, do_semicolon_token, ) } fn parse_while_statement(&mut self) -> S::Output { let while_keyword_token = self.assert_token(TokenKind::While); let (left_paren_token, expr_node, right_paren_token) = self.parse_paren_expr(); let statement_node = self.parse_statement(); self.sc_mut().make_while_statement( while_keyword_token, left_paren_token, expr_node, right_paren_token, statement_node, ) } // SPEC: // using-statement: // await-opt using expression ; // await-opt using ( expression-list ) compound-statement // // TODO: Update the specification of the grammar fn parse_using_statement(&mut self, await_kw: S::Output) -> S::Output { let using_kw = self.assert_token(TokenKind::Using); // Decision point - Are we at a function scope or a body scope let token_kind = self.peek_token_kind(); // if next token is left paren it can be either // - parenthesized expression followed by semicolon for function scoped using // - comma separated list of expressions wrapped in parens for blocks. // To distinguish between them, try parsing a parenthesized expression and then // check the next token. NOTE: we should not use 'parse_expression' here // since it might parse (expr) { smth() } as subscript expression $expr{$index} // let mut parser1 = self.clone(); let expr = if token_kind == TokenKind::LeftParen { parser1.with_expression_parser(|p: &mut ExpressionParser<'a, S>| { p.parse_cast_or_parenthesized_or_lambda_expression() }) } else { parser1.parse_expression() }; let token = parser1.next_token(); match token.kind() { TokenKind::Semicolon => { self.continue_from(parser1); let semi = self.sc_mut().make_token(token); self.sc_mut() .make_using_statement_function_scoped(await_kw, using_kw, expr, semi) } _ => { let left_paren = self.require_left_paren(); let expressions = self.parse_comma_list(TokenKind::RightParen, Errors::error1015, |x| { x.parse_expression() }); let right_paren = self.require_right_paren(); let statements = self.parse_statement(); self.sc_mut().make_using_statement_block_scoped( await_kw, using_kw, left_paren, expressions, right_paren, statements, ) } } } fn parse_unset_statement(&mut self) -> S::Output { // TODO: This is listed as unsupported in Hack in the spec; is that true? // TODO: If it is formally supported in Hack then update the spec; if not // TODO: then should we make it illegal in strict mode?
// TODO: Can the list be comma-terminated? // TODO: Can the list be empty? // TODO: The list has to be expressions which evaluate as variables; // add an error checking pass. // TODO: TokenKind::Unset is case-insensitive. Should non-lowercase be an error? let keyword = self.assert_token(TokenKind::Unset); let (left_paren, variables, right_paren) = self.parse_parenthesized_comma_list_opt_allow_trailing(|x| x.parse_expression()); let semi = self.require_semicolon(); self.sc_mut() .make_unset_statement(keyword, left_paren, variables, right_paren, semi) } // parses the "( expr ) statement" segment of If, Elseif or Else clauses. fn parse_if_body_helper(&mut self) -> (S::Output, S::Output, S::Output, S::Output) { let (left_paren_token, expr_node, right_paren_token) = self.parse_paren_expr(); let statement_node = self.parse_statement(); ( left_paren_token, expr_node, right_paren_token, statement_node, ) } // do not eat token and return Missing if first token is not Else fn parse_else_opt(&mut self) -> S::Output { let else_token = self.optional_token(TokenKind::Else); if else_token.is_missing() { else_token } else { let else_consequence = self.parse_statement(); self.sc_mut().make_else_clause(else_token, else_consequence) } } fn parse_if_statement(&mut self) -> S::Output { // SPEC: // if-statement: // if ( expression ) statement else-clause-opt // // else-clause: // else statement let if_keyword_token = self.assert_token(TokenKind::If); let (if_left_paren, if_expr, if_right_paren, if_consequence) = self.parse_if_body_helper(); let else_syntax = self.parse_else_opt(); self.sc_mut().make_if_statement( if_keyword_token, if_left_paren, if_expr, if_right_paren, if_consequence, else_syntax, ) } fn parse_switch_statement(&mut self) -> S::Output { // SPEC: // // The spec for switches is very simple: // // switch-statement: // switch ( expression ) compound-statement // labeled-statement: // case-label // default-label // case-label: // case expression : statement // default-label: // default : statement // // where the compound statement, if not empty, must consist of only labeled // statements. // // These rules give a nice simple parse but it has some unfortunate properties. // Consider: // // switch (foo) // { // case 1: // case 2: // break; // default: // break; // } // // What's the parse of the compound statement contents based on that grammar? // // case 1: // case 2: // break; // default: // break; // // That is, the second case is a child of the first. That makes it harder // to write analyzers, it makes it harder to write pretty printers, and so on. // // What do we really want here? We want a switch to be a collection of // *sections* where each section has one or more *labels* and zero or more // *statements*. // // switch-statement: // switch ( expression ) { switch-sections-opt } // // switch-sections: // switch-section // switch-sections switch-section // // switch-section: // section-labels // section-statements-opt // section-fallthrough-opt // // section-fallthrough: // fallthrough ; // // section-labels: // section-label // section-labels section-label // // section-statements: // statement // section-statements statement // // The parsing of course has to be greedy; we never want to say that there // are zero statements *between* two sections. // // TODO: Update the specification with these rules.
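// So, under the section-based grammar, the example above parses as two
// flat sections rather than nested labels (a sketch of the intended tree):
//   switch-section { labels: [case 1:, case 2:], statements: [break;] }
//   switch-section { labels: [default:], statements: [break;] }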
let switch_keyword_token = self.assert_token(TokenKind::Switch); let (left_paren_token, expr_node, right_paren_token) = self.parse_paren_expr(); let left_brace_token = self.require_left_brace(); let section_list = { let list = self.parse_terminated_list(|x| x.parse_switch_section(), TokenKind::RightBrace); if list.is_missing() { self.with_error(Errors::empty_switch_cases); let pos = self.pos(); self.sc_mut().make_missing(pos) } else { list } }; let right_brace_token = self.require_right_brace(); self.sc_mut().make_switch_statement( switch_keyword_token, left_paren_token, expr_node, right_paren_token, left_brace_token, section_list, right_brace_token, ) } fn is_switch_fallthrough(&self) -> bool { self.peek_token_kind() == TokenKind::Fallthrough && self.peek_token_kind_with_lookahead(1) == TokenKind::Semicolon } fn parse_possible_erroneous_fallthrough(&mut self) -> S::Output { if self.is_switch_fallthrough() { self.with_error_on_whole_token(Errors::error1055); self.parse_switch_fallthrough() } else { self.parse_expression_statement() } } fn parse_switch_fallthrough(&mut self) -> S::Output { // We don't get here unless we have fallthrough ; let keyword = self.assert_token(TokenKind::Fallthrough); let semi = self.assert_token(TokenKind::Semicolon); self.sc_mut().make_switch_fallthrough(keyword, semi) } fn parse_switch_fallthrough_opt(&mut self) -> S::Output { if self.is_switch_fallthrough() { self.parse_switch_fallthrough() } else { // As long as we have FALLTHROUGH comments, insert a faux-statement as if // there was a fallthrough statement. For example, the code // // > case 22: // > $x = 0; // > // FALLTHROUGH because we want all the other functionality as well // > case 42: // > foo($x); // > break; // // Should be parsed as if it were // // > case 22: // > $x = 0; // > // FALLTHROUGH because we want all the other functionality as well // > fallthrough; // > case 42: // > foo($x); // > break; // // But since we have no actual occurrence (i.e. no position, no string) of // that `fallthrough;` statement, we construct a `switch_fallthrough`, but // fill it with `missing`.
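// Concretely (a sketch of the construction below): when the next token
// carries FALLTHROUGH trivia we build
//   switch_fallthrough(keyword: missing, semicolon: missing)
// so later passes can treat the comment like an explicit `fallthrough;`.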
let next = self.peek_token(); let commented_fallthrough = next.has_leading_trivia_kind(TriviaKind::FallThrough); let pos = self.pos(); let missing = self.sc_mut().make_missing(pos); if commented_fallthrough { let pos = self.pos(); let missing1 = self.sc_mut().make_missing(pos); self.sc_mut().make_switch_fallthrough(missing, missing1) } else { missing } } } fn parse_switch_section(&mut self) -> S::Output { // See parse_switch_statement for grammar let labels = self.parse_list_until_none(|x| x.parse_switch_section_label()); if labels.is_missing() { self.with_error(Errors::error2008); }; let statements = self.parse_list_until_none(|x| x.parse_switch_section_statement()); let fallthrough = self.parse_switch_fallthrough_opt(); self.sc_mut() .make_switch_section(labels, statements, fallthrough) } fn parse_switch_section_statement(&mut self) -> Option<S::Output> { if self.is_switch_fallthrough() { None } else { match self.peek_token_kind() { TokenKind::Default | TokenKind::Case | TokenKind::RightBrace | TokenKind::EndOfFile => None, _ => { let statement = self.parse_statement(); Some(statement) } } } } fn parse_switch_section_label(&mut self) -> Option<S::Output> { // See the grammar under parse_switch_statement match self.peek_token_kind() { TokenKind::Case => { let label = self.parse_case_label(); Some(label) } TokenKind::Default => { let label = self.parse_default_label(); Some(label) } _ => None, } } fn parse_match_statement(&mut self) -> S::Output { // SPEC: // // match-statement: // match ( expression ) { match-statement-arms-opt } // // match-statement-arms: // match-statement-arm // match-statement-arms match-statement-arm let match_keyword_token = self.assert_token(TokenKind::Match); let (left_paren_token, expr_node, right_paren_token) = self.parse_paren_expr(); let left_brace_token = self.require_left_brace(); let match_arms = self.parse_terminated_list(|x| x.parse_match_statement_arm(), TokenKind::RightBrace); if match_arms.is_missing() { self.with_error(Errors::empty_match_statement); } let right_brace_token = self.require_right_brace(); self.sc_mut().make_match_statement( match_keyword_token, left_paren_token, expr_node, right_paren_token, left_brace_token, match_arms, right_brace_token, ) } fn parse_match_statement_arm(&mut self) -> S::Output { // SPEC: // // match-statement-arm: // pattern => statement // // We parse any statement here, but only compound statements are // currently permitted. We emit this error in a later pass. 
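// For example (illustrative snippet; pattern syntax elided):
//   match ($x) { somePattern => { handle(); } }
// Here `somePattern => { handle(); }` is one match-statement-arm; a
// non-compound body such as `somePattern => handle();` still parses here
// and is rejected in the later pass mentioned above.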
if self.peek_token_kind() == TokenKind::Case { self.skip_and_log_unexpected_token(/* generate_error = */ true); } let pattern = self.parse_pattern(); let arrow_token = self.require_token(TokenKind::EqualGreaterThan, Errors::error1028); let body = self.parse_statement(); self.sc_mut() .make_match_statement_arm(pattern, arrow_token, body) } fn parse_catch_clause_opt(&mut self) -> Option<S::Output> { // SPEC // catch ( type-specification-opt variable-name ) compound-statement if self.peek_token_kind() == TokenKind::Catch { let catch_token = self.assert_token(TokenKind::Catch); let left_paren = self.require_left_paren(); let catch_type = match self.peek_token_kind() { TokenKind::Variable => { self.with_error(Errors::error1007); let pos = self.pos(); self.sc_mut().make_missing(pos) } _ => self.parse_type_specifier(), }; let catch_var = self.require_variable(); let right_paren = self.require_right_paren(); let compound_stmt = self.parse_compound_statement(); let catch_clause = self.sc_mut().make_catch_clause( catch_token, left_paren, catch_type, catch_var, right_paren, compound_stmt, ); Some(catch_clause) } else { None } } fn parse_finally_clause_opt(&mut self) -> S::Output { // SPEC // finally-clause: // finally compound-statement if self.peek_token_kind() == TokenKind::Finally { let finally_token = self.assert_token(TokenKind::Finally); let compound_stmt = self.parse_compound_statement(); self.sc_mut() .make_finally_clause(finally_token, compound_stmt) } else { let pos = self.pos(); self.sc_mut().make_missing(pos) } } fn parse_try_statement(&mut self) -> S::Output { // SPEC: // try-statement: // try compound-statement catch-clauses // try compound-statement finally-clause // try compound-statement catch-clauses finally-clause let try_keyword_token = self.assert_token(TokenKind::Try); let try_compound_stmt = self.parse_compound_statement(); let catch_clauses = self.parse_list_until_none(|x| x.parse_catch_clause_opt()); let finally_clause = self.parse_finally_clause_opt(); // If the catch and finally are both missing then we give an error in // a later pass. self.sc_mut().make_try_statement( try_keyword_token, try_compound_stmt, catch_clauses, finally_clause, ) } fn parse_break_statement(&mut self) -> S::Output { // SPEC // break-statement: // break ; // We detect if we are not inside a switch or loop in a later pass. let break_token = self.assert_token(TokenKind::Break); let semi_token = self.require_semicolon(); self.sc_mut().make_break_statement(break_token, semi_token) } fn parse_continue_statement(&mut self) -> S::Output { // SPEC // continue-statement: // continue ; // We detect if we are not inside a loop in a later pass. 
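// For example (illustrative): in
//   while ($cond) { if ($skip) { continue; } work(); }
// the `continue;` parses here; whether it is actually inside a loop is
// checked in the later pass noted above.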
let continue_token = self.assert_token(TokenKind::Continue); let semi_token = self.require_semicolon(); self.sc_mut() .make_continue_statement(continue_token, semi_token) } fn parse_return_statement(&mut self) -> S::Output { let return_token = self.assert_token(TokenKind::Return); if self.peek_token_kind() == TokenKind::Semicolon { let pos = self.pos(); let missing = self.sc_mut().make_missing(pos); let semi_token = self.next_token(); let semi_token = self.sc_mut().make_token(semi_token); self.sc_mut() .make_return_statement(return_token, missing, semi_token) } else { let expr = self.parse_expression(); let semi_token = self.require_semicolon(); self.sc_mut() .make_return_statement(return_token, expr, semi_token) } } fn parse_throw_statement(&mut self) -> S::Output { let throw_token = self.assert_token(TokenKind::Throw); let expr = self.parse_expression(); let semi_token = self.require_semicolon(); self.sc_mut() .make_throw_statement(throw_token, expr, semi_token) } fn parse_yield_break_statement(&mut self) -> S::Output { let yield_token = self.assert_token(TokenKind::Yield); let break_token = self.assert_token(TokenKind::Break); let semi_token = self.require_semicolon(); self.sc_mut() .make_yield_break_statement(yield_token, break_token, semi_token) } fn parse_simple_initializer_opt(&mut self) -> S::Output { if let TokenKind::Equal = self.peek_token_kind() { let token = self.assert_token(TokenKind::Equal); let init = self.parse_expression(); self.sc_mut().make_simple_initializer(token, init) } else { let pos = self.pos(); self.sc_mut().make_missing(pos) } } fn parse_declare_local_statement(&mut self) -> S::Output { let let_token = self.assert_token(TokenKind::Let); let variable = self.parse_expression(); let colon_token = self.require_colon(); let hint = self.with_type_parser(|p: &mut TypeParser<'a, S>| p.parse_type_specifier(true, true)); let simple_init = self.parse_simple_initializer_opt(); let semi_token = self.require_semicolon(); self.sc_mut().make_declare_local_statement( let_token, variable, colon_token, hint, simple_init, semi_token, ) } fn parse_default_label(&mut self) -> S::Output { // // See comments under parse_switch_statement for the grammar. // TODO: Update the spec. // TODO: The spec is wrong; it implies that a statement must always follow // the default:, but in fact // switch($x) { default: } // is legal. Fix the spec. // TODO: PHP allows a default to end in a semi; Hack does not. We allow a semi // here; add an error in a later pass. let default_token = self.assert_token(TokenKind::Default); let colon_token = { if self.peek_token_kind() == TokenKind::Semicolon { let token = self.next_token(); self.sc_mut().make_token(token) } else { self.require_colon() } }; self.sc_mut().make_default_label(default_token, colon_token) } fn parse_case_label(&mut self) -> S::Output { // SPEC: // See comments under parse_switch_statement for the grammar. // TODO: The spec is wrong; it implies that a statement must always follow // the case, but in fact // switch($x) { case 10: } // is legal. Fix the spec. // TODO: PHP allows a case to end in a semi; Hack does not. We allow a semi // here; add an error in a later pass. 
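// So both of these are accepted by this function (illustrative):
//   case 10:   // the usual colon-terminated label
//   case 10;   // semicolon accepted here; the error is left to a later pass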
let case_token = self.assert_token(TokenKind::Case); let expr = self.parse_expression(); let colon_token = { if self.peek_token_kind() == TokenKind::Semicolon { let token = self.next_token(); self.sc_mut().make_token(token) } else { self.require_colon() } }; self.sc_mut().make_case_label(case_token, expr, colon_token) } fn parse_concurrent_statement(&mut self) -> S::Output { let keyword = self.assert_token(TokenKind::Concurrent); let statement = self.parse_statement(); self.sc_mut().make_concurrent_statement(keyword, statement) } // SPEC: // TODO: update the spec to reflect that echo and print must be a statement // echo-intrinsic: // echo expression // echo ( expression ) // echo expression-list-two-or-more // // expression-list-two-or-more: // expression , expression // expression-list-two-or-more , expression fn parse_echo_statement(&mut self) -> S::Output { let token = self.assert_token(TokenKind::Echo); let expression_list = self.parse_comma_list(TokenKind::Semicolon, Errors::error1015, |x| { x.parse_expression() }); let semicolon = self.require_semicolon(); self.sc_mut() .make_echo_statement(token, expression_list, semicolon) } fn parse_expression_statement(&mut self) -> S::Output { match self.peek_token_kind() { TokenKind::Semicolon => { let pos = self.pos(); let missing = self.sc_mut().make_missing(pos); let token = self.next_token(); let token = self.sc_mut().make_token(token); self.sc_mut().make_expression_statement(missing, token) } _ => { self.expect_in_new_scope(ExpectedTokens::Semicolon); // Detect common patterns of users trying to put type // annotations on local variables. let saw_type_name = match self.peek_token_kind() { // User-defined types. TokenKind::Name => true, // Common reserved names that are types. TokenKind::Darray => true, TokenKind::Dict => true, TokenKind::Int => true, TokenKind::Keyset => true, TokenKind::Shape => true, TokenKind::String => true, TokenKind::Varray => true, TokenKind::Vec => true, _ => false, }; let expression = self.parse_expression(); let token = match self.require_semicolon_token(saw_type_name) { Some(t) => self.sc_mut().make_token(t), None => { let pos = self.pos(); self.sc_mut().make_missing(pos) } }; self.pop_scope(ExpectedTokens::Semicolon); self.sc_mut().make_expression_statement(expression, token) } } } pub fn parse_compound_statement(&mut self) -> S::Output { let mut parser1 = self.clone(); let token = parser1.next_token(); match token.kind() { TokenKind::Semicolon => { self.continue_from(parser1); self.sc_mut().make_token(token) } _ => { let left_brace_token = self.require_left_brace(); let statement_list = self.parse_terminated_list(|x| x.parse_statement(), TokenKind::RightBrace); let right_brace_token = self.require_right_brace(); self.sc_mut().make_compound_statement( left_brace_token, statement_list, right_brace_token, ) } } } fn parse_expression(&mut self) -> S::Output { self.with_expression_parser(|p: &mut ExpressionParser<'a, S>| p.parse_expression()) } fn parse_pattern(&mut self) -> S::Output { self.with_pattern_parser(|p: &mut PatternParser<'a, S>| p.parse_pattern()) } }
Rust
hhvm/hphp/hack/src/parser/core/type_parser.rs
// Copyright (c) 2019, Facebook, Inc. // All rights reserved. // // This source code is licensed under the MIT license found in the // LICENSE file in the "hack" directory of this source tree. use parser_core_types::lexable_token::LexableToken; use parser_core_types::syntax_error::SyntaxError; use parser_core_types::syntax_error::{self as Errors}; use parser_core_types::token_kind::TokenKind; use crate::declaration_parser::DeclarationParser; use crate::expression_parser::ExpressionParser; use crate::lexer::Lexer; use crate::parser_env::ParserEnv; use crate::parser_trait::Context; use crate::parser_trait::ParserTrait; use crate::parser_trait::SeparatedListKind; use crate::smart_constructors::NodeType; use crate::smart_constructors::SmartConstructors; use crate::smart_constructors::Token; #[derive(Clone)] pub struct TypeParser<'a, S> where S: SmartConstructors, S::Output: NodeType, { lexer: Lexer<'a, S::Factory>, env: ParserEnv, context: Context<Token<S>>, errors: Vec<SyntaxError>, sc: S, } impl<'a, S> ParserTrait<'a, S> for TypeParser<'a, S> where S: SmartConstructors, S::Output: NodeType, { fn make( mut lexer: Lexer<'a, S::Factory>, env: ParserEnv, context: Context<Token<S>>, errors: Vec<SyntaxError>, sc: S, ) -> Self { lexer.set_in_type(true); Self { lexer, env, context, errors, sc, } } fn into_parts( mut self, ) -> ( Lexer<'a, S::Factory>, Context<Token<S>>, Vec<SyntaxError>, S, ) { self.lexer.set_in_type(false); (self.lexer, self.context, self.errors, self.sc) } fn lexer(&self) -> &Lexer<'a, S::Factory> { &self.lexer } fn lexer_mut(&mut self) -> &mut Lexer<'a, S::Factory> { &mut self.lexer } fn continue_from<P: ParserTrait<'a, S>>(&mut self, other: P) { let (mut lexer, context, errors, sc) = other.into_parts(); lexer.set_in_type(true); self.lexer = lexer; self.context = context; self.errors = errors; self.sc = sc; } fn add_error(&mut self, error: SyntaxError) { self.errors.push(error) } fn env(&self) -> &ParserEnv { &self.env } fn sc_mut(&mut self) -> &mut S { &mut self.sc } fn drain_skipped_tokens(&mut self) -> std::vec::Drain<'_, Token<S>> { self.context.skipped_tokens.drain(..) 
} fn skipped_tokens(&self) -> &[Token<S>] { &self.context.skipped_tokens } fn context_mut(&mut self) -> &mut Context<Token<S>> { &mut self.context } fn context(&self) -> &Context<Token<S>> { &self.context } } impl<'a, S> TypeParser<'a, S> where S: SmartConstructors, S::Output: NodeType, { fn with_expression_parser<F, U>(&mut self, f: F) -> U where F: Fn(&mut ExpressionParser<'a, S>) -> U, { let mut lexer = self.lexer.clone(); lexer.set_in_type(false); let mut expression_parser: ExpressionParser<'_, S> = ExpressionParser::make( lexer, self.env.clone(), self.context.clone(), self.errors.clone(), self.sc.clone(), ); let res = f(&mut expression_parser); self.continue_from(expression_parser); res } fn parse_expression(&mut self) -> S::Output { self.with_expression_parser(|p: &mut ExpressionParser<'a, S>| p.parse_expression()) } fn with_decl_parser<F, U>(&mut self, f: F) -> U where F: Fn(&mut DeclarationParser<'a, S>) -> U, { let mut lexer = self.lexer.clone(); lexer.set_in_type(false); let mut declaration_parser: DeclarationParser<'_, S> = DeclarationParser::make( lexer, self.env.clone(), self.context.clone(), self.errors.clone(), self.sc.clone(), ); let res = f(&mut declaration_parser); self.continue_from(declaration_parser); res } // parse type specifier but return missing if you fail to parse pub fn parse_type_specifier_opt(&mut self, allow_var: bool, allow_attr: bool) -> S::Output { // Strictly speaking, "mixed" is a nullable type specifier. We parse it as // a simple type specifier here. let mut parser1 = self.clone(); let token = parser1.next_xhp_class_name_or_other_token(); let new_attr_syntax = self.env.allow_new_attribute_syntax; let type_spec = match token.kind() { | TokenKind::Var if allow_var => { self.continue_from(parser1); let token = self.sc_mut().make_token(token); self.sc_mut().make_simple_type_specifier(token) } | TokenKind::This => self.parse_simple_type_or_type_constant(), | TokenKind::SelfToken => self.parse_type_constant(), // Any keyword-type could be a non-keyword type, because PHP, so check whether // these have generics. | TokenKind::Double // TODO: Specification does not mention double; fix it. 
| TokenKind::Bool | TokenKind::Boolean | TokenKind::Binary | TokenKind::Int | TokenKind::Integer | TokenKind::Float | TokenKind::Real | TokenKind::Num | TokenKind::String | TokenKind::Arraykey | TokenKind::Void | TokenKind::Noreturn | TokenKind::Resource | TokenKind::Mixed | TokenKind::NullLiteral | TokenKind::Name => self.parse_simple_type_or_type_constant_or_generic(), | TokenKind::Namespace => { let name = self.scan_name_or_qualified_name(); self.parse_remaining_simple_type_or_type_constant_or_generic(name) } | TokenKind::Backslash => { self.continue_from(parser1); let pos = self.pos(); let missing = self.sc_mut().make_missing(pos); let token = self.sc_mut().make_token(token); let name = self.scan_qualified_name(missing, token); self.parse_remaining_simple_type_or_type_constant_or_generic(name) } | TokenKind::Category | TokenKind::XHP | TokenKind::XHPClassName => self.parse_simple_type_or_type_constant_or_generic(), | TokenKind::Darray => self.parse_darray_type_specifier(), | TokenKind::Varray => self.parse_varray_type_specifier(), | TokenKind::Vec => self.parse_vec_type_specifier(), | TokenKind::Dict => self.parse_dictionary_type_specifier(), | TokenKind::Keyset => self.parse_keyset_type_specifier(), | TokenKind::Tuple => self.parse_tuple_type_explicit_specifier(), | TokenKind::LeftParen => self.parse_tuple_or_closure_type_specifier(), | TokenKind::Shape => self.parse_shape_specifier(), | TokenKind::Question => self.parse_nullable_type_specifier(), | TokenKind::Tilde => self.parse_like_type_specifier(), | TokenKind::At if !new_attr_syntax => self.parse_soft_type_specifier(), | TokenKind::At if new_attr_syntax => self.parse_attributized_specifier(), | TokenKind::LessThanLessThan if allow_attr => self.parse_attributized_specifier(), | TokenKind::Classname => self.parse_classname_type_specifier(), | _ => { let pos = self.pos(); self.sc_mut().make_missing(pos) } }; match self.peek_token_kind() { TokenKind::With if self.peek_token_kind_with_lookahead(1) == TokenKind::LeftBrace => { self.parse_type_refinement(type_spec) } _ => type_spec, } } // TODO: What about something like for::for? Is that a legal type constant? pub fn parse_type_specifier(&mut self, allow_var: bool, allow_attr: bool) -> S::Output { let result = self.parse_type_specifier_opt(allow_var, allow_attr); if result.is_missing() { self.with_error_on_whole_token(Errors::error1007); let token = self.next_xhp_class_name_or_other_token(); let token = self.sc_mut().make_token(token); self.sc_mut().make_error(token) } else { result } } fn parse_type_refinement(&mut self, type_spec: S::Output) -> S::Output { // SPEC // type-refinement: // type-specifier with { type-refinement-members_opt ;opt } // // type-refinement-members: // type-refinement-member ; type-refinement-members let keyword = self.assert_token(TokenKind::With); let left_brace = self.require_left_brace(); let members = self.with_decl_parser( |x: &mut DeclarationParser<'a, S>| match x.peek_token_kind() { TokenKind::Type | TokenKind::Ctx => { // Note: blindly calling this without matching on expected token first // would result in confusing error "expected `}`" in error cases such as // `... with { const ... 
}` x.parse_separated_list( TokenKind::Semicolon, SeparatedListKind::TrailingAllowed, TokenKind::RightBrace, Errors::expected_refinement_member, |x| x.parse_refinement_member(), ) .0 } tk => { if tk != TokenKind::RightBrace { x.with_error(Errors::expected_refinement_member); } let pos = x.pos(); x.sc_mut().make_missing(pos) } }, ); let right_brace = self.require_right_brace(); let refinement = self.sc_mut().make_type_refinement( type_spec, keyword, left_brace, members, right_brace, ); if self.peek_token_kind() == TokenKind::With { self.with_error(Errors::cannot_chain_type_refinements); // ERROR RECOVERY: nest chained refinement return self.parse_type_refinement(refinement); } refinement } // SPEC // type-constant-type-name: // name :: name // self :: name // this :: name // parent :: name // type-constant-type-name :: name fn parse_remaining_type_constant(&mut self, left: S::Output) -> S::Output { let separator = self.fetch_token(); let right = self.next_token_as_name(); if right.kind() == TokenKind::Name { let right = self.sc_mut().make_token(right); let syntax = self.sc_mut().make_type_constant(left, separator, right); let token = self.peek_token(); if token.kind() == TokenKind::ColonColon { self.parse_remaining_type_constant(syntax) } else { syntax } } else { // ERROR RECOVERY: Assume that the thing following the :: // that is not a name belongs to the next thing to be // parsed; treat the name as missing. self.with_error(Errors::error1004); let pos = self.pos(); let missing = self.sc_mut().make_missing(pos); self.sc_mut().make_type_constant(left, separator, missing) } } fn parse_remaining_generic(&mut self, name: S::Output) -> S::Output { let (arguments, _) = self.parse_generic_type_argument_list(); self.sc_mut().make_generic_type_specifier(name, arguments) } pub fn parse_simple_type_or_type_constant(&mut self) -> S::Output { let name = self.next_xhp_class_name_or_other(); self.parse_remaining_simple_type_or_type_constant(name) } pub fn parse_simple_type_or_generic(&mut self) -> S::Output { let name = self.next_xhp_class_name_or_other(); self.parse_remaining_simple_type_or_generic(name) } fn parse_type_constant(&mut self) -> S::Output { let name = self.next_xhp_class_name_or_other(); let token = self.peek_token(); match token.kind() { TokenKind::ColonColon => self.parse_remaining_type_constant(name), _ => { self.with_error(Errors::error1047); self.sc_mut().make_error(name) } } } fn parse_remaining_simple_type_or_type_constant(&mut self, name: S::Output) -> S::Output { let token = self.peek_token(); match token.kind() { TokenKind::ColonColon => self.parse_remaining_type_constant(name), _ => self.sc_mut().make_simple_type_specifier(name), } } fn parse_simple_type_or_type_constant_or_generic(&mut self) -> S::Output { let name = self.next_xhp_class_name_or_other(); self.parse_remaining_simple_type_or_type_constant_or_generic(name) } pub fn parse_remaining_type_specifier(&mut self, name: S::Output) -> S::Output { self.parse_remaining_simple_type_or_type_constant_or_generic(name) } fn parse_remaining_simple_type_or_type_constant_or_generic( &mut self, name: S::Output, ) -> S::Output { match self.peek_token_kind_with_possible_attributized_type_list() { TokenKind::LessThan => self.parse_remaining_generic(name), _ => self.parse_remaining_simple_type_or_type_constant(name), } } fn parse_remaining_simple_type_or_generic(&mut self, name: S::Output) -> S::Output { match self.peek_token_kind_with_possible_attributized_type_list() { TokenKind::LessThan => self.parse_remaining_generic(name), _ => 
self.sc_mut().make_simple_type_specifier(name), } } // SPEC // generic-type-constraint-list: // generic-type-constraint // generic-type-constraint generic-type-constraint-list // // generic-type-constraint: // as type-specifier // super type-specifier // // TODO: SPEC ISSUES: // https://github.com/hhvm/hack-langspec/issues/83 // // TODO: Do we also need to allow "= type-specifier" here? fn parse_generic_type_constraint_opt(&mut self) -> Option<S::Output> { let mut parser1 = self.clone(); let token = parser1.next_token(); match token.kind() { TokenKind::As | TokenKind::Super => { self.continue_from(parser1); let constraint_token = self.sc_mut().make_token(token); let matched_type = self.parse_type_specifier(false, true); let type_constraint = self .sc_mut() .make_type_constraint(constraint_token, matched_type); Some(type_constraint) } _ => None, } } fn parse_variance_opt(&mut self) -> S::Output { match self.peek_token_kind() { TokenKind::Plus | TokenKind::Minus => self.fetch_token(), _ => { let pos = self.pos(); self.sc_mut().make_missing(pos) } } } // SPEC // generic-type-parameter: // generic-type-parameter-reified-opt generic-type-parameter-variance-opt // name type-parameter-list? generic-type-constraint-list-opt // // generic-type-parameter-variance: // + // - // // TODO: SPEC ISSUE: We allow any number of type constraints, not just zero // or one as indicated in the spec. // https://github.com/hhvm/hack-langspec/issues/83 // TODO: Update the spec with reified pub fn parse_type_parameter(&mut self) -> S::Output { let attributes = self.with_decl_parser(|x: &mut DeclarationParser<'a, S>| { x.parse_attribute_specification_opt() }); let reified = self.optional_token(TokenKind::Reify); let variance = self.parse_variance_opt(); let type_name = self.require_name_allow_all_keywords(); let param_params = self.parse_generic_type_parameter_list_opt(); let constraints = self.parse_list_until_none(|x: &mut Self| x.parse_generic_type_constraint_opt()); self.sc_mut().make_type_parameter( attributes, reified, variance, type_name, param_params, constraints, ) } pub fn parse_generic_type_parameter_list_opt(&mut self) -> S::Output { match self.peek_token_kind_with_possible_attributized_type_list() { TokenKind::LessThan => self.parse_generic_type_parameter_list(), _ => { let pos = self.pos(); self.sc_mut().make_missing(pos) } } } // SPEC // type-parameter-list: // < generic-type-parameters ,-opt > // // generic-type-parameters: // generic-type-parameter // generic-type-parameter , generic-type-parameter // pub fn parse_generic_type_parameter_list(&mut self) -> S::Output { let left = self.assert_left_angle_in_type_list_with_possible_attribute(); let (params, _) = self.parse_comma_list_allow_trailing( TokenKind::GreaterThan, Errors::error1007, |x: &mut Self| x.parse_type_parameter(), ); let right = self.require_right_angle(); self.sc_mut().make_type_parameters(left, params, right) } fn parse_type_list(&mut self, close_kind: TokenKind) -> S::Output { // SPEC: // type-specifier-list: // type-specifiers ,opt // // type-specifiers: // type-specifier // type-specifiers , type-specifier let (items, _) = self.parse_comma_list_allow_trailing(close_kind, Errors::error1007, |x: &mut Self| { x.parse_type_specifier(false, true) }); items } // SPEC // // TODO: Add this to the specification. // (This work is tracked by task T22582676.) 
// // call-convention: // inout fn parse_call_convention_opt(&mut self) -> S::Output { match self.peek_token_kind() { TokenKind::Inout => { let token = self.next_token(); self.sc_mut().make_token(token) } _ => { let pos = self.pos(); self.sc_mut().make_missing(pos) } } } // SPEC // // TODO: Add this to the specification. // (This work is tracked by task T85043839.) // // readonly: // readonly fn parse_readonly_opt(&mut self) -> S::Output { match self.peek_token_kind() { TokenKind::Readonly => { let token = self.next_token(); self.sc_mut().make_token(token) } _ => { let pos = self.pos(); self.sc_mut().make_missing(pos) } } } // SPEC // // TODO: Add this to the specification. // (This work is tracked by task T22582676.) // // closure-param-type-specifier-list: // closure-param-type-specifiers ,opt // // closure-param-type-specifiers: // closure-param-type-specifier // closure-param-type-specifiers , closure-param-type-specifier fn parse_closure_param_list(&mut self, close_kind: TokenKind) -> S::Output { let (items, _) = self.parse_comma_list_allow_trailing(close_kind, Errors::error1007, |x: &mut Self| { x.parse_closure_param_type_or_ellipsis() }); items } // SPEC // // TODO: Add this to the specification. // (This work is tracked by task T22582676.) // // ERROR RECOVERY: Variadic params cannot be declared inout; this error is // caught in a later pass. // // closure-param-type-specifier: // call-convention-opt type-specifier // type-specifier ... // ... fn parse_closure_param_type_or_ellipsis(&mut self) -> S::Output { match self.peek_token_kind() { TokenKind::DotDotDot => { let pos = self.pos(); let missing1 = self.sc_mut().make_missing(pos); let pos = self.pos(); let missing2 = self.sc_mut().make_missing(pos); let token = self.next_token(); let token = self.sc_mut().make_token(token); self.sc_mut() .make_variadic_parameter(missing1, missing2, token) } _ => { let callconv = self.parse_call_convention_opt(); let readonly = self.parse_readonly_opt(); let ts = self.parse_type_specifier(false, true); match self.peek_token_kind() { TokenKind::DotDotDot => { let token = self.next_token(); let token = self.sc_mut().make_token(token); self.sc_mut().make_variadic_parameter(callconv, ts, token) } _ => self .sc_mut() .make_closure_parameter_type_specifier(callconv, readonly, ts), } } } } fn parse_optionally_reified_type(&mut self) -> S::Output { if self.peek_token_kind() == TokenKind::Reify { let token = self.next_token(); let reified_kw = self.sc_mut().make_token(token); let type_argument = self.parse_type_specifier(false, true); self.sc_mut() .make_reified_type_argument(reified_kw, type_argument) } else { self.parse_type_specifier(false, true) } } pub fn parse_generic_type_argument_list(&mut self) -> (S::Output, bool) { // SPEC: // generic-type-argument-list: // < generic-type-arguments ,opt > // // generic-type-arguments: // generic-type-argument // generic-type-arguments , generic-type-argument // // TODO: SPEC ISSUE // https://github.com/hhvm/hack-langspec/issues/84 // The specification indicates that "noreturn" is only syntactically valid // as a return type hint, but this is plainly wrong because // Awaitable<noreturn> is a legal type. Likely the correct rule will be to // allow noreturn as a type argument, and then a later semantic analysis // pass can determine when it is being used incorrectly. // // For now, we extend the specification to allow return types, not just // ordinary types. 
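// For instance, per the note above, `Awaitable<noreturn>` should be accepted
// here, as should an ordinary argument list such as `dict<int, vec<string>>`
// (illustrative examples, not taken from the spec).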
let open_angle = self.assert_left_angle_in_type_list_with_possible_attribute(); let (args, no_arg_is_missing) = self.parse_comma_list_allow_trailing( TokenKind::GreaterThan, Errors::error1007, |x: &mut Self| x.parse_optionally_reified_type(), ); match self.peek_token_kind() { TokenKind::GreaterThan => { let close_angle = self.assert_token(TokenKind::GreaterThan); let result = self .sc_mut() .make_type_arguments(open_angle, args, close_angle); (result, no_arg_is_missing) } _ => { // ERROR RECOVERY: Don't eat the token that is in the place of the // missing > or ,. Assume that it is the > that is missing and // try to parse whatever is coming after the type. self.with_error(Errors::error1014); let pos = self.pos(); let missing = self.sc_mut().make_missing(pos); let result = self.sc_mut().make_type_arguments(open_angle, args, missing); (result, no_arg_is_missing) } } } fn parse_darray_type_specifier(&mut self) -> S::Output { // darray<type, type> let array_token = self.assert_token(TokenKind::Darray); if self.peek_token_kind_with_possible_attributized_type_list() != TokenKind::LessThan { self.sc_mut().make_simple_type_specifier(array_token) } else { let left_angle = self.assert_left_angle_in_type_list_with_possible_attribute(); let key_type = self.parse_type_specifier(false, true); let comma = self.require_comma(); let value_type = self.parse_type_specifier(false, true); let optional_comma = self.optional_token(TokenKind::Comma); let right_angle = self.require_right_angle(); self.sc_mut().make_darray_type_specifier( array_token, left_angle, key_type, comma, value_type, optional_comma, right_angle, ) } } fn parse_varray_type_specifier(&mut self) -> S::Output { // varray<type> let array_token = self.assert_token(TokenKind::Varray); if self.peek_token_kind_with_possible_attributized_type_list() != TokenKind::LessThan { self.sc_mut().make_simple_type_specifier(array_token) } else { let left_angle = self.assert_left_angle_in_type_list_with_possible_attribute(); let value_type = self.parse_type_specifier(false, true); let optional_comma = self.optional_token(TokenKind::Comma); let right_angle = self.require_right_angle(); self.sc_mut().make_varray_type_specifier( array_token, left_angle, value_type, optional_comma, right_angle, ) } } fn parse_vec_type_specifier(&mut self) -> S::Output { // vec < type-specifier > // TODO: Should we allow a trailing comma? // TODO: Add this to the specification // ERROR RECOVERY: If there is no type argument list then just make // this a simple type. TODO: Should this be an error at parse time? what // about at type checking time? let keyword = self.assert_token(TokenKind::Vec); if self.peek_token_kind_with_possible_attributized_type_list() != TokenKind::LessThan { self.sc_mut().make_simple_type_specifier(keyword) } else { let left = self.assert_left_angle_in_type_list_with_possible_attribute(); let t = self.parse_type_specifier(false, true); let optional_comma = self.optional_token(TokenKind::Comma); let right = self.require_right_angle(); self.sc_mut() .make_vector_type_specifier(keyword, left, t, optional_comma, right) } } fn parse_keyset_type_specifier(&mut self) -> S::Output { // keyset < type-specifier > // TODO: Should we allow a trailing comma? // TODO: Add this to the specification // ERROR RECOVERY: If there is no type argument list then just make // this a simple type. TODO: Should this be an error at parse time? what // about at type checking time?
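// For example, `keyset<string>` parses as a keyset type specifier, while a
// bare `keyset` with no `<` falls back to a simple type specifier per the
// ERROR RECOVERY note above.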
let keyword = self.assert_token(TokenKind::Keyset); if self.peek_token_kind_with_possible_attributized_type_list() != TokenKind::LessThan { self.sc_mut().make_simple_type_specifier(keyword) } else { let left = self.assert_left_angle_in_type_list_with_possible_attribute(); let t = self.parse_type_specifier(false, true); let comma = self.optional_token(TokenKind::Comma); let right = self.require_right_angle(); self.sc_mut() .make_keyset_type_specifier(keyword, left, t, comma, right) } } fn parse_tuple_type_explicit_specifier(&mut self) -> S::Output { // tuple < type-specifier-list > // TODO: Add this to the specification let keyword = self.assert_token(TokenKind::Tuple); let left_angle = if self.peek_next_partial_token_is_triple_left_angle() { self.assert_left_angle_in_type_list_with_possible_attribute() } else { self.require_left_angle() }; let args = self.parse_type_list(TokenKind::GreaterThan); let mut parser1 = self.clone(); let right_angle = parser1.next_token(); if right_angle.kind() == TokenKind::GreaterThan { self.continue_from(parser1); let token = self.sc_mut().make_token(right_angle); self.sc_mut() .make_tuple_type_explicit_specifier(keyword, left_angle, args, token) } else { // ERROR RECOVERY: Don't eat the token that is in the place of the // missing > or ,. Assume that it is the > that is missing and // try to parse whatever is coming after the type. self.with_error(Errors::error1022); let pos = self.pos(); let right_angle = self.sc_mut().make_missing(pos); self.sc_mut() .make_tuple_type_explicit_specifier(keyword, left_angle, args, right_angle) } } fn parse_dictionary_type_specifier(&mut self) -> S::Output { // dict < type-specifier , type-specifier > // // TODO: Add this to the specification // // Though we require there to be exactly two items, we actually parse // an arbitrary comma-separated list here. // // TODO: Give an error in a later pass if there are not exactly two members. // // ERROR RECOVERY: If there is no type argument list then just make this // a simple type. TODO: Should this be an error at parse time? what // about at type checking time? let keyword = self.assert_token(TokenKind::Dict); if self.peek_token_kind_with_possible_attributized_type_list() != TokenKind::LessThan { self.sc_mut().make_simple_type_specifier(keyword) } else { // TODO: This allows "noreturn" as a type argument. Should we // disallow that at parse time?
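// For example, `dict<string, int>` is the intended two-argument form; a
// one-argument `dict<int>` (illustrative) also parses here, with the arity
// error deferred to the later pass mentioned above.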
let left = self.assert_left_angle_in_type_list_with_possible_attribute(); let (arguments, _) = self.parse_comma_list_allow_trailing( TokenKind::GreaterThan, Errors::error1007, |x: &mut Self| x.parse_return_type(), ); let right = self.require_right_angle(); self.sc_mut() .make_dictionary_type_specifier(keyword, left, arguments, right) } } fn parse_tuple_or_closure_type_specifier(&mut self) -> S::Output { let mut parser1 = self.clone(); let _ = parser1.assert_token(TokenKind::LeftParen); let token = parser1.peek_token(); match token.kind() { TokenKind::Readonly | TokenKind::Function => self.parse_closure_type_specifier(), _ => self.parse_tuple_or_union_or_intersection_type_specifier(), } } pub fn parse_contexts(&mut self) -> S::Output { if self.peek_token_kind() == TokenKind::LeftBracket { let (left_bracket, types, right_bracket) = self .parse_bracketted_comma_list_opt_allow_trailing(|x: &mut Self| { match x.peek_token_kind() { TokenKind::Ctx => { let ctx = x.assert_token(TokenKind::Ctx); let var = x.with_expression_parser(|p: &mut ExpressionParser<'a, S>| { p.parse_simple_variable() }); x.sc_mut().make_function_ctx_type_specifier(ctx, var) } TokenKind::Variable => { /* Keeping this isolated from the type constant parsing code for now */ let var = x.assert_token(TokenKind::Variable); let colcol = x.require_coloncolon(); let name = x.require_name(); x.sc_mut().make_type_constant(var, colcol, name) } _ => x.parse_type_specifier(false, false), } }); self.sc_mut() .make_contexts(left_bracket, types, right_bracket) } else { let pos = self.pos(); self.sc_mut().make_missing(pos) } } fn parse_closure_type_specifier(&mut self) -> S::Output { // SPEC // // TODO: Update the specification with closure-param-type-specifier-list-opt. // (This work is tracked by task T22582676.) // // TODO: Update grammar for inout parameters. // (This work is tracked by task T22582715.) // // TODO: Update grammar for readonly keyword // (This work is tracked by task T87253111.) // closure-type-specifier: // ( readonly-opt // function ( \ // closure-param-type-specifier-list-opt \ // ) : type-specifier ) // // TODO: Error recovery is pretty weak here. We could be smarter. 
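// An illustrative closure type matching the grammar above (a hypothetical
// example, including the optional call-convention and readonly markers):
//   (readonly function(inout string, int): void)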
let olp = self.fetch_token(); let ro = self.parse_readonly_opt(); let fnc = self.fetch_token(); let ilp = self.require_left_paren(); let (pts, irp) = if self.peek_token_kind() == TokenKind::RightParen { let pos = self.pos(); let missing = self.sc_mut().make_missing(pos); let token = self.next_token(); let token = self.sc_mut().make_token(token); (missing, token) } else { // TODO add second pass checking to ensure ellipsis is the last arg let pts = self.parse_closure_param_list(TokenKind::RightParen); let irp = self.require_right_paren(); (pts, irp) }; let ctxs = self.parse_contexts(); let col = self.require_colon(); let readonly = self.parse_readonly_opt(); let ret = self.parse_type_specifier(false, true); let orp = self.require_right_paren(); self.sc_mut() .make_closure_type_specifier(olp, ro, fnc, ilp, pts, irp, ctxs, col, readonly, ret, orp) } fn parse_tuple_or_union_or_intersection_type_specifier(&mut self) -> S::Output { // SPEC // tuple-union-intersection-type-specifier: // ( type-specifier , type-specifier-list ) // ( type-specifier & intersection-type-specifier-list ) // ( type-specifier | union-type-specifier-list ) // type-specifier-list: // type-specifiers ,opt // type-specifiers // type-specifier // type-specifiers , type-specifier // intersection-type-specifier-list: // type-specifier // intersection-type-specifier-list & type-specifier // union-type-specifier-list: // type-specifier // union-type-specifier-list | type-specifier // TODO: Here we parse a type list with one or more items, but the grammar // actually requires a type list with two or more items. Give an error in // a later pass if there is only one item here. let left_paren = self.assert_token(TokenKind::LeftParen); let (args, _, separator_kind) = self.parse_separated_list_predicate( |x| x == TokenKind::Bar || x == TokenKind::Ampersand || x == TokenKind::Comma, SeparatedListKind::TrailingAllowed, |x| x == TokenKind::RightParen, Errors::error1007, |x: &mut Self| x.parse_type_specifier(false, true), ); if self.peek_token_kind() == TokenKind::RightParen { let right_paren = self.next_token(); let token = self.sc_mut().make_token(right_paren); match separator_kind { TokenKind::Bar => self .sc_mut() .make_union_type_specifier(left_paren, args, token), TokenKind::Ampersand => self .sc_mut() .make_intersection_type_specifier(left_paren, args, token), _ => self .sc_mut() .make_tuple_type_specifier(left_paren, args, token), } } else { // ERROR RECOVERY: Don't eat the token that is in the place of the // missing ) or ,. Assume that it is the ) that is missing and // try to parse whatever is coming after the type. self.with_error(Errors::error1022); let pos = self.pos(); let missing = self.sc_mut().make_missing(pos); self.sc_mut() .make_tuple_type_specifier(left_paren, args, missing) } } fn parse_nullable_type_specifier(&mut self) -> S::Output { // SPEC: // nullable-type-specifier: // ? type-specifier // mixed // // Note that we parse "mixed" as a simple type specifier, even though // technically it is classified as a nullable type specifier by the grammar. // Note that it is perfectly legal to have trivia between the ? and the // underlying type. 
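// For example, `?int` and `? /* trivia */ int` (illustrative) both parse as
// a nullable type specifier, per the trivia note above.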
let question = self.assert_token(TokenKind::Question); let nullable_type = self.parse_type_specifier(false, true); self.sc_mut() .make_nullable_type_specifier(question, nullable_type) } fn parse_like_type_specifier(&mut self) -> S::Output { // SPEC: // like-type-specifier: // ~ type-specifier // // Note that it is perfectly legal to have trivia between the ~ and the // underlying type. let tilde = self.assert_token(TokenKind::Tilde); let like_type = self.parse_type_specifier(false, true); self.sc_mut().make_like_type_specifier(tilde, like_type) } fn parse_soft_type_specifier(&mut self) -> S::Output { // SPEC (Draft) // soft-type-specifier: // @ type-specifier // // TODO: The spec does not mention this type grammar. Work out where and // when it is legal, and what the exact semantics are, and put it in the spec. // Add an error pass if necessary to identify illegal usages of this type. // // Note that it is legal for trivia to come between the @ and the type. let soft_at = self.assert_token(TokenKind::At); let soft_type = self.parse_type_specifier(false, true); self.sc_mut().make_soft_type_specifier(soft_at, soft_type) } fn parse_attributized_specifier(&mut self) -> S::Output { // SPEC // attributized-specifier: // attribute-specification-opt type-specifier let attribute_spec_opt = self.with_decl_parser(|x: &mut DeclarationParser<'a, S>| { x.parse_attribute_specification_opt() }); let attributized_type = self.parse_type_specifier(false, true); self.sc_mut() .make_attributized_specifier(attribute_spec_opt, attributized_type) } fn parse_classname_type_specifier(&mut self) -> S::Output { // SPEC // classname-type-specifier: // classname // classname < qualified-name generic-type-argument-list-opt > // // TODO: We parse any type as the class name type; we should write an // error detection pass later that determines when this is a bad type. // // TODO: Is this grammar correct? In particular, can the name have a // scope resolution operator (::) in it? Find out and update the spec if // this is permitted. // TODO ERROR RECOVERY is unsophisticated here. let classname = self.fetch_token(); match self.peek_token_kind() { TokenKind::LessThan => { let left_angle = self.require_left_angle(); let classname_type = self.parse_type_specifier(false, true); let optional_comma = self.optional_token(TokenKind::Comma); let right_angle = self.require_right_angle(); self.sc_mut().make_classname_type_specifier( classname, left_angle, classname_type, optional_comma, right_angle, ) } _ => { let pos = self.pos(); let missing1 = self.sc_mut().make_missing(pos); let pos = self.pos(); let missing2 = self.sc_mut().make_missing(pos); let pos = self.pos(); let missing3 = self.sc_mut().make_missing(pos); let pos = self.pos(); let missing4 = self.sc_mut().make_missing(pos); self.sc_mut().make_classname_type_specifier( classname, missing1, missing2, missing3, missing4, ) } } } fn parse_field_specifier(&mut self) -> S::Output { // SPEC // field-specifier: // ?-opt present-field-specifier // present-field-specifier: // single-quoted-string-literal => type-specifier // qualified-name => type-specifier // scope-resolution-expression => type-specifier // TODO: We require that it be either all literals or no literals in the // set of specifiers; make an error reporting pass that detects this. // ERROR RECOVERY: We allow any expression for the left-hand side. // TODO: Make an error-detecting pass that gives an error if the left-hand // side is not a literal or name. 
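// Illustrative field specifiers matching the grammar above (hypothetical
// names):
//   'name' => string       // single-quoted-string-literal key
//   ?'age' => int          // optional field, marked by the leading ?
//   Foo::BAR => bool       // scope-resolution-expression key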
let question = if self.peek_token_kind() == TokenKind::Question { self.assert_token(TokenKind::Question) } else { let pos = self.pos(); self.sc_mut().make_missing(pos) }; let name = self.parse_expression(); let arrow = self.require_arrow(); let field_type = self.parse_type_specifier(false, true); self.sc_mut() .make_field_specifier(question, name, arrow, field_type) } fn parse_shape_specifier(&mut self) -> S::Output { // SPEC // shape-specifier: // shape ( field-specifier-list-opt ) // field-specifier-list: // field-specifiers , ... // field-specifiers ,-opt // field-specifiers: // field-specifier // field-specifiers , field-specifier // // TODO: ERROR RECOVERY is not very sophisticated here. let shape = self.fetch_token(); let lparen = self.require_left_paren(); let is_closing_token = |x: TokenKind| x == TokenKind::RightParen || x == TokenKind::DotDotDot; let fields = self.parse_comma_list_opt_allow_trailing_predicate( is_closing_token, Errors::error1025, |x: &mut Self| x.parse_field_specifier(), ); let ellipsis = if self.peek_token_kind() == TokenKind::DotDotDot { self.assert_token(TokenKind::DotDotDot) } else { let pos = self.pos(); self.sc_mut().make_missing(pos) }; let rparen = self.require_right_paren(); self.sc_mut() .make_shape_type_specifier(shape, lparen, fields, ellipsis, rparen) } pub(crate) fn parse_type_constraint_opt(&mut self, allow_super: bool) -> Option<S::Output> { // SPEC // type-constraint: // as type-specifier // super type-specifier // TODO: What about = ? let make = |x: &mut Self| { let constraint = x.next_token(); let constraint = x.sc_mut().make_token(constraint); let constraint_type = x.parse_type_specifier(false, true); Some(x.sc_mut().make_type_constraint(constraint, constraint_type)) }; let token = self.peek_token_kind(); match token { TokenKind::As => make(self), TokenKind::Super if allow_super => make(self), _ => None, } } pub fn parse_context_constraint_opt(&mut self) -> Option<S::Output> { // SPEC // context-constraint: // as context-list // super context-list match self.peek_token_kind() { TokenKind::As | TokenKind::Super => { let constraint_token = self.next_token(); let constraint_token = self.sc_mut().make_token(constraint_token); let constraint_ctx = self.parse_contexts(); Some( self.sc_mut() .make_context_constraint(constraint_token, constraint_ctx), ) } _ => None, } } pub fn parse_return_type(&mut self) -> S::Output { if self.peek_token_kind() == TokenKind::Noreturn { let token = self.next_token(); self.sc_mut().make_token(token) } else { self.parse_type_specifier(false, true) } } // Same as parse_return_type but can return missing pub fn parse_return_type_opt(&mut self) -> S::Output { if self.peek_token_kind() == TokenKind::Noreturn { let token = self.next_token(); self.sc_mut().make_token(token) } else { self.parse_type_specifier_opt(false, true) } } }
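Note on the tuple/union/intersection dispatch above: the node kind is decided entirely by which separator the list parser saw, with tuple as the fallback for the error-recovery and single-separator-unknown cases. Below is a minimal standalone sketch of that decision rule; the names here (classify, SpecKind) are hypothetical illustrations, not part of the real parser's smart-constructor API, which operates on token kinds via parse_separated_list_predicate rather than on raw strings.

#[derive(Debug, PartialEq)]
enum SpecKind {
    Tuple,
    Union,
    Intersection,
}

// Classify a parenthesized type list by the first separator seen at
// nesting depth 0, mirroring how separator_kind drives the match above.
fn classify(src: &str) -> Option<SpecKind> {
    let inner = src.strip_prefix('(')?.strip_suffix(')')?;
    let mut depth = 0usize;
    for c in inner.chars() {
        match c {
            '(' | '<' => depth += 1,
            ')' | '>' => depth = depth.saturating_sub(1),
            '|' if depth == 0 => return Some(SpecKind::Union),
            '&' if depth == 0 => return Some(SpecKind::Intersection),
            ',' if depth == 0 => return Some(SpecKind::Tuple),
            _ => {}
        }
    }
    // No separator found: fall back to tuple, like the `_` arm above.
    Some(SpecKind::Tuple)
}

fn main() {
    assert_eq!(classify("(int, string)"), Some(SpecKind::Tuple));
    assert_eq!(classify("(A | B | C)"), Some(SpecKind::Union));
    assert_eq!(classify("(A & B)"), Some(SpecKind::Intersection));
}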
TOML
hhvm/hphp/hack/src/parser/ffi_bridge/Cargo.toml
# @generated by autocargo

[package]
name = "parser_ffi"
version = "0.0.0"
edition = "2021"

[lib]
path = "parser_ffi.rs"
test = false
doctest = false
crate-type = ["lib", "staticlib"]

[dependencies]
bumpalo = { version = "3.11.1", features = ["collections"] }
cxx = "1.0.100"
parser_core_types = { version = "0.0.0", path = "../cargo/core_types" }
positioned_full_trivia_parser = { version = "0.0.0", path = "../api/cargo/positioned_full_trivia_parser" }
relative_path = { version = "0.0.0", path = "../../utils/rust/relative_path" }
serde_json = { version = "1.0.100", features = ["float_roundtrip", "unbounded_depth"] }

[build-dependencies]
cxx-build = "1.0.100"
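The cxx-build entry under [build-dependencies] implies a build script that generates the C++ side of the #[cxx::bridge] module in parser_ffi.rs. That script is not part of this dump; the sketch below shows the usual cxx-build wiring as documented by the cxx crate, and is an assumption, not the crate's actual build.rs (autocargo-managed builds may wire this up differently).

// Hypothetical build.rs; cxx_build::bridge is cxx's documented entry
// point and returns a cc::Build for further configuration.
fn main() {
    cxx_build::bridge("parser_ffi.rs")
        .flag_if_supported("-std=c++17")
        .compile("parser_ffi");
    // Re-run codegen when the bridge definition changes.
    println!("cargo:rerun-if-changed=parser_ffi.rs");
}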
Rust
hhvm/hphp/hack/src/parser/ffi_bridge/parser_ffi.rs
use std::path::PathBuf;
use std::sync::Arc;

/**
 * Copyright (c) 2016, Facebook, Inc.
 * All rights reserved.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the "hack" directory of this source tree. An additional
 * grant of patent rights can be found in the PATENTS file in the same
 * directory.
 *
 */
use cxx::CxxString;
use parser_core_types::indexed_source_text::IndexedSourceText;
use parser_core_types::source_text::SourceText;
use relative_path::Prefix;
use relative_path::RelativePath;

#[cxx::bridge]
mod ffi {
    struct ParserEnv {
        codegen: bool,
        hhvm_compat_mode: bool,
        php5_compat_mode: bool,
        allow_new_attribute_syntax: bool,
        enable_xhp_class_modifier: bool,
        disable_xhp_element_mangling: bool,
        disable_xhp_children_declarations: bool,
        interpret_soft_types_as_like_types: bool,
    }

    extern "Rust" {
        pub fn hackc_parse_positioned_full_trivia(
            source_text: &CxxString,
            env: &ParserEnv,
        ) -> Vec<u8>;
    }
}

pub fn hackc_parse_positioned_full_trivia(
    source_text: &CxxString,
    env: &ffi::ParserEnv,
) -> Vec<u8> {
    let filepath = RelativePath::make(Prefix::Dummy, PathBuf::new());
    let env: parser_core_types::parser_env::ParserEnv = ffi::ParserEnv::to_parser_env(env);
    let indexed_source =
        IndexedSourceText::new(SourceText::make(Arc::new(filepath), source_text.as_bytes()));
    let alloc = bumpalo::Bump::new();
    let mut serializer = serde_json::Serializer::new(vec![]);
    match positioned_full_trivia_parser::parse_script_to_json(
        &alloc,
        &mut serializer,
        &indexed_source,
        env,
    ) {
        Ok(()) => serializer.into_inner(),
        Err(_) => {
            // Swallow errors.
            Default::default()
        }
    }
}

impl ffi::ParserEnv {
    fn to_parser_env(env: &ffi::ParserEnv) -> parser_core_types::parser_env::ParserEnv {
        parser_core_types::parser_env::ParserEnv {
            codegen: env.codegen,
            hhvm_compat_mode: env.hhvm_compat_mode,
            php5_compat_mode: env.php5_compat_mode,
            allow_new_attribute_syntax: env.allow_new_attribute_syntax,
            enable_xhp_class_modifier: env.enable_xhp_class_modifier,
            disable_xhp_element_mangling: env.disable_xhp_element_mangling,
            disable_xhp_children_declarations: env.disable_xhp_children_declarations,
            interpret_soft_types_as_like_types: env.interpret_soft_types_as_like_types,
        }
    }
}
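Although the intended consumers call this entry point from C++ through the cxx-generated header, ParserEnv is a plain field struct, so the bridge can also be exercised from Rust. The test below is a hypothetical sketch, not part of the crate: it assumes it lives in the same file (so the private ffi module is in scope) and uses cxx's documented let_cxx_string! macro to build the &CxxString argument.

// Hypothetical in-crate test; the function returns the serialized JSON
// AST on success, or an empty Vec because errors are swallowed above.
#[test]
fn parse_trivial_script() {
    cxx::let_cxx_string!(source = "<?hh\nfunction f(): void {}\n");
    let env = ffi::ParserEnv {
        codegen: false,
        hhvm_compat_mode: false,
        php5_compat_mode: false,
        allow_new_attribute_syntax: false,
        enable_xhp_class_modifier: false,
        disable_xhp_element_mangling: false,
        disable_xhp_children_declarations: false,
        interpret_soft_types_as_like_types: false,
    };
    let json = hackc_parse_positioned_full_trivia(&source, &env);
    assert!(!json.is_empty());
}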
JSON
hhvm/hphp/hack/src/parser/js/full_fidelity_schema.json
{ "description" : "@generated JSON schema of the Hack Full Fidelity Parser AST", "version" : "2023-07-25-0000", "trivia" : [ { "trivia_kind_name" : "WhiteSpace", "trivia_type_name" : "whitespace" }, { "trivia_kind_name" : "EndOfLine", "trivia_type_name" : "end_of_line" }, { "trivia_kind_name" : "DelimitedComment", "trivia_type_name" : "delimited_comment" }, { "trivia_kind_name" : "SingleLineComment", "trivia_type_name" : "single_line_comment" }, { "trivia_kind_name" : "FixMe", "trivia_type_name" : "fix_me" }, { "trivia_kind_name" : "IgnoreError", "trivia_type_name" : "ignore_error" }, { "trivia_kind_name" : "FallThrough", "trivia_type_name" : "fall_through" }, { "trivia_kind_name" : "ExtraTokenError", "trivia_type_name" : "extra_token_error" } ], "tokens" : [ { "token_kind" : "Abstract", "token_text" : "abstract" }, { "token_kind" : "Arraykey", "token_text" : "arraykey" }, { "token_kind" : "As", "token_text" : "as" }, { "token_kind" : "Async", "token_text" : "async" }, { "token_kind" : "Attribute", "token_text" : "attribute" }, { "token_kind" : "Await", "token_text" : "await" }, { "token_kind" : "Backslash", "token_text" : "\\" }, { "token_kind" : "Binary", "token_text" : "binary" }, { "token_kind" : "Bool", "token_text" : "bool" }, { "token_kind" : "Boolean", "token_text" : "boolean" }, { "token_kind" : "Break", "token_text" : "break" }, { "token_kind" : "Case", "token_text" : "case" }, { "token_kind" : "Catch", "token_text" : "catch" }, { "token_kind" : "Category", "token_text" : "category" }, { "token_kind" : "Children", "token_text" : "children" }, { "token_kind" : "Class", "token_text" : "class" }, { "token_kind" : "Classname", "token_text" : "classname" }, { "token_kind" : "Clone", "token_text" : "clone" }, { "token_kind" : "Concurrent", "token_text" : "concurrent" }, { "token_kind" : "Const", "token_text" : "const" }, { "token_kind" : "Construct", "token_text" : "__construct" }, { "token_kind" : "Continue", "token_text" : "continue" }, { "token_kind" : "Ctx", "token_text" : "ctx" }, { "token_kind" : "Darray", "token_text" : "darray" }, { "token_kind" : "Default", "token_text" : "default" }, { "token_kind" : "Dict", "token_text" : "dict" }, { "token_kind" : "Do", "token_text" : "do" }, { "token_kind" : "Double", "token_text" : "double" }, { "token_kind" : "Echo", "token_text" : "echo" }, { "token_kind" : "Else", "token_text" : "else" }, { "token_kind" : "Empty", "token_text" : "empty" }, { "token_kind" : "Endif", "token_text" : "endif" }, { "token_kind" : "Enum", "token_text" : "enum" }, { "token_kind" : "Eval", "token_text" : "eval" }, { "token_kind" : "Exports", "token_text" : "exports" }, { "token_kind" : "Extends", "token_text" : "extends" }, { "token_kind" : "Fallthrough", "token_text" : "fallthrough" }, { "token_kind" : "Float", "token_text" : "float" }, { "token_kind" : "File", "token_text" : "file" }, { "token_kind" : "Final", "token_text" : "final" }, { "token_kind" : "Finally", "token_text" : "finally" }, { "token_kind" : "For", "token_text" : "for" }, { "token_kind" : "Foreach", "token_text" : "foreach" }, { "token_kind" : "Function", "token_text" : "function" }, { "token_kind" : "Global", "token_text" : "global" }, { "token_kind" : "If", "token_text" : "if" }, { "token_kind" : "Implements", "token_text" : "implements" }, { "token_kind" : "Imports", "token_text" : "imports" }, { "token_kind" : "Include", "token_text" : "include" }, { "token_kind" : "Include_once", "token_text" : "include_once" }, { "token_kind" : "Inout", "token_text" : "inout" }, { "token_kind" : 
"Instanceof", "token_text" : "instanceof" }, { "token_kind" : "Insteadof", "token_text" : "insteadof" }, { "token_kind" : "Int", "token_text" : "int" }, { "token_kind" : "Integer", "token_text" : "integer" }, { "token_kind" : "Interface", "token_text" : "interface" }, { "token_kind" : "Is", "token_text" : "is" }, { "token_kind" : "Isset", "token_text" : "isset" }, { "token_kind" : "Keyset", "token_text" : "keyset" }, { "token_kind" : "Lateinit", "token_text" : "lateinit" }, { "token_kind" : "List", "token_text" : "list" }, { "token_kind" : "Match", "token_text" : "match" }, { "token_kind" : "Mixed", "token_text" : "mixed" }, { "token_kind" : "Module", "token_text" : "module" }, { "token_kind" : "Namespace", "token_text" : "namespace" }, { "token_kind" : "New", "token_text" : "new" }, { "token_kind" : "Newctx", "token_text" : "newctx" }, { "token_kind" : "Newtype", "token_text" : "newtype" }, { "token_kind" : "Noreturn", "token_text" : "noreturn" }, { "token_kind" : "Num", "token_text" : "num" }, { "token_kind" : "Parent", "token_text" : "parent" }, { "token_kind" : "Print", "token_text" : "print" }, { "token_kind" : "Private", "token_text" : "private" }, { "token_kind" : "Protected", "token_text" : "protected" }, { "token_kind" : "Public", "token_text" : "public" }, { "token_kind" : "Real", "token_text" : "real" }, { "token_kind" : "Reify", "token_text" : "reify" }, { "token_kind" : "Require", "token_text" : "require" }, { "token_kind" : "Require_once", "token_text" : "require_once" }, { "token_kind" : "Required", "token_text" : "required" }, { "token_kind" : "Resource", "token_text" : "resource" }, { "token_kind" : "Return", "token_text" : "return" }, { "token_kind" : "Self", "token_text" : "self" }, { "token_kind" : "Shape", "token_text" : "shape" }, { "token_kind" : "Static", "token_text" : "static" }, { "token_kind" : "String", "token_text" : "string" }, { "token_kind" : "Super", "token_text" : "super" }, { "token_kind" : "Switch", "token_text" : "switch" }, { "token_kind" : "This", "token_text" : "this" }, { "token_kind" : "Throw", "token_text" : "throw" }, { "token_kind" : "Trait", "token_text" : "trait" }, { "token_kind" : "Try", "token_text" : "try" }, { "token_kind" : "Tuple", "token_text" : "tuple" }, { "token_kind" : "Type", "token_text" : "type" }, { "token_kind" : "Unset", "token_text" : "unset" }, { "token_kind" : "Upcast", "token_text" : "upcast" }, { "token_kind" : "Use", "token_text" : "use" }, { "token_kind" : "Using", "token_text" : "using" }, { "token_kind" : "Var", "token_text" : "var" }, { "token_kind" : "Varray", "token_text" : "varray" }, { "token_kind" : "Vec", "token_text" : "vec" }, { "token_kind" : "Void", "token_text" : "void" }, { "token_kind" : "With", "token_text" : "with" }, { "token_kind" : "Where", "token_text" : "where" }, { "token_kind" : "While", "token_text" : "while" }, { "token_kind" : "Yield", "token_text" : "yield" }, { "token_kind" : "NullLiteral", "token_text" : "null" }, { "token_kind" : "LeftBracket", "token_text" : "[" }, { "token_kind" : "RightBracket", "token_text" : "]" }, { "token_kind" : "LeftParen", "token_text" : "(" }, { "token_kind" : "RightParen", "token_text" : ")" }, { "token_kind" : "LeftBrace", "token_text" : "{" }, { "token_kind" : "RightBrace", "token_text" : "}" }, { "token_kind" : "Dot", "token_text" : "." 
}, { "token_kind" : "MinusGreaterThan", "token_text" : "->" }, { "token_kind" : "PlusPlus", "token_text" : "++" }, { "token_kind" : "MinusMinus", "token_text" : "--" }, { "token_kind" : "StarStar", "token_text" : "**" }, { "token_kind" : "Star", "token_text" : "*" }, { "token_kind" : "Plus", "token_text" : "+" }, { "token_kind" : "Minus", "token_text" : "-" }, { "token_kind" : "Tilde", "token_text" : "~" }, { "token_kind" : "Exclamation", "token_text" : "!" }, { "token_kind" : "Dollar", "token_text" : "$" }, { "token_kind" : "Slash", "token_text" : "/" }, { "token_kind" : "Percent", "token_text" : "%" }, { "token_kind" : "LessThanEqualGreaterThan", "token_text" : "<=>" }, { "token_kind" : "LessThanLessThan", "token_text" : "<<" }, { "token_kind" : "GreaterThanGreaterThan", "token_text" : ">>" }, { "token_kind" : "LessThan", "token_text" : "<" }, { "token_kind" : "GreaterThan", "token_text" : ">" }, { "token_kind" : "LessThanEqual", "token_text" : "<=" }, { "token_kind" : "GreaterThanEqual", "token_text" : ">=" }, { "token_kind" : "EqualEqual", "token_text" : "==" }, { "token_kind" : "EqualEqualEqual", "token_text" : "===" }, { "token_kind" : "ExclamationEqual", "token_text" : "!=" }, { "token_kind" : "ExclamationEqualEqual", "token_text" : "!==" }, { "token_kind" : "Carat", "token_text" : "^" }, { "token_kind" : "Bar", "token_text" : "|" }, { "token_kind" : "Ampersand", "token_text" : "&" }, { "token_kind" : "AmpersandAmpersand", "token_text" : "&&" }, { "token_kind" : "BarBar", "token_text" : "||" }, { "token_kind" : "Question", "token_text" : "?" }, { "token_kind" : "QuestionAs", "token_text" : "?as" }, { "token_kind" : "QuestionColon", "token_text" : "?:" }, { "token_kind" : "QuestionQuestion", "token_text" : "??" }, { "token_kind" : "QuestionQuestionEqual", "token_text" : "??=" }, { "token_kind" : "Colon", "token_text" : ":" }, { "token_kind" : "Semicolon", "token_text" : ";" }, { "token_kind" : "Equal", "token_text" : "=" }, { "token_kind" : "StarStarEqual", "token_text" : "**=" }, { "token_kind" : "StarEqual", "token_text" : "*=" }, { "token_kind" : "SlashEqual", "token_text" : "/=" }, { "token_kind" : "PercentEqual", "token_text" : "%=" }, { "token_kind" : "PlusEqual", "token_text" : "+=" }, { "token_kind" : "MinusEqual", "token_text" : "-=" }, { "token_kind" : "DotEqual", "token_text" : ".=" }, { "token_kind" : "LessThanLessThanEqual", "token_text" : "<<=" }, { "token_kind" : "GreaterThanGreaterThanEqual", "token_text" : ">>=" }, { "token_kind" : "AmpersandEqual", "token_text" : "&=" }, { "token_kind" : "CaratEqual", "token_text" : "^=" }, { "token_kind" : "BarEqual", "token_text" : "|=" }, { "token_kind" : "Comma", "token_text" : "," }, { "token_kind" : "At", "token_text" : "@" }, { "token_kind" : "ColonColon", "token_text" : "::" }, { "token_kind" : "EqualGreaterThan", "token_text" : "=>" }, { "token_kind" : "EqualEqualGreaterThan", "token_text" : "==>" }, { "token_kind" : "QuestionMinusGreaterThan", "token_text" : "?->" }, { "token_kind" : "DotDotDot", "token_text" : "..." }, { "token_kind" : "DollarDollar", "token_text" : "$$" }, { "token_kind" : "BarGreaterThan", "token_text" : "|>" }, { "token_kind" : "SlashGreaterThan", "token_text" : "/>" }, { "token_kind" : "LessThanSlash", "token_text" : "</" }, { "token_kind" : "LessThanQuestion", "token_text" : "<?" 
}, { "token_kind" : "Backtick", "token_text" : "`" }, { "token_kind" : "XHP", "token_text" : "xhp" }, { "token_kind" : "Hash", "token_text" : "#" }, { "token_kind" : "Readonly", "token_text" : "readonly" }, { "token_kind" : "Internal", "token_text" : "internal" }, { "token_kind" : "Package", "token_text" : "package" }, { "token_kind" : "Let", "token_text" : "let" }, { "token_kind" : "ErrorToken", "token_text" : null }, { "token_kind" : "Name", "token_text" : null }, { "token_kind" : "Variable", "token_text" : null }, { "token_kind" : "DecimalLiteral", "token_text" : null }, { "token_kind" : "OctalLiteral", "token_text" : null }, { "token_kind" : "HexadecimalLiteral", "token_text" : null }, { "token_kind" : "BinaryLiteral", "token_text" : null }, { "token_kind" : "FloatingLiteral", "token_text" : null }, { "token_kind" : "SingleQuotedStringLiteral", "token_text" : null }, { "token_kind" : "DoubleQuotedStringLiteral", "token_text" : null }, { "token_kind" : "DoubleQuotedStringLiteralHead", "token_text" : null }, { "token_kind" : "StringLiteralBody", "token_text" : null }, { "token_kind" : "DoubleQuotedStringLiteralTail", "token_text" : null }, { "token_kind" : "HeredocStringLiteral", "token_text" : null }, { "token_kind" : "HeredocStringLiteralHead", "token_text" : null }, { "token_kind" : "HeredocStringLiteralTail", "token_text" : null }, { "token_kind" : "NowdocStringLiteral", "token_text" : null }, { "token_kind" : "BooleanLiteral", "token_text" : null }, { "token_kind" : "XHPCategoryName", "token_text" : null }, { "token_kind" : "XHPElementName", "token_text" : null }, { "token_kind" : "XHPClassName", "token_text" : null }, { "token_kind" : "XHPStringLiteral", "token_text" : null }, { "token_kind" : "XHPBody", "token_text" : null }, { "token_kind" : "XHPComment", "token_text" : null }, { "token_kind" : "Hashbang", "token_text" : null }, { "token_kind" : "EndOfFile", "token_text" : null } ], "AST" : [ { "kind_name" : "EndOfFile", "type_name" : "end_of_file", "description" : "end_of_file", "prefix" : "end_of_file", "fields" : [ { "field_name" : "token" } ] }, { "kind_name" : "Script", "type_name" : "script", "description" : "script", "prefix" : "script", "fields" : [ { "field_name" : "declarations" } ] }, { "kind_name" : "QualifiedName", "type_name" : "qualified_name", "description" : "qualified_name", "prefix" : "qualified_name", "fields" : [ { "field_name" : "parts" } ] }, { "kind_name" : "ModuleName", "type_name" : "module_name", "description" : "module_name", "prefix" : "module_name", "fields" : [ { "field_name" : "parts" } ] }, { "kind_name" : "SimpleTypeSpecifier", "type_name" : "simple_type_specifier", "description" : "simple_type_specifier", "prefix" : "simple_type", "fields" : [ { "field_name" : "specifier" } ] }, { "kind_name" : "LiteralExpression", "type_name" : "literal_expression", "description" : "literal", "prefix" : "literal", "fields" : [ { "field_name" : "expression" } ] }, { "kind_name" : "PrefixedStringExpression", "type_name" : "prefixed_string_expression", "description" : "prefixed_string", "prefix" : "prefixed_string", "fields" : [ { "field_name" : "name" }, { "field_name" : "str" } ] }, { "kind_name" : "PrefixedCodeExpression", "type_name" : "prefixed_code_expression", "description" : "prefixed_code", "prefix" : "prefixed_code", "fields" : [ { "field_name" : "prefix" }, { "field_name" : "left_backtick" }, { "field_name" : "body" }, { "field_name" : "right_backtick" } ] }, { "kind_name" : "VariableExpression", "type_name" : "variable_expression", "description" : 
"variable", "prefix" : "variable", "fields" : [ { "field_name" : "expression" } ] }, { "kind_name" : "PipeVariableExpression", "type_name" : "pipe_variable_expression", "description" : "pipe_variable", "prefix" : "pipe_variable", "fields" : [ { "field_name" : "expression" } ] }, { "kind_name" : "FileAttributeSpecification", "type_name" : "file_attribute_specification", "description" : "file_attribute_specification", "prefix" : "file_attribute_specification", "fields" : [ { "field_name" : "left_double_angle" }, { "field_name" : "keyword" }, { "field_name" : "colon" }, { "field_name" : "attributes" }, { "field_name" : "right_double_angle" } ] }, { "kind_name" : "EnumDeclaration", "type_name" : "enum_declaration", "description" : "enum_declaration", "prefix" : "enum", "fields" : [ { "field_name" : "attribute_spec" }, { "field_name" : "modifiers" }, { "field_name" : "keyword" }, { "field_name" : "name" }, { "field_name" : "colon" }, { "field_name" : "base" }, { "field_name" : "type" }, { "field_name" : "left_brace" }, { "field_name" : "use_clauses" }, { "field_name" : "enumerators" }, { "field_name" : "right_brace" } ] }, { "kind_name" : "EnumUse", "type_name" : "enum_use", "description" : "enum_use", "prefix" : "enum_use", "fields" : [ { "field_name" : "keyword" }, { "field_name" : "names" }, { "field_name" : "semicolon" } ] }, { "kind_name" : "Enumerator", "type_name" : "enumerator", "description" : "enumerator", "prefix" : "enumerator", "fields" : [ { "field_name" : "name" }, { "field_name" : "equal" }, { "field_name" : "value" }, { "field_name" : "semicolon" } ] }, { "kind_name" : "EnumClassDeclaration", "type_name" : "enum_class_declaration", "description" : "enum_class_declaration", "prefix" : "enum_class", "fields" : [ { "field_name" : "attribute_spec" }, { "field_name" : "modifiers" }, { "field_name" : "enum_keyword" }, { "field_name" : "class_keyword" }, { "field_name" : "name" }, { "field_name" : "colon" }, { "field_name" : "base" }, { "field_name" : "extends" }, { "field_name" : "extends_list" }, { "field_name" : "left_brace" }, { "field_name" : "elements" }, { "field_name" : "right_brace" } ] }, { "kind_name" : "EnumClassEnumerator", "type_name" : "enum_class_enumerator", "description" : "enum_class_enumerator", "prefix" : "enum_class_enumerator", "fields" : [ { "field_name" : "modifiers" }, { "field_name" : "type" }, { "field_name" : "name" }, { "field_name" : "initializer" }, { "field_name" : "semicolon" } ] }, { "kind_name" : "AliasDeclaration", "type_name" : "alias_declaration", "description" : "alias_declaration", "prefix" : "alias", "fields" : [ { "field_name" : "attribute_spec" }, { "field_name" : "modifiers" }, { "field_name" : "module_kw_opt" }, { "field_name" : "keyword" }, { "field_name" : "name" }, { "field_name" : "generic_parameter" }, { "field_name" : "constraint" }, { "field_name" : "equal" }, { "field_name" : "type" }, { "field_name" : "semicolon" } ] }, { "kind_name" : "ContextAliasDeclaration", "type_name" : "context_alias_declaration", "description" : "context_alias_declaration", "prefix" : "ctx_alias", "fields" : [ { "field_name" : "attribute_spec" }, { "field_name" : "keyword" }, { "field_name" : "name" }, { "field_name" : "generic_parameter" }, { "field_name" : "as_constraint" }, { "field_name" : "equal" }, { "field_name" : "context" }, { "field_name" : "semicolon" } ] }, { "kind_name" : "CaseTypeDeclaration", "type_name" : "case_type_declaration", "description" : "case_type_declaration", "prefix" : "case_type", "fields" : [ { "field_name" : "attribute_spec" 
}, { "field_name" : "modifiers" }, { "field_name" : "case_keyword" }, { "field_name" : "type_keyword" }, { "field_name" : "name" }, { "field_name" : "generic_parameter" }, { "field_name" : "as" }, { "field_name" : "bounds" }, { "field_name" : "equal" }, { "field_name" : "variants" }, { "field_name" : "semicolon" } ] }, { "kind_name" : "CaseTypeVariant", "type_name" : "case_type_variant", "description" : "case_type_variant", "prefix" : "case_type_variant", "fields" : [ { "field_name" : "bar" }, { "field_name" : "type" } ] }, { "kind_name" : "PropertyDeclaration", "type_name" : "property_declaration", "description" : "property_declaration", "prefix" : "property", "fields" : [ { "field_name" : "attribute_spec" }, { "field_name" : "modifiers" }, { "field_name" : "type" }, { "field_name" : "declarators" }, { "field_name" : "semicolon" } ] }, { "kind_name" : "PropertyDeclarator", "type_name" : "property_declarator", "description" : "property_declarator", "prefix" : "property", "fields" : [ { "field_name" : "name" }, { "field_name" : "initializer" } ] }, { "kind_name" : "NamespaceDeclaration", "type_name" : "namespace_declaration", "description" : "namespace_declaration", "prefix" : "namespace", "fields" : [ { "field_name" : "header" }, { "field_name" : "body" } ] }, { "kind_name" : "NamespaceDeclarationHeader", "type_name" : "namespace_declaration_header", "description" : "namespace_declaration_header", "prefix" : "namespace", "fields" : [ { "field_name" : "keyword" }, { "field_name" : "name" } ] }, { "kind_name" : "NamespaceBody", "type_name" : "namespace_body", "description" : "namespace_body", "prefix" : "namespace", "fields" : [ { "field_name" : "left_brace" }, { "field_name" : "declarations" }, { "field_name" : "right_brace" } ] }, { "kind_name" : "NamespaceEmptyBody", "type_name" : "namespace_empty_body", "description" : "namespace_empty_body", "prefix" : "namespace", "fields" : [ { "field_name" : "semicolon" } ] }, { "kind_name" : "NamespaceUseDeclaration", "type_name" : "namespace_use_declaration", "description" : "namespace_use_declaration", "prefix" : "namespace_use", "fields" : [ { "field_name" : "keyword" }, { "field_name" : "kind" }, { "field_name" : "clauses" }, { "field_name" : "semicolon" } ] }, { "kind_name" : "NamespaceGroupUseDeclaration", "type_name" : "namespace_group_use_declaration", "description" : "namespace_group_use_declaration", "prefix" : "namespace_group_use", "fields" : [ { "field_name" : "keyword" }, { "field_name" : "kind" }, { "field_name" : "prefix" }, { "field_name" : "left_brace" }, { "field_name" : "clauses" }, { "field_name" : "right_brace" }, { "field_name" : "semicolon" } ] }, { "kind_name" : "NamespaceUseClause", "type_name" : "namespace_use_clause", "description" : "namespace_use_clause", "prefix" : "namespace_use", "fields" : [ { "field_name" : "clause_kind" }, { "field_name" : "name" }, { "field_name" : "as" }, { "field_name" : "alias" } ] }, { "kind_name" : "FunctionDeclaration", "type_name" : "function_declaration", "description" : "function_declaration", "prefix" : "function", "fields" : [ { "field_name" : "attribute_spec" }, { "field_name" : "declaration_header" }, { "field_name" : "body" } ] }, { "kind_name" : "FunctionDeclarationHeader", "type_name" : "function_declaration_header", "description" : "function_declaration_header", "prefix" : "function", "fields" : [ { "field_name" : "modifiers" }, { "field_name" : "keyword" }, { "field_name" : "name" }, { "field_name" : "type_parameter_list" }, { "field_name" : "left_paren" }, { "field_name" : 
"parameter_list" }, { "field_name" : "right_paren" }, { "field_name" : "contexts" }, { "field_name" : "colon" }, { "field_name" : "readonly_return" }, { "field_name" : "type" }, { "field_name" : "where_clause" } ] }, { "kind_name" : "Contexts", "type_name" : "contexts", "description" : "contexts", "prefix" : "contexts", "fields" : [ { "field_name" : "left_bracket" }, { "field_name" : "types" }, { "field_name" : "right_bracket" } ] }, { "kind_name" : "WhereClause", "type_name" : "where_clause", "description" : "where_clause", "prefix" : "where_clause", "fields" : [ { "field_name" : "keyword" }, { "field_name" : "constraints" } ] }, { "kind_name" : "WhereConstraint", "type_name" : "where_constraint", "description" : "where_constraint", "prefix" : "where_constraint", "fields" : [ { "field_name" : "left_type" }, { "field_name" : "operator" }, { "field_name" : "right_type" } ] }, { "kind_name" : "MethodishDeclaration", "type_name" : "methodish_declaration", "description" : "methodish_declaration", "prefix" : "methodish", "fields" : [ { "field_name" : "attribute" }, { "field_name" : "function_decl_header" }, { "field_name" : "function_body" }, { "field_name" : "semicolon" } ] }, { "kind_name" : "MethodishTraitResolution", "type_name" : "methodish_trait_resolution", "description" : "methodish_trait_resolution", "prefix" : "methodish_trait", "fields" : [ { "field_name" : "attribute" }, { "field_name" : "function_decl_header" }, { "field_name" : "equal" }, { "field_name" : "name" }, { "field_name" : "semicolon" } ] }, { "kind_name" : "ClassishDeclaration", "type_name" : "classish_declaration", "description" : "classish_declaration", "prefix" : "classish", "fields" : [ { "field_name" : "attribute" }, { "field_name" : "modifiers" }, { "field_name" : "xhp" }, { "field_name" : "keyword" }, { "field_name" : "name" }, { "field_name" : "type_parameters" }, { "field_name" : "extends_keyword" }, { "field_name" : "extends_list" }, { "field_name" : "implements_keyword" }, { "field_name" : "implements_list" }, { "field_name" : "where_clause" }, { "field_name" : "body" } ] }, { "kind_name" : "ClassishBody", "type_name" : "classish_body", "description" : "classish_body", "prefix" : "classish_body", "fields" : [ { "field_name" : "left_brace" }, { "field_name" : "elements" }, { "field_name" : "right_brace" } ] }, { "kind_name" : "TraitUse", "type_name" : "trait_use", "description" : "trait_use", "prefix" : "trait_use", "fields" : [ { "field_name" : "keyword" }, { "field_name" : "names" }, { "field_name" : "semicolon" } ] }, { "kind_name" : "RequireClause", "type_name" : "require_clause", "description" : "require_clause", "prefix" : "require", "fields" : [ { "field_name" : "keyword" }, { "field_name" : "kind" }, { "field_name" : "name" }, { "field_name" : "semicolon" } ] }, { "kind_name" : "ConstDeclaration", "type_name" : "const_declaration", "description" : "const_declaration", "prefix" : "const", "fields" : [ { "field_name" : "attribute_spec" }, { "field_name" : "modifiers" }, { "field_name" : "keyword" }, { "field_name" : "type_specifier" }, { "field_name" : "declarators" }, { "field_name" : "semicolon" } ] }, { "kind_name" : "ConstantDeclarator", "type_name" : "constant_declarator", "description" : "constant_declarator", "prefix" : "constant_declarator", "fields" : [ { "field_name" : "name" }, { "field_name" : "initializer" } ] }, { "kind_name" : "TypeConstDeclaration", "type_name" : "type_const_declaration", "description" : "type_const_declaration", "prefix" : "type_const", "fields" : [ { "field_name" : 
"attribute_spec" }, { "field_name" : "modifiers" }, { "field_name" : "keyword" }, { "field_name" : "type_keyword" }, { "field_name" : "name" }, { "field_name" : "type_parameters" }, { "field_name" : "type_constraints" }, { "field_name" : "equal" }, { "field_name" : "type_specifier" }, { "field_name" : "semicolon" } ] }, { "kind_name" : "ContextConstDeclaration", "type_name" : "context_const_declaration", "description" : "context_const_declaration", "prefix" : "context_const", "fields" : [ { "field_name" : "modifiers" }, { "field_name" : "const_keyword" }, { "field_name" : "ctx_keyword" }, { "field_name" : "name" }, { "field_name" : "type_parameters" }, { "field_name" : "constraint" }, { "field_name" : "equal" }, { "field_name" : "ctx_list" }, { "field_name" : "semicolon" } ] }, { "kind_name" : "DecoratedExpression", "type_name" : "decorated_expression", "description" : "decorated_expression", "prefix" : "decorated_expression", "fields" : [ { "field_name" : "decorator" }, { "field_name" : "expression" } ] }, { "kind_name" : "ParameterDeclaration", "type_name" : "parameter_declaration", "description" : "parameter_declaration", "prefix" : "parameter", "fields" : [ { "field_name" : "attribute" }, { "field_name" : "visibility" }, { "field_name" : "call_convention" }, { "field_name" : "readonly" }, { "field_name" : "type" }, { "field_name" : "name" }, { "field_name" : "default_value" } ] }, { "kind_name" : "VariadicParameter", "type_name" : "variadic_parameter", "description" : "variadic_parameter", "prefix" : "variadic_parameter", "fields" : [ { "field_name" : "call_convention" }, { "field_name" : "type" }, { "field_name" : "ellipsis" } ] }, { "kind_name" : "OldAttributeSpecification", "type_name" : "old_attribute_specification", "description" : "old_attribute_specification", "prefix" : "old_attribute_specification", "fields" : [ { "field_name" : "left_double_angle" }, { "field_name" : "attributes" }, { "field_name" : "right_double_angle" } ] }, { "kind_name" : "AttributeSpecification", "type_name" : "attribute_specification", "description" : "attribute_specification", "prefix" : "attribute_specification", "fields" : [ { "field_name" : "attributes" } ] }, { "kind_name" : "Attribute", "type_name" : "attribute", "description" : "attribute", "prefix" : "attribute", "fields" : [ { "field_name" : "at" }, { "field_name" : "attribute_name" } ] }, { "kind_name" : "InclusionExpression", "type_name" : "inclusion_expression", "description" : "inclusion_expression", "prefix" : "inclusion", "fields" : [ { "field_name" : "require" }, { "field_name" : "filename" } ] }, { "kind_name" : "InclusionDirective", "type_name" : "inclusion_directive", "description" : "inclusion_directive", "prefix" : "inclusion", "fields" : [ { "field_name" : "expression" }, { "field_name" : "semicolon" } ] }, { "kind_name" : "CompoundStatement", "type_name" : "compound_statement", "description" : "compound_statement", "prefix" : "compound", "fields" : [ { "field_name" : "left_brace" }, { "field_name" : "statements" }, { "field_name" : "right_brace" } ] }, { "kind_name" : "ExpressionStatement", "type_name" : "expression_statement", "description" : "expression_statement", "prefix" : "expression_statement", "fields" : [ { "field_name" : "expression" }, { "field_name" : "semicolon" } ] }, { "kind_name" : "MarkupSection", "type_name" : "markup_section", "description" : "markup_section", "prefix" : "markup", "fields" : [ { "field_name" : "hashbang" }, { "field_name" : "suffix" } ] }, { "kind_name" : "MarkupSuffix", "type_name" : 
"markup_suffix", "description" : "markup_suffix", "prefix" : "markup_suffix", "fields" : [ { "field_name" : "less_than_question" }, { "field_name" : "name" } ] }, { "kind_name" : "UnsetStatement", "type_name" : "unset_statement", "description" : "unset_statement", "prefix" : "unset", "fields" : [ { "field_name" : "keyword" }, { "field_name" : "left_paren" }, { "field_name" : "variables" }, { "field_name" : "right_paren" }, { "field_name" : "semicolon" } ] }, { "kind_name" : "DeclareLocalStatement", "type_name" : "declare_local_statement", "description" : "declare_local_statement", "prefix" : "declare_local", "fields" : [ { "field_name" : "keyword" }, { "field_name" : "variable" }, { "field_name" : "colon" }, { "field_name" : "type" }, { "field_name" : "initializer" }, { "field_name" : "semicolon" } ] }, { "kind_name" : "UsingStatementBlockScoped", "type_name" : "using_statement_block_scoped", "description" : "using_statement_block_scoped", "prefix" : "using_block", "fields" : [ { "field_name" : "await_keyword" }, { "field_name" : "using_keyword" }, { "field_name" : "left_paren" }, { "field_name" : "expressions" }, { "field_name" : "right_paren" }, { "field_name" : "body" } ] }, { "kind_name" : "UsingStatementFunctionScoped", "type_name" : "using_statement_function_scoped", "description" : "using_statement_function_scoped", "prefix" : "using_function", "fields" : [ { "field_name" : "await_keyword" }, { "field_name" : "using_keyword" }, { "field_name" : "expression" }, { "field_name" : "semicolon" } ] }, { "kind_name" : "WhileStatement", "type_name" : "while_statement", "description" : "while_statement", "prefix" : "while", "fields" : [ { "field_name" : "keyword" }, { "field_name" : "left_paren" }, { "field_name" : "condition" }, { "field_name" : "right_paren" }, { "field_name" : "body" } ] }, { "kind_name" : "IfStatement", "type_name" : "if_statement", "description" : "if_statement", "prefix" : "if", "fields" : [ { "field_name" : "keyword" }, { "field_name" : "left_paren" }, { "field_name" : "condition" }, { "field_name" : "right_paren" }, { "field_name" : "statement" }, { "field_name" : "else_clause" } ] }, { "kind_name" : "ElseClause", "type_name" : "else_clause", "description" : "else_clause", "prefix" : "else", "fields" : [ { "field_name" : "keyword" }, { "field_name" : "statement" } ] }, { "kind_name" : "TryStatement", "type_name" : "try_statement", "description" : "try_statement", "prefix" : "try", "fields" : [ { "field_name" : "keyword" }, { "field_name" : "compound_statement" }, { "field_name" : "catch_clauses" }, { "field_name" : "finally_clause" } ] }, { "kind_name" : "CatchClause", "type_name" : "catch_clause", "description" : "catch_clause", "prefix" : "catch", "fields" : [ { "field_name" : "keyword" }, { "field_name" : "left_paren" }, { "field_name" : "type" }, { "field_name" : "variable" }, { "field_name" : "right_paren" }, { "field_name" : "body" } ] }, { "kind_name" : "FinallyClause", "type_name" : "finally_clause", "description" : "finally_clause", "prefix" : "finally", "fields" : [ { "field_name" : "keyword" }, { "field_name" : "body" } ] }, { "kind_name" : "DoStatement", "type_name" : "do_statement", "description" : "do_statement", "prefix" : "do", "fields" : [ { "field_name" : "keyword" }, { "field_name" : "body" }, { "field_name" : "while_keyword" }, { "field_name" : "left_paren" }, { "field_name" : "condition" }, { "field_name" : "right_paren" }, { "field_name" : "semicolon" } ] }, { "kind_name" : "ForStatement", "type_name" : "for_statement", "description" : 
"for_statement", "prefix" : "for", "fields" : [ { "field_name" : "keyword" }, { "field_name" : "left_paren" }, { "field_name" : "initializer" }, { "field_name" : "first_semicolon" }, { "field_name" : "control" }, { "field_name" : "second_semicolon" }, { "field_name" : "end_of_loop" }, { "field_name" : "right_paren" }, { "field_name" : "body" } ] }, { "kind_name" : "ForeachStatement", "type_name" : "foreach_statement", "description" : "foreach_statement", "prefix" : "foreach", "fields" : [ { "field_name" : "keyword" }, { "field_name" : "left_paren" }, { "field_name" : "collection" }, { "field_name" : "await_keyword" }, { "field_name" : "as" }, { "field_name" : "key" }, { "field_name" : "arrow" }, { "field_name" : "value" }, { "field_name" : "right_paren" }, { "field_name" : "body" } ] }, { "kind_name" : "SwitchStatement", "type_name" : "switch_statement", "description" : "switch_statement", "prefix" : "switch", "fields" : [ { "field_name" : "keyword" }, { "field_name" : "left_paren" }, { "field_name" : "expression" }, { "field_name" : "right_paren" }, { "field_name" : "left_brace" }, { "field_name" : "sections" }, { "field_name" : "right_brace" } ] }, { "kind_name" : "SwitchSection", "type_name" : "switch_section", "description" : "switch_section", "prefix" : "switch_section", "fields" : [ { "field_name" : "labels" }, { "field_name" : "statements" }, { "field_name" : "fallthrough" } ] }, { "kind_name" : "SwitchFallthrough", "type_name" : "switch_fallthrough", "description" : "switch_fallthrough", "prefix" : "fallthrough", "fields" : [ { "field_name" : "keyword" }, { "field_name" : "semicolon" } ] }, { "kind_name" : "CaseLabel", "type_name" : "case_label", "description" : "case_label", "prefix" : "case", "fields" : [ { "field_name" : "keyword" }, { "field_name" : "expression" }, { "field_name" : "colon" } ] }, { "kind_name" : "DefaultLabel", "type_name" : "default_label", "description" : "default_label", "prefix" : "default", "fields" : [ { "field_name" : "keyword" }, { "field_name" : "colon" } ] }, { "kind_name" : "MatchStatement", "type_name" : "match_statement", "description" : "match_statement", "prefix" : "match_statement", "fields" : [ { "field_name" : "keyword" }, { "field_name" : "left_paren" }, { "field_name" : "expression" }, { "field_name" : "right_paren" }, { "field_name" : "left_brace" }, { "field_name" : "arms" }, { "field_name" : "right_brace" } ] }, { "kind_name" : "MatchStatementArm", "type_name" : "match_statement_arm", "description" : "match_statement_arm", "prefix" : "match_statement_arm", "fields" : [ { "field_name" : "pattern" }, { "field_name" : "arrow" }, { "field_name" : "body" } ] }, { "kind_name" : "ReturnStatement", "type_name" : "return_statement", "description" : "return_statement", "prefix" : "return", "fields" : [ { "field_name" : "keyword" }, { "field_name" : "expression" }, { "field_name" : "semicolon" } ] }, { "kind_name" : "YieldBreakStatement", "type_name" : "yield_break_statement", "description" : "yield_break_statement", "prefix" : "yield_break", "fields" : [ { "field_name" : "keyword" }, { "field_name" : "break" }, { "field_name" : "semicolon" } ] }, { "kind_name" : "ThrowStatement", "type_name" : "throw_statement", "description" : "throw_statement", "prefix" : "throw", "fields" : [ { "field_name" : "keyword" }, { "field_name" : "expression" }, { "field_name" : "semicolon" } ] }, { "kind_name" : "BreakStatement", "type_name" : "break_statement", "description" : "break_statement", "prefix" : "break", "fields" : [ { "field_name" : "keyword" }, { 
"field_name" : "semicolon" } ] }, { "kind_name" : "ContinueStatement", "type_name" : "continue_statement", "description" : "continue_statement", "prefix" : "continue", "fields" : [ { "field_name" : "keyword" }, { "field_name" : "semicolon" } ] }, { "kind_name" : "EchoStatement", "type_name" : "echo_statement", "description" : "echo_statement", "prefix" : "echo", "fields" : [ { "field_name" : "keyword" }, { "field_name" : "expressions" }, { "field_name" : "semicolon" } ] }, { "kind_name" : "ConcurrentStatement", "type_name" : "concurrent_statement", "description" : "concurrent_statement", "prefix" : "concurrent", "fields" : [ { "field_name" : "keyword" }, { "field_name" : "statement" } ] }, { "kind_name" : "SimpleInitializer", "type_name" : "simple_initializer", "description" : "simple_initializer", "prefix" : "simple_initializer", "fields" : [ { "field_name" : "equal" }, { "field_name" : "value" } ] }, { "kind_name" : "AnonymousClass", "type_name" : "anonymous_class", "description" : "anonymous_class", "prefix" : "anonymous_class", "fields" : [ { "field_name" : "class_keyword" }, { "field_name" : "left_paren" }, { "field_name" : "argument_list" }, { "field_name" : "right_paren" }, { "field_name" : "extends_keyword" }, { "field_name" : "extends_list" }, { "field_name" : "implements_keyword" }, { "field_name" : "implements_list" }, { "field_name" : "body" } ] }, { "kind_name" : "AnonymousFunction", "type_name" : "anonymous_function", "description" : "anonymous_function", "prefix" : "anonymous", "fields" : [ { "field_name" : "attribute_spec" }, { "field_name" : "async_keyword" }, { "field_name" : "function_keyword" }, { "field_name" : "left_paren" }, { "field_name" : "parameters" }, { "field_name" : "right_paren" }, { "field_name" : "ctx_list" }, { "field_name" : "colon" }, { "field_name" : "readonly_return" }, { "field_name" : "type" }, { "field_name" : "use" }, { "field_name" : "body" } ] }, { "kind_name" : "AnonymousFunctionUseClause", "type_name" : "anonymous_function_use_clause", "description" : "anonymous_function_use_clause", "prefix" : "anonymous_use", "fields" : [ { "field_name" : "keyword" }, { "field_name" : "left_paren" }, { "field_name" : "variables" }, { "field_name" : "right_paren" } ] }, { "kind_name" : "VariablePattern", "type_name" : "variable_pattern", "description" : "variable_pattern", "prefix" : "variable_pattern", "fields" : [ { "field_name" : "variable" } ] }, { "kind_name" : "ConstructorPattern", "type_name" : "constructor_pattern", "description" : "constructor_pattern", "prefix" : "constructor_pattern", "fields" : [ { "field_name" : "constructor" }, { "field_name" : "left_paren" }, { "field_name" : "members" }, { "field_name" : "right_paren" } ] }, { "kind_name" : "RefinementPattern", "type_name" : "refinement_pattern", "description" : "refinement_pattern", "prefix" : "refinement_pattern", "fields" : [ { "field_name" : "variable" }, { "field_name" : "colon" }, { "field_name" : "specifier" } ] }, { "kind_name" : "LambdaExpression", "type_name" : "lambda_expression", "description" : "lambda_expression", "prefix" : "lambda", "fields" : [ { "field_name" : "attribute_spec" }, { "field_name" : "async" }, { "field_name" : "signature" }, { "field_name" : "arrow" }, { "field_name" : "body" } ] }, { "kind_name" : "LambdaSignature", "type_name" : "lambda_signature", "description" : "lambda_signature", "prefix" : "lambda", "fields" : [ { "field_name" : "left_paren" }, { "field_name" : "parameters" }, { "field_name" : "right_paren" }, { "field_name" : "contexts" }, { "field_name" 
: "colon" }, { "field_name" : "readonly_return" }, { "field_name" : "type" } ] }, { "kind_name" : "CastExpression", "type_name" : "cast_expression", "description" : "cast_expression", "prefix" : "cast", "fields" : [ { "field_name" : "left_paren" }, { "field_name" : "type" }, { "field_name" : "right_paren" }, { "field_name" : "operand" } ] }, { "kind_name" : "ScopeResolutionExpression", "type_name" : "scope_resolution_expression", "description" : "scope_resolution_expression", "prefix" : "scope_resolution", "fields" : [ { "field_name" : "qualifier" }, { "field_name" : "operator" }, { "field_name" : "name" } ] }, { "kind_name" : "MemberSelectionExpression", "type_name" : "member_selection_expression", "description" : "member_selection_expression", "prefix" : "member", "fields" : [ { "field_name" : "object" }, { "field_name" : "operator" }, { "field_name" : "name" } ] }, { "kind_name" : "SafeMemberSelectionExpression", "type_name" : "safe_member_selection_expression", "description" : "safe_member_selection_expression", "prefix" : "safe_member", "fields" : [ { "field_name" : "object" }, { "field_name" : "operator" }, { "field_name" : "name" } ] }, { "kind_name" : "EmbeddedMemberSelectionExpression", "type_name" : "embedded_member_selection_expression", "description" : "embedded_member_selection_expression", "prefix" : "embedded_member", "fields" : [ { "field_name" : "object" }, { "field_name" : "operator" }, { "field_name" : "name" } ] }, { "kind_name" : "YieldExpression", "type_name" : "yield_expression", "description" : "yield_expression", "prefix" : "yield", "fields" : [ { "field_name" : "keyword" }, { "field_name" : "operand" } ] }, { "kind_name" : "PrefixUnaryExpression", "type_name" : "prefix_unary_expression", "description" : "prefix_unary_expression", "prefix" : "prefix_unary", "fields" : [ { "field_name" : "operator" }, { "field_name" : "operand" } ] }, { "kind_name" : "PostfixUnaryExpression", "type_name" : "postfix_unary_expression", "description" : "postfix_unary_expression", "prefix" : "postfix_unary", "fields" : [ { "field_name" : "operand" }, { "field_name" : "operator" } ] }, { "kind_name" : "BinaryExpression", "type_name" : "binary_expression", "description" : "binary_expression", "prefix" : "binary", "fields" : [ { "field_name" : "left_operand" }, { "field_name" : "operator" }, { "field_name" : "right_operand" } ] }, { "kind_name" : "IsExpression", "type_name" : "is_expression", "description" : "is_expression", "prefix" : "is", "fields" : [ { "field_name" : "left_operand" }, { "field_name" : "operator" }, { "field_name" : "right_operand" } ] }, { "kind_name" : "AsExpression", "type_name" : "as_expression", "description" : "as_expression", "prefix" : "as", "fields" : [ { "field_name" : "left_operand" }, { "field_name" : "operator" }, { "field_name" : "right_operand" } ] }, { "kind_name" : "NullableAsExpression", "type_name" : "nullable_as_expression", "description" : "nullable_as_expression", "prefix" : "nullable_as", "fields" : [ { "field_name" : "left_operand" }, { "field_name" : "operator" }, { "field_name" : "right_operand" } ] }, { "kind_name" : "UpcastExpression", "type_name" : "upcast_expression", "description" : "upcast_expression", "prefix" : "upcast", "fields" : [ { "field_name" : "left_operand" }, { "field_name" : "operator" }, { "field_name" : "right_operand" } ] }, { "kind_name" : "ConditionalExpression", "type_name" : "conditional_expression", "description" : "conditional_expression", "prefix" : "conditional", "fields" : [ { "field_name" : "test" }, { 
"field_name" : "question" }, { "field_name" : "consequence" }, { "field_name" : "colon" }, { "field_name" : "alternative" } ] }, { "kind_name" : "EvalExpression", "type_name" : "eval_expression", "description" : "eval_expression", "prefix" : "eval", "fields" : [ { "field_name" : "keyword" }, { "field_name" : "left_paren" }, { "field_name" : "argument" }, { "field_name" : "right_paren" } ] }, { "kind_name" : "IssetExpression", "type_name" : "isset_expression", "description" : "isset_expression", "prefix" : "isset", "fields" : [ { "field_name" : "keyword" }, { "field_name" : "left_paren" }, { "field_name" : "argument_list" }, { "field_name" : "right_paren" } ] }, { "kind_name" : "FunctionCallExpression", "type_name" : "function_call_expression", "description" : "function_call_expression", "prefix" : "function_call", "fields" : [ { "field_name" : "receiver" }, { "field_name" : "type_args" }, { "field_name" : "left_paren" }, { "field_name" : "argument_list" }, { "field_name" : "right_paren" } ] }, { "kind_name" : "FunctionPointerExpression", "type_name" : "function_pointer_expression", "description" : "function_pointer_expression", "prefix" : "function_pointer", "fields" : [ { "field_name" : "receiver" }, { "field_name" : "type_args" } ] }, { "kind_name" : "ParenthesizedExpression", "type_name" : "parenthesized_expression", "description" : "parenthesized_expression", "prefix" : "parenthesized_expression", "fields" : [ { "field_name" : "left_paren" }, { "field_name" : "expression" }, { "field_name" : "right_paren" } ] }, { "kind_name" : "BracedExpression", "type_name" : "braced_expression", "description" : "braced_expression", "prefix" : "braced_expression", "fields" : [ { "field_name" : "left_brace" }, { "field_name" : "expression" }, { "field_name" : "right_brace" } ] }, { "kind_name" : "ETSpliceExpression", "type_name" : "et_splice_expression", "description" : "et_splice_expression", "prefix" : "et_splice_expression", "fields" : [ { "field_name" : "dollar" }, { "field_name" : "left_brace" }, { "field_name" : "expression" }, { "field_name" : "right_brace" } ] }, { "kind_name" : "EmbeddedBracedExpression", "type_name" : "embedded_braced_expression", "description" : "embedded_braced_expression", "prefix" : "embedded_braced_expression", "fields" : [ { "field_name" : "left_brace" }, { "field_name" : "expression" }, { "field_name" : "right_brace" } ] }, { "kind_name" : "ListExpression", "type_name" : "list_expression", "description" : "list_expression", "prefix" : "list", "fields" : [ { "field_name" : "keyword" }, { "field_name" : "left_paren" }, { "field_name" : "members" }, { "field_name" : "right_paren" } ] }, { "kind_name" : "CollectionLiteralExpression", "type_name" : "collection_literal_expression", "description" : "collection_literal_expression", "prefix" : "collection_literal", "fields" : [ { "field_name" : "name" }, { "field_name" : "left_brace" }, { "field_name" : "initializers" }, { "field_name" : "right_brace" } ] }, { "kind_name" : "ObjectCreationExpression", "type_name" : "object_creation_expression", "description" : "object_creation_expression", "prefix" : "object_creation", "fields" : [ { "field_name" : "new_keyword" }, { "field_name" : "object" } ] }, { "kind_name" : "ConstructorCall", "type_name" : "constructor_call", "description" : "constructor_call", "prefix" : "constructor_call", "fields" : [ { "field_name" : "type" }, { "field_name" : "left_paren" }, { "field_name" : "argument_list" }, { "field_name" : "right_paren" } ] }, { "kind_name" : "DarrayIntrinsicExpression", 
"type_name" : "darray_intrinsic_expression", "description" : "darray_intrinsic_expression", "prefix" : "darray_intrinsic", "fields" : [ { "field_name" : "keyword" }, { "field_name" : "explicit_type" }, { "field_name" : "left_bracket" }, { "field_name" : "members" }, { "field_name" : "right_bracket" } ] }, { "kind_name" : "DictionaryIntrinsicExpression", "type_name" : "dictionary_intrinsic_expression", "description" : "dictionary_intrinsic_expression", "prefix" : "dictionary_intrinsic", "fields" : [ { "field_name" : "keyword" }, { "field_name" : "explicit_type" }, { "field_name" : "left_bracket" }, { "field_name" : "members" }, { "field_name" : "right_bracket" } ] }, { "kind_name" : "KeysetIntrinsicExpression", "type_name" : "keyset_intrinsic_expression", "description" : "keyset_intrinsic_expression", "prefix" : "keyset_intrinsic", "fields" : [ { "field_name" : "keyword" }, { "field_name" : "explicit_type" }, { "field_name" : "left_bracket" }, { "field_name" : "members" }, { "field_name" : "right_bracket" } ] }, { "kind_name" : "VarrayIntrinsicExpression", "type_name" : "varray_intrinsic_expression", "description" : "varray_intrinsic_expression", "prefix" : "varray_intrinsic", "fields" : [ { "field_name" : "keyword" }, { "field_name" : "explicit_type" }, { "field_name" : "left_bracket" }, { "field_name" : "members" }, { "field_name" : "right_bracket" } ] }, { "kind_name" : "VectorIntrinsicExpression", "type_name" : "vector_intrinsic_expression", "description" : "vector_intrinsic_expression", "prefix" : "vector_intrinsic", "fields" : [ { "field_name" : "keyword" }, { "field_name" : "explicit_type" }, { "field_name" : "left_bracket" }, { "field_name" : "members" }, { "field_name" : "right_bracket" } ] }, { "kind_name" : "ElementInitializer", "type_name" : "element_initializer", "description" : "element_initializer", "prefix" : "element", "fields" : [ { "field_name" : "key" }, { "field_name" : "arrow" }, { "field_name" : "value" } ] }, { "kind_name" : "SubscriptExpression", "type_name" : "subscript_expression", "description" : "subscript_expression", "prefix" : "subscript", "fields" : [ { "field_name" : "receiver" }, { "field_name" : "left_bracket" }, { "field_name" : "index" }, { "field_name" : "right_bracket" } ] }, { "kind_name" : "EmbeddedSubscriptExpression", "type_name" : "embedded_subscript_expression", "description" : "embedded_subscript_expression", "prefix" : "embedded_subscript", "fields" : [ { "field_name" : "receiver" }, { "field_name" : "left_bracket" }, { "field_name" : "index" }, { "field_name" : "right_bracket" } ] }, { "kind_name" : "AwaitableCreationExpression", "type_name" : "awaitable_creation_expression", "description" : "awaitable_creation_expression", "prefix" : "awaitable", "fields" : [ { "field_name" : "attribute_spec" }, { "field_name" : "async" }, { "field_name" : "compound_statement" } ] }, { "kind_name" : "XHPChildrenDeclaration", "type_name" : "xhp_children_declaration", "description" : "xhp_children_declaration", "prefix" : "xhp_children", "fields" : [ { "field_name" : "keyword" }, { "field_name" : "expression" }, { "field_name" : "semicolon" } ] }, { "kind_name" : "XHPChildrenParenthesizedList", "type_name" : "xhp_children_parenthesized_list", "description" : "xhp_children_parenthesized_list", "prefix" : "xhp_children_list", "fields" : [ { "field_name" : "left_paren" }, { "field_name" : "xhp_children" }, { "field_name" : "right_paren" } ] }, { "kind_name" : "XHPCategoryDeclaration", "type_name" : "xhp_category_declaration", "description" : 
"xhp_category_declaration", "prefix" : "xhp_category", "fields" : [ { "field_name" : "keyword" }, { "field_name" : "categories" }, { "field_name" : "semicolon" } ] }, { "kind_name" : "XHPEnumType", "type_name" : "xhp_enum_type", "description" : "xhp_enum_type", "prefix" : "xhp_enum", "fields" : [ { "field_name" : "like" }, { "field_name" : "keyword" }, { "field_name" : "left_brace" }, { "field_name" : "values" }, { "field_name" : "right_brace" } ] }, { "kind_name" : "XHPLateinit", "type_name" : "xhp_lateinit", "description" : "xhp_lateinit", "prefix" : "xhp_lateinit", "fields" : [ { "field_name" : "at" }, { "field_name" : "keyword" } ] }, { "kind_name" : "XHPRequired", "type_name" : "xhp_required", "description" : "xhp_required", "prefix" : "xhp_required", "fields" : [ { "field_name" : "at" }, { "field_name" : "keyword" } ] }, { "kind_name" : "XHPClassAttributeDeclaration", "type_name" : "xhp_class_attribute_declaration", "description" : "xhp_class_attribute_declaration", "prefix" : "xhp_attribute", "fields" : [ { "field_name" : "keyword" }, { "field_name" : "attributes" }, { "field_name" : "semicolon" } ] }, { "kind_name" : "XHPClassAttribute", "type_name" : "xhp_class_attribute", "description" : "xhp_class_attribute", "prefix" : "xhp_attribute_decl", "fields" : [ { "field_name" : "type" }, { "field_name" : "name" }, { "field_name" : "initializer" }, { "field_name" : "required" } ] }, { "kind_name" : "XHPSimpleClassAttribute", "type_name" : "xhp_simple_class_attribute", "description" : "xhp_simple_class_attribute", "prefix" : "xhp_simple_class_attribute", "fields" : [ { "field_name" : "type" } ] }, { "kind_name" : "XHPSimpleAttribute", "type_name" : "xhp_simple_attribute", "description" : "xhp_simple_attribute", "prefix" : "xhp_simple_attribute", "fields" : [ { "field_name" : "name" }, { "field_name" : "equal" }, { "field_name" : "expression" } ] }, { "kind_name" : "XHPSpreadAttribute", "type_name" : "xhp_spread_attribute", "description" : "xhp_spread_attribute", "prefix" : "xhp_spread_attribute", "fields" : [ { "field_name" : "left_brace" }, { "field_name" : "spread_operator" }, { "field_name" : "expression" }, { "field_name" : "right_brace" } ] }, { "kind_name" : "XHPOpen", "type_name" : "xhp_open", "description" : "xhp_open", "prefix" : "xhp_open", "fields" : [ { "field_name" : "left_angle" }, { "field_name" : "name" }, { "field_name" : "attributes" }, { "field_name" : "right_angle" } ] }, { "kind_name" : "XHPExpression", "type_name" : "xhp_expression", "description" : "xhp_expression", "prefix" : "xhp", "fields" : [ { "field_name" : "open" }, { "field_name" : "body" }, { "field_name" : "close" } ] }, { "kind_name" : "XHPClose", "type_name" : "xhp_close", "description" : "xhp_close", "prefix" : "xhp_close", "fields" : [ { "field_name" : "left_angle" }, { "field_name" : "name" }, { "field_name" : "right_angle" } ] }, { "kind_name" : "TypeConstant", "type_name" : "type_constant", "description" : "type_constant", "prefix" : "type_constant", "fields" : [ { "field_name" : "left_type" }, { "field_name" : "separator" }, { "field_name" : "right_type" } ] }, { "kind_name" : "VectorTypeSpecifier", "type_name" : "vector_type_specifier", "description" : "vector_type_specifier", "prefix" : "vector_type", "fields" : [ { "field_name" : "keyword" }, { "field_name" : "left_angle" }, { "field_name" : "type" }, { "field_name" : "trailing_comma" }, { "field_name" : "right_angle" } ] }, { "kind_name" : "KeysetTypeSpecifier", "type_name" : "keyset_type_specifier", "description" : "keyset_type_specifier", 
"prefix" : "keyset_type", "fields" : [ { "field_name" : "keyword" }, { "field_name" : "left_angle" }, { "field_name" : "type" }, { "field_name" : "trailing_comma" }, { "field_name" : "right_angle" } ] }, { "kind_name" : "TupleTypeExplicitSpecifier", "type_name" : "tuple_type_explicit_specifier", "description" : "tuple_type_explicit_specifier", "prefix" : "tuple_type", "fields" : [ { "field_name" : "keyword" }, { "field_name" : "left_angle" }, { "field_name" : "types" }, { "field_name" : "right_angle" } ] }, { "kind_name" : "VarrayTypeSpecifier", "type_name" : "varray_type_specifier", "description" : "varray_type_specifier", "prefix" : "varray", "fields" : [ { "field_name" : "keyword" }, { "field_name" : "left_angle" }, { "field_name" : "type" }, { "field_name" : "trailing_comma" }, { "field_name" : "right_angle" } ] }, { "kind_name" : "FunctionCtxTypeSpecifier", "type_name" : "function_ctx_type_specifier", "description" : "function_ctx_type_specifier", "prefix" : "function_ctx_type", "fields" : [ { "field_name" : "keyword" }, { "field_name" : "variable" } ] }, { "kind_name" : "TypeParameter", "type_name" : "type_parameter", "description" : "type_parameter", "prefix" : "type", "fields" : [ { "field_name" : "attribute_spec" }, { "field_name" : "reified" }, { "field_name" : "variance" }, { "field_name" : "name" }, { "field_name" : "param_params" }, { "field_name" : "constraints" } ] }, { "kind_name" : "TypeConstraint", "type_name" : "type_constraint", "description" : "type_constraint", "prefix" : "constraint", "fields" : [ { "field_name" : "keyword" }, { "field_name" : "type" } ] }, { "kind_name" : "ContextConstraint", "type_name" : "context_constraint", "description" : "context_constraint", "prefix" : "ctx_constraint", "fields" : [ { "field_name" : "keyword" }, { "field_name" : "ctx_list" } ] }, { "kind_name" : "DarrayTypeSpecifier", "type_name" : "darray_type_specifier", "description" : "darray_type_specifier", "prefix" : "darray", "fields" : [ { "field_name" : "keyword" }, { "field_name" : "left_angle" }, { "field_name" : "key" }, { "field_name" : "comma" }, { "field_name" : "value" }, { "field_name" : "trailing_comma" }, { "field_name" : "right_angle" } ] }, { "kind_name" : "DictionaryTypeSpecifier", "type_name" : "dictionary_type_specifier", "description" : "dictionary_type_specifier", "prefix" : "dictionary_type", "fields" : [ { "field_name" : "keyword" }, { "field_name" : "left_angle" }, { "field_name" : "members" }, { "field_name" : "right_angle" } ] }, { "kind_name" : "ClosureTypeSpecifier", "type_name" : "closure_type_specifier", "description" : "closure_type_specifier", "prefix" : "closure", "fields" : [ { "field_name" : "outer_left_paren" }, { "field_name" : "readonly_keyword" }, { "field_name" : "function_keyword" }, { "field_name" : "inner_left_paren" }, { "field_name" : "parameter_list" }, { "field_name" : "inner_right_paren" }, { "field_name" : "contexts" }, { "field_name" : "colon" }, { "field_name" : "readonly_return" }, { "field_name" : "return_type" }, { "field_name" : "outer_right_paren" } ] }, { "kind_name" : "ClosureParameterTypeSpecifier", "type_name" : "closure_parameter_type_specifier", "description" : "closure_parameter_type_specifier", "prefix" : "closure_parameter", "fields" : [ { "field_name" : "call_convention" }, { "field_name" : "readonly" }, { "field_name" : "type" } ] }, { "kind_name" : "TypeRefinement", "type_name" : "type_refinement", "description" : "type_refinement", "prefix" : "type_refinement", "fields" : [ { "field_name" : "type" }, { "field_name" : 
"keyword" }, { "field_name" : "left_brace" }, { "field_name" : "members" }, { "field_name" : "right_brace" } ] }, { "kind_name" : "TypeInRefinement", "type_name" : "type_in_refinement", "description" : "type_in_refinement", "prefix" : "type_in_refinement", "fields" : [ { "field_name" : "keyword" }, { "field_name" : "name" }, { "field_name" : "type_parameters" }, { "field_name" : "constraints" }, { "field_name" : "equal" }, { "field_name" : "type" } ] }, { "kind_name" : "CtxInRefinement", "type_name" : "ctx_in_refinement", "description" : "ctx_in_refinement", "prefix" : "ctx_in_refinement", "fields" : [ { "field_name" : "keyword" }, { "field_name" : "name" }, { "field_name" : "type_parameters" }, { "field_name" : "constraints" }, { "field_name" : "equal" }, { "field_name" : "ctx_list" } ] }, { "kind_name" : "ClassnameTypeSpecifier", "type_name" : "classname_type_specifier", "description" : "classname_type_specifier", "prefix" : "classname", "fields" : [ { "field_name" : "keyword" }, { "field_name" : "left_angle" }, { "field_name" : "type" }, { "field_name" : "trailing_comma" }, { "field_name" : "right_angle" } ] }, { "kind_name" : "FieldSpecifier", "type_name" : "field_specifier", "description" : "field_specifier", "prefix" : "field", "fields" : [ { "field_name" : "question" }, { "field_name" : "name" }, { "field_name" : "arrow" }, { "field_name" : "type" } ] }, { "kind_name" : "FieldInitializer", "type_name" : "field_initializer", "description" : "field_initializer", "prefix" : "field_initializer", "fields" : [ { "field_name" : "name" }, { "field_name" : "arrow" }, { "field_name" : "value" } ] }, { "kind_name" : "ShapeTypeSpecifier", "type_name" : "shape_type_specifier", "description" : "shape_type_specifier", "prefix" : "shape_type", "fields" : [ { "field_name" : "keyword" }, { "field_name" : "left_paren" }, { "field_name" : "fields" }, { "field_name" : "ellipsis" }, { "field_name" : "right_paren" } ] }, { "kind_name" : "ShapeExpression", "type_name" : "shape_expression", "description" : "shape_expression", "prefix" : "shape_expression", "fields" : [ { "field_name" : "keyword" }, { "field_name" : "left_paren" }, { "field_name" : "fields" }, { "field_name" : "right_paren" } ] }, { "kind_name" : "TupleExpression", "type_name" : "tuple_expression", "description" : "tuple_expression", "prefix" : "tuple_expression", "fields" : [ { "field_name" : "keyword" }, { "field_name" : "left_paren" }, { "field_name" : "items" }, { "field_name" : "right_paren" } ] }, { "kind_name" : "GenericTypeSpecifier", "type_name" : "generic_type_specifier", "description" : "generic_type_specifier", "prefix" : "generic", "fields" : [ { "field_name" : "class_type" }, { "field_name" : "argument_list" } ] }, { "kind_name" : "NullableTypeSpecifier", "type_name" : "nullable_type_specifier", "description" : "nullable_type_specifier", "prefix" : "nullable", "fields" : [ { "field_name" : "question" }, { "field_name" : "type" } ] }, { "kind_name" : "LikeTypeSpecifier", "type_name" : "like_type_specifier", "description" : "like_type_specifier", "prefix" : "like", "fields" : [ { "field_name" : "tilde" }, { "field_name" : "type" } ] }, { "kind_name" : "SoftTypeSpecifier", "type_name" : "soft_type_specifier", "description" : "soft_type_specifier", "prefix" : "soft", "fields" : [ { "field_name" : "at" }, { "field_name" : "type" } ] }, { "kind_name" : "AttributizedSpecifier", "type_name" : "attributized_specifier", "description" : "attributized_specifier", "prefix" : "attributized_specifier", "fields" : [ { "field_name" : 
"attribute_spec" }, { "field_name" : "type" } ] }, { "kind_name" : "ReifiedTypeArgument", "type_name" : "reified_type_argument", "description" : "reified_type_argument", "prefix" : "reified_type_argument", "fields" : [ { "field_name" : "reified" }, { "field_name" : "type" } ] }, { "kind_name" : "TypeArguments", "type_name" : "type_arguments", "description" : "type_arguments", "prefix" : "type_arguments", "fields" : [ { "field_name" : "left_angle" }, { "field_name" : "types" }, { "field_name" : "right_angle" } ] }, { "kind_name" : "TypeParameters", "type_name" : "type_parameters", "description" : "type_parameters", "prefix" : "type_parameters", "fields" : [ { "field_name" : "left_angle" }, { "field_name" : "parameters" }, { "field_name" : "right_angle" } ] }, { "kind_name" : "TupleTypeSpecifier", "type_name" : "tuple_type_specifier", "description" : "tuple_type_specifier", "prefix" : "tuple", "fields" : [ { "field_name" : "left_paren" }, { "field_name" : "types" }, { "field_name" : "right_paren" } ] }, { "kind_name" : "UnionTypeSpecifier", "type_name" : "union_type_specifier", "description" : "union_type_specifier", "prefix" : "union", "fields" : [ { "field_name" : "left_paren" }, { "field_name" : "types" }, { "field_name" : "right_paren" } ] }, { "kind_name" : "IntersectionTypeSpecifier", "type_name" : "intersection_type_specifier", "description" : "intersection_type_specifier", "prefix" : "intersection", "fields" : [ { "field_name" : "left_paren" }, { "field_name" : "types" }, { "field_name" : "right_paren" } ] }, { "kind_name" : "ErrorSyntax", "type_name" : "error", "description" : "error", "prefix" : "error", "fields" : [ { "field_name" : "error" } ] }, { "kind_name" : "ListItem", "type_name" : "list_item", "description" : "list_item", "prefix" : "list", "fields" : [ { "field_name" : "item" }, { "field_name" : "separator" } ] }, { "kind_name" : "EnumClassLabelExpression", "type_name" : "enum_class_label_expression", "description" : "enum_class_label", "prefix" : "enum_class_label", "fields" : [ { "field_name" : "qualifier" }, { "field_name" : "hash" }, { "field_name" : "expression" } ] }, { "kind_name" : "ModuleDeclaration", "type_name" : "module_declaration", "description" : "module_declaration", "prefix" : "module_declaration", "fields" : [ { "field_name" : "attribute_spec" }, { "field_name" : "new_keyword" }, { "field_name" : "module_keyword" }, { "field_name" : "name" }, { "field_name" : "left_brace" }, { "field_name" : "exports" }, { "field_name" : "imports" }, { "field_name" : "right_brace" } ] }, { "kind_name" : "ModuleExports", "type_name" : "module_exports", "description" : "module_exports", "prefix" : "module_exports", "fields" : [ { "field_name" : "exports_keyword" }, { "field_name" : "left_brace" }, { "field_name" : "exports" }, { "field_name" : "right_brace" } ] }, { "kind_name" : "ModuleImports", "type_name" : "module_imports", "description" : "module_imports", "prefix" : "module_imports", "fields" : [ { "field_name" : "imports_keyword" }, { "field_name" : "left_brace" }, { "field_name" : "imports" }, { "field_name" : "right_brace" } ] }, { "kind_name" : "ModuleMembershipDeclaration", "type_name" : "module_membership_declaration", "description" : "module_membership_declaration", "prefix" : "module_membership_declaration", "fields" : [ { "field_name" : "module_keyword" }, { "field_name" : "name" }, { "field_name" : "semicolon" } ] }, { "kind_name" : "PackageExpression", "type_name" : "package_expression", "description" : "package_expression", "prefix" : 
"package_expression", "fields" : [ { "field_name" : "keyword" }, { "field_name" : "name" } ] }, { "kind_name" : "Token", "type_name" : "token", "description" : "token", "prefix" : "", "fields" : [ { "field_name" : "leading" }, { "field_name" : "trailing" } ] }, { "kind_name" : "Missing", "type_name" : "missing", "description" : "missing", "prefix" : "", "fields" : [ ] }, { "kind_name" : "SyntaxList", "type_name" : "syntax_list", "description" : "syntax_list", "prefix" : "", "fields" : [ ] } ] }
TOML
hhvm/hphp/hack/src/parser/lowerer/Cargo.toml
# @generated by autocargo

[package]
name = "lowerer"
version = "0.0.0"
edition = "2021"

[lib]
path = "lib.rs"

[dependencies]
bstr = { version = "1.4.0", features = ["serde", "std", "unicode"] }
bumpalo = { version = "3.11.1", features = ["collections"] }
escaper = { version = "0.0.0", path = "../../utils/escaper" }
hash = { version = "0.0.0", path = "../../utils/hash" }
html_entities = { version = "0.0.0", path = "../../utils/html_entities" }
itertools = "0.10.3"
lazy_static = "1.4"
lint_rust = { version = "0.0.0", path = "../../utils/lint" }
naming_special_names_rust = { version = "0.0.0", path = "../../naming" }
ocaml_helper = { version = "0.0.0", path = "../../utils/ocaml_helper" }
oxidized = { version = "0.0.0", path = "../../oxidized" }
parser_core_types = { version = "0.0.0", path = "../cargo/core_types" }
regex = "1.9.2"
relative_path = { version = "0.0.0", path = "../../utils/rust/relative_path" }
rescan_trivia = { version = "0.0.0", path = "../api/cargo/rescan_trivia" }
stack_limit = { version = "0.0.0", path = "../../utils/stack_limit" }
thiserror = "1.0.43"
Rust
hhvm/hphp/hack/src/parser/lowerer/desugar_expression_tree.rs
use bstr::BString; use naming_special_names_rust::classes; use naming_special_names_rust::expression_trees as et; use naming_special_names_rust::pseudo_functions; use naming_special_names_rust::special_idents; use oxidized::aast; use oxidized::aast_visitor::visit; use oxidized::aast_visitor::visit_mut; use oxidized::aast_visitor::AstParams; use oxidized::aast_visitor::Node; use oxidized::aast_visitor::NodeMut; use oxidized::aast_visitor::Visitor; use oxidized::aast_visitor::VisitorMut; use oxidized::ast; use oxidized::ast::ClassId; use oxidized::ast::ClassId_; use oxidized::ast::Expr; use oxidized::ast::Expr_; use oxidized::ast::Hint_; use oxidized::ast::Sid; use oxidized::ast::Stmt; use oxidized::ast::Stmt_; use oxidized::ast_defs; use oxidized::ast_defs::*; use oxidized::local_id; use oxidized::pos::Pos; use crate::lowerer::Env; pub struct DesugarResult { pub expr: Expr, pub errors: Vec<(Pos, String)>, } struct RewriteResult { virtual_expr: Expr, desugar_expr: Expr, } /// Rewrite the contents of an expression tree literal into an /// expression on a visitor class. /// /// Given the following expression tree: /// ``` /// MyDsl`foo(1) + ${ $x }`; /// ``` /// /// First, the splices are extracted and assigned to temporary variables: /// ``` /// { $0splice0 = $x; } /// ``` /// /// Then the expression is virtualized as virtualized_expr /// ``` /// MyDsl::symbolType(foo<>)(MyDsl::intType())->__plus( ${ $0splice0 } ) /// ``` /// Where virtualized_expr is used in helping type Expression Trees /// /// Finally, the expression is desugared as runtime_expr /// ``` /// MyDsl::makeTree( /// // At runtime, expression tree visitors know the position of the literal. /// shape('path' => 'whatever.php', 'start_line' => 123, ...), /// /// // We provide metadata of values used inside the visitor, so users can access /// // spliced or called values without having to re-run the visit method. 
/// shape( /// 'splices' => dict['$0splice0' => $0splice0], /// 'functions' => vec[foo<>], /// 'static_methods' => vec[], /// ) /// /// (MyDsl $0v) ==> { /// $0v->visitBinop( /// // (ignoring ExprPos arguments for brevity) /// $0v->visitCall( /// $0v->visitGlobalFunction(foo<>), /// vec[$0v->visitInt(1)], /// ), /// '__plus', /// $0v->splice('$0splice0', $0splice0), /// ) /// }, /// ) /// ``` /// Which is the runtime representation of the Expression Tree pub fn desugar(hint: &aast::Hint, e: Expr, env: &Env<'_>) -> DesugarResult { let mut errors = vec![]; let visitor_name = match hint_name(hint) { Ok(name) => name, Err((pos, msg)) => { errors.push((pos, msg)); "unknown".into() } }; let et_literal_pos = e.1.clone(); let et_hint_pos = hint.0.clone(); let mut temps = Temporaries { splices: vec![], global_function_pointers: vec![], static_method_pointers: vec![], }; let rewritten_expr = rewrite_expr( &mut temps, e, &visitor_name, &mut errors, env.parser_options.tco_expression_tree_virtualize_functions, ); let dollardollar_pos = rewrite_dollardollars(&mut temps.splices); let splice_count = temps.splices.len(); let function_count = temps.global_function_pointers.len(); let static_method_count = temps.static_method_pointers.len(); let metadata = maketree_metadata( &et_hint_pos, &temps.splices, &temps.global_function_pointers, &temps.static_method_pointers, ); // Make anonymous function of smart constructor calls let visitor_expr = wrap_return(rewritten_expr.desugar_expr, &et_literal_pos); let visitor_body = ast::FuncBody { fb_ast: ast::Block(vec![visitor_expr]), }; let param = ast::FunParam { annotation: (), type_hint: ast::TypeHint((), Some(hint.clone())), is_variadic: false, pos: hint.0.clone(), name: visitor_variable(), expr: None, callconv: ParamKind::Pnormal, readonly: None, user_attributes: Default::default(), visibility: None, }; let visitor_fun_ = wrap_fun_(visitor_body, vec![param], et_literal_pos.clone()); let visitor_lambda = Expr::new( (), et_literal_pos.clone(), Expr_::mk_lfun(visitor_fun_, vec![]), ); // Wrap this in an Efun with appropriate variables for typing. // This enables us to report unbound variables correctly. 
let virtualized_expr = { let typing_fun_body = ast::FuncBody { fb_ast: ast::Block(vec![wrap_return( rewritten_expr.virtual_expr, &et_literal_pos, )]), }; let typing_fun_ = wrap_fun_(typing_fun_body, vec![], et_literal_pos.clone()); let mut spliced_vars: Vec<_> = (0..splice_count) .map(|i| { ast::CaptureLid( (), ast::Lid(et_hint_pos.clone(), (0, temp_splice_lvar_string(i))), ) }) .collect(); let function_pointer_vars: Vec<_> = (0..function_count) .map(|i| { ast::CaptureLid( (), ast::Lid( et_hint_pos.clone(), (0, temp_function_pointer_lvar_string(i)), ), ) }) .collect(); let static_method_vars: Vec<_> = (0..static_method_count) .map(|i| { ast::CaptureLid( (), ast::Lid(et_hint_pos.clone(), (0, temp_static_method_lvar_string(i))), ) }) .collect(); spliced_vars.extend(function_pointer_vars); spliced_vars.extend(static_method_vars); Expr::new( (), et_literal_pos.clone(), Expr_::Call(Box::new(ast::CallExpr { func: Expr::new( (), et_literal_pos.clone(), Expr_::mk_efun(aast::Efun { fun: typing_fun_, use_: spliced_vars, closure_class_name: None, }), ), targs: vec![], args: vec![], unpacked_arg: None, })), ) }; // Create assignment of the extracted expressions to temporary variables // `$0splice0 = spliced_expr0;` let splice_assignments: Vec<Stmt> = create_temp_statements(temps.splices, temp_splice_lvar); // `$0fp0 = foo<>;` let function_pointer_assignments: Vec<Stmt> = create_temp_statements(temps.global_function_pointers, temp_function_pointer_lvar); // `$0sm0 = Foo::bar<>;` let static_method_assignments: Vec<Stmt> = create_temp_statements(temps.static_method_pointers, temp_static_method_lvar); let mut function_pointers = vec![]; function_pointers.extend(function_pointer_assignments); function_pointers.extend(static_method_assignments); let make_tree = static_meth_call( &visitor_name, et::MAKE_TREE, vec![exprpos(&et_literal_pos), metadata, visitor_lambda], &et_hint_pos, ); let runtime_expr = if splice_assignments.is_empty() && function_pointers.is_empty() { make_tree } else { let body = if env.codegen { let mut b = splice_assignments.clone(); b.extend(function_pointers.clone()); b.push(wrap_return(make_tree, &et_literal_pos)); b } else { vec![wrap_return(make_tree, &et_literal_pos)] }; let lambda_args = match &dollardollar_pos { Some(pipe_pos) => vec![( (et::DOLLARDOLLAR_TMP_VAR.to_string(), pipe_pos.clone()), Expr::mk_lvar(pipe_pos, special_idents::DOLLAR_DOLLAR), )], _ => vec![], }; immediately_invoked_lambda(&et_literal_pos, body, lambda_args) }; let expr = Expr::new( (), et_literal_pos, Expr_::mk_expression_tree(ast::ExpressionTree { hint: hint.clone(), splices: splice_assignments, function_pointers, virtualized_expr, runtime_expr, dollardollar_pos, }), ); DesugarResult { expr, errors } } /// Convert `foo` to `return foo;`. 
fn wrap_return(e: Expr, pos: &Pos) -> Stmt { Stmt::new(pos.clone(), Stmt_::Return(Box::new(Some(e)))) } /// Wrap a FuncBody into an anonymous Fun_ fn wrap_fun_(body: ast::FuncBody, params: Vec<ast::FunParam>, span: Pos) -> ast::Fun_ { ast::Fun_ { span, readonly_this: None, annotation: (), readonly_ret: None, ret: ast::TypeHint((), None), params, body, fun_kind: ast::FunKind::FSync, ctxs: None, // TODO(T70095684) unsafe_ctxs: None, // TODO(T70095684) user_attributes: Default::default(), external: false, doc_comment: None, } } struct DollarDollarRewriter { pos: Option<Pos>, } impl<'ast> VisitorMut<'ast> for DollarDollarRewriter { type Params = AstParams<(), ()>; fn object(&mut self) -> &mut dyn VisitorMut<'ast, Params = Self::Params> { self } fn visit_expr(&mut self, env: &mut (), e: &mut aast::Expr<(), ()>) -> Result<(), ()> { use aast::Expr_::*; match &mut e.2 { // Rewrite all occurrences to $0dollardollar Lvar(l) => { if local_id::get_name(&l.1) == special_idents::DOLLAR_DOLLAR { // Replace and remember the position e.2 = Lvar(Box::new(ast::Lid( e.1.clone(), local_id::make_unscoped(et::DOLLARDOLLAR_TMP_VAR), ))); if self.pos.is_none() { self.pos = Some(e.1.clone()); } } Ok(()) } // Don't need to recurse into the new scopes of lambdas Lfun(_) | Efun(_) => Ok(()), // Don't recurse into Expression Trees ExpressionTree(_) | ETSplice(_) => Ok(()), // Only recurse into the left hand side of any pipe as the rhs has new $$ Pipe(p) => (&mut p.1).accept(env, self.object()), // Otherwise, recurse completely on the other expressions _ => e.recurse(env, self.object()), } } } fn rewrite_dollardollars(el: &mut [ast::Expr]) -> Option<Pos> { let mut rewriter = DollarDollarRewriter { pos: None }; for e in el.iter_mut() { visit_mut(&mut rewriter, &mut (), e).expect("DollarDollarRewriter never errors"); } rewriter.pos } struct VoidReturnCheck { only_void_return: bool, } impl<'ast> Visitor<'ast> for VoidReturnCheck { type Params = AstParams<(), ()>; fn object(&mut self) -> &mut dyn Visitor<'ast, Params = Self::Params> { self } fn visit_expr(&mut self, env: &mut (), e: &aast::Expr<(), ()>) -> Result<(), ()> { use aast::Expr_::*; match &e.2 { // Don't recurse into splices or LFuns ETSplice(_) | Lfun(_) => Ok(()), // TODO: Do we even recurse on expressions? _ => e.recurse(env, self), } } fn visit_stmt(&mut self, env: &mut (), s: &'ast aast::Stmt<(), ()>) -> Result<(), ()> { use aast::Stmt_::*; match &s.1 { Return(e) => { if (*e).is_some() { self.only_void_return = false; } Ok(()) } _ => s.recurse(env, self), } } } fn only_void_return(lfun_body: &[ast::Stmt]) -> bool { let mut checker = VoidReturnCheck { only_void_return: true, }; visit(&mut checker, &mut (), &lfun_body).unwrap(); checker.only_void_return } struct NestedSpliceCheck { has_nested_splice: Option<Pos>, has_nested_expression_tree: Option<Pos>, } impl<'ast> Visitor<'ast> for NestedSpliceCheck { type Params = AstParams<(), ()>; fn object(&mut self) -> &mut dyn Visitor<'ast, Params = Self::Params> { self } fn visit_expr(&mut self, env: &mut (), e: &aast::Expr<(), ()>) -> Result<(), ()> { use aast::Expr_::*; match &e.2 { ETSplice(_) => { self.has_nested_splice = Some(e.1.clone()); } ExpressionTree(_) => { self.has_nested_expression_tree = Some(e.1.clone()); } _ if self.has_nested_splice.is_none() && self.has_nested_expression_tree.is_none() => { e.recurse(env, self)? } _ => {} } Ok(()) } } /// Assumes that the Expr is the expression within a splice. 
/// If the expression has an Expression Tree contained within or a splice, then /// we have nested expression trees or splices and this should raise an error. fn check_nested_splice(e: &ast::Expr) -> Result<(), (Pos, String)> { let mut checker = NestedSpliceCheck { has_nested_splice: None, has_nested_expression_tree: None, }; visit(&mut checker, &mut (), e).unwrap(); if let Some(p) = checker.has_nested_splice { return Err((p, "Splice syntax `${...}` cannot be nested.".into())); } if let Some(p) = checker.has_nested_expression_tree { return Err((p, "Expression trees may not be nested. Consider assigning to a local variable and splicing the local variable in.".into())); } Ok(()) } fn null_literal(pos: Pos) -> Expr { Expr::new((), pos, Expr_::Null) } fn string_literal(pos: Pos, s: &str) -> Expr { Expr::new((), pos, Expr_::String(BString::from(s))) } fn int_literal(pos: Pos, i: usize) -> Expr { Expr::new((), pos, Expr_::Int(i.to_string())) } fn vec_literal(items: Vec<Expr>) -> Expr { let positions: Vec<_> = items.iter().map(|x| &x.1).collect(); let position = merge_positions(&positions); vec_literal_with_pos(&position, items) } fn vec_literal_with_pos(pos: &Pos, items: Vec<Expr>) -> Expr { Expr::new( (), pos.clone(), Expr_::ValCollection(Box::new(((pos.clone(), aast::VcKind::Vec), None, items))), ) } fn dict_literal(pos: &Pos, key_value_pairs: Vec<(Expr, Expr)>) -> Expr { let fields = key_value_pairs .into_iter() .map(|(k, v)| aast::Field(k, v)) .collect(); Expr::new( (), pos.clone(), Expr_::KeyValCollection(Box::new(((pos.clone(), aast::KvcKind::Dict), None, fields))), ) } fn make_id(pos: Pos, name: &str) -> ast::Id { ast::Id(pos, name.into()) } fn visitor_variable() -> String { "$0v".to_string() } /// Given a list of arguments, make each a "normal" argument by annotating it with /// `ParamKind::Pnormal` fn build_args(args: Vec<Expr>) -> Vec<(ParamKind, Expr)> { args.into_iter().map(|n| (ParamKind::Pnormal, n)).collect() } /// Build `$v->meth_name(args)`. 
fn v_meth_call(meth_name: &str, args: Vec<Expr>, pos: &Pos) -> Expr { let receiver = Expr::mk_lvar(pos, &visitor_variable()); let meth = Expr::new( (), pos.clone(), Expr_::Id(Box::new(ast::Id(pos.clone(), meth_name.into()))), ); let c = Expr_::Call(Box::new(ast::CallExpr { func: Expr::new( (), pos.clone(), Expr_::ObjGet(Box::new(( receiver, meth, OgNullFlavor::OGNullthrows, ast::PropOrMethod::IsMethod, ))), ), targs: vec![], args: build_args(args), unpacked_arg: None, })); Expr::new((), pos.clone(), c) } fn meth_call(receiver: Expr, meth_name: &str, args: Vec<Expr>, pos: &Pos) -> Expr { let meth = Expr::new( (), pos.clone(), Expr_::Id(Box::new(ast::Id(pos.clone(), meth_name.into()))), ); let c = Expr_::Call(Box::new(ast::CallExpr { func: Expr::new( (), pos.clone(), Expr_::ObjGet(Box::new(( receiver, meth, OgNullFlavor::OGNullthrows, ast::PropOrMethod::IsMethod, ))), ), targs: vec![], args: build_args(args), unpacked_arg: None, })); Expr::new((), pos.clone(), c) } fn static_meth_call(classname: &str, meth_name: &str, args: Vec<Expr>, pos: &Pos) -> Expr { let callee = Expr::new( (), pos.clone(), Expr_::ClassConst(Box::new(( // TODO: Refactor ClassId creation with new_obj ClassId( (), pos.clone(), ClassId_::CIexpr(Expr::new( (), pos.clone(), Expr_::Id(Box::new(Id(pos.clone(), classname.to_string()))), )), ), (pos.clone(), meth_name.to_string()), ))), ); Expr::new( (), pos.clone(), Expr_::Call(Box::new(ast::CallExpr { func: callee, targs: vec![], args: build_args(args), unpacked_arg: None, })), ) } /// Join a slice of positions together into a single, larger position. fn merge_positions(positions: &[&Pos]) -> Pos { positions .iter() .fold(None, |acc, pos| match acc { Some(res) => Some(Pos::merge(&res, pos).expect("Positions should be in the same file")), None => Some((*pos).clone()), }) .unwrap_or(Pos::NONE) } fn create_temp_statements(exprs: Vec<Expr>, mk_lvar: fn(&Pos, usize) -> Expr) -> Vec<Stmt> { exprs .into_iter() .enumerate() .map(|(i, expr)| { Stmt::new( expr.1.clone(), Stmt_::Expr(Box::new(Expr::new( (), expr.1.clone(), Expr_::Binop(Box::new(aast::Binop { bop: Bop::Eq(None), lhs: mk_lvar(&expr.1, i), rhs: expr, })), ))), ) }) .collect() } fn temp_lvar_string(name: &str, num: usize) -> String { format!("$0{}{}", name, num) } fn temp_splice_lvar_string(num: usize) -> String { temp_lvar_string("splice", num) } fn temp_splice_lvar(pos: &Pos, num: usize) -> Expr { Expr::mk_lvar(pos, &temp_splice_lvar_string(num)) } fn temp_function_pointer_lvar_string(num: usize) -> String { temp_lvar_string("fp", num) } fn temp_function_pointer_lvar(pos: &Pos, num: usize) -> Expr { Expr::mk_lvar(pos, &temp_function_pointer_lvar_string(num)) } fn temp_static_method_lvar_string(num: usize) -> String { temp_lvar_string("sm", num) } fn temp_static_method_lvar(pos: &Pos, num: usize) -> Expr { Expr::mk_lvar(pos, &temp_static_method_lvar_string(num)) } /// Given a Pos, returns a shape literal expression representing it. /// /// ``` /// shape( /// 'path' => __FILE__, /// 'start_line' => 1, /// 'end_line' => 10, /// 'start_column' => 0, /// 'end_column' => 80, /// ) /// ``` /// /// If this Pos is Pos.none or invalid, return a literal null instead. 
fn exprpos(pos: &Pos) -> Expr { if pos.is_none() || !pos.is_valid() { null_literal(pos.clone()) } else { let ((start_lnum, start_bol, start_offset), (end_lnum, end_bol, end_offset)) = pos.to_start_and_end_lnum_bol_offset(); let fields = vec![ ( "path", Expr::new( (), pos.clone(), Expr_::Id(Box::new(make_id(pos.clone(), "__FILE__"))), ), ), ("start_line", int_literal(pos.clone(), start_lnum)), ("end_line", int_literal(pos.clone(), end_lnum)), ( "start_column", int_literal(pos.clone(), start_offset - start_bol), ), ("end_column", int_literal(pos.clone(), end_offset - end_bol)), ]; shape_literal(pos, fields) } } fn shape_literal(pos: &Pos, fields: Vec<(&str, Expr)>) -> Expr { let shape_fields: Vec<_> = fields .into_iter() .map(|(name, value)| { let bs = BString::from(name); let field_name = ShapeFieldName::SFlitStr((pos.clone(), bs)); (field_name, value) }) .collect(); Expr::new((), pos.clone(), Expr_::Shape(shape_fields)) } fn boolify(receiver: Expr) -> Expr { let pos = receiver.1.clone(); meth_call(receiver, "__bool", vec![], &pos) } struct Temporaries { splices: Vec<Expr>, global_function_pointers: Vec<Expr>, static_method_pointers: Vec<Expr>, } /// Performs both the virtualization and the desugaring in tandem /// Also extracts the expressions that need to be assigned to temporaries /// Replaces the extracted splices, function pointers, and static method pointers /// with temporary variables fn rewrite_expr( temps: &mut Temporaries, e: Expr, visitor_name: &str, errors: &mut Vec<(Pos, String)>, should_virtualize_functions: bool, ) -> RewriteResult { use aast::Expr_::*; // If we can't rewrite the expression (e.g. due to unsupported syntax), return the // original syntax unmodified. This is particularly useful during code completion, // where an unfinished code fragment might accidentally use unsupported syntax. 
let unchanged_result = RewriteResult { virtual_expr: e.clone(), desugar_expr: e.clone(), }; let Expr(_, pos, expr_) = e; let pos_expr = exprpos(&pos); match expr_ { // Source: MyDsl`1` // Virtualized: MyDsl::intType() // Desugared: $0v->visitInt(new ExprPos(...), 1) Int(_) => { let virtual_expr = static_meth_call(visitor_name, et::INT_TYPE, vec![], &pos); let desugar_expr = v_meth_call( et::VISIT_INT, vec![pos_expr, Expr((), pos.clone(), expr_)], &pos, ); RewriteResult { virtual_expr, desugar_expr, } } // Source: MyDsl`1.0` // Virtualized: MyDsl::floatType() // Desugared: $0v->visitFloat(new ExprPos(...), 1.0) Float(_) => { let virtual_expr = static_meth_call(visitor_name, et::FLOAT_TYPE, vec![], &pos); let desugar_expr = v_meth_call( et::VISIT_FLOAT, vec![pos_expr, Expr((), pos.clone(), expr_)], &pos, ); RewriteResult { virtual_expr, desugar_expr, } } // Source: MyDsl`'foo'` // Virtualized: MyDsl::stringType() // Desugared: $0v->visitString(new ExprPos(...), 'foo') String(_) => { let virtual_expr = static_meth_call(visitor_name, et::STRING_TYPE, vec![], &pos); let desugar_expr = v_meth_call( et::VISIT_STRING, vec![pos_expr, Expr((), pos.clone(), expr_)], &pos, ); RewriteResult { virtual_expr, desugar_expr, } } // Source: MyDsl`true` // Virtualized: MyDsl::boolType() // Desugared: $0v->visitBool(new ExprPos(...), true) True | False => { let virtual_expr = static_meth_call(visitor_name, et::BOOL_TYPE, vec![], &pos); let desugar_expr = v_meth_call( et::VISIT_BOOL, vec![pos_expr, Expr((), pos.clone(), expr_)], &pos, ); RewriteResult { virtual_expr, desugar_expr, } } // Source: MyDsl`null` // Virtualized: MyDsl::nullType() // Desugared: $0v->visitNull(new ExprPos(...)) Null => { let virtual_expr = static_meth_call(visitor_name, et::NULL_TYPE, vec![], &pos); let desugar_expr = v_meth_call(et::VISIT_NULL, vec![pos_expr], &pos); RewriteResult { virtual_expr, desugar_expr, } } // Source: MyDsl`$x` // Virtualized: $x // Desugared: $0v->visitLocal(new ExprPos(...), '$x') Lvar(lid) => { let desugar_expr = v_meth_call( et::VISIT_LOCAL, vec![pos_expr, string_literal(lid.0.clone(), &((lid.1).1))], &pos, ); let virtual_expr = Expr((), pos, Lvar(lid)); RewriteResult { virtual_expr, desugar_expr, } } Binop(binop) => { let aast::Binop { bop, lhs, rhs } = *binop; let rewritten_lhs = rewrite_expr( temps, lhs, visitor_name, errors, should_virtualize_functions, ); let rewritten_rhs = rewrite_expr( temps, rhs, visitor_name, errors, should_virtualize_functions, ); if bop == Bop::Eq(None) { // Source: MyDsl`$x = ...` // Virtualized: $x = ... // Desugared: $0v->visitAssign(new ExprPos(...), $0v->visitLocal(...), ...) let desugar_expr = v_meth_call( et::VISIT_ASSIGN, vec![ pos_expr, rewritten_lhs.desugar_expr, rewritten_rhs.desugar_expr, ], &pos, ); let virtual_expr = Expr( (), pos, Binop(Box::new(aast::Binop { bop, lhs: rewritten_lhs.virtual_expr, rhs: rewritten_rhs.virtual_expr, })), ); RewriteResult { virtual_expr, desugar_expr, } } else { // Source: MyDsl`... + ...` // Virtualized: ...->__plus(...) // Desugared: $0v->visitBinop(new ExprPos(...), ..., '__plus', ...) 
let binop_str = match bop { Bop::Plus => "__plus", Bop::Minus => "__minus", Bop::Star => "__star", Bop::Slash => "__slash", Bop::Percent => "__percent", // Convert boolean &&, || Bop::Ampamp => "__ampamp", Bop::Barbar => "__barbar", // Convert comparison operators, <, <=, >, >=, ===, !== Bop::Lt => "__lessThan", Bop::Lte => "__lessThanEqual", Bop::Gt => "__greaterThan", Bop::Gte => "__greaterThanEqual", Bop::Eqeqeq => "__tripleEquals", Bop::Diff2 => "__notTripleEquals", // Convert string concatenation Bop::Dot => "__dot", // Convert bitwise operators, &, |, ^, <<, >> Bop::Amp => "__amp", Bop::Bar => "__bar", Bop::Xor => "__caret", Bop::Ltlt => "__lessThanLessThan", Bop::Gtgt => "__greaterThanGreaterThan", // Explicit list of unsupported operators and error messages Bop::Starstar => { errors.push(( pos.clone(), "Expression trees do not support the exponent operator `**`.".into(), )); "__unsupported" } Bop::Eqeq | Bop::Diff => { errors.push(( pos.clone(), "Expression trees only support strict equality operators `===` and `!==`".into(), )); "__unsupported" } Bop::Cmp => { errors.push(( pos.clone(), "Expression trees do not support the spaceship operator `<=>`. Try comparison operators like `<` and `>=`".into(), )); "__unsupported" } Bop::QuestionQuestion => { errors.push(( pos.clone(), "Expression trees do not support the null coalesce operator `??`." .into(), )); "__unsupported" } Bop::Eq(_) => { errors.push(( pos.clone(), "Expression trees do not support compound assignments. Try the long form style `$foo = $foo + $bar` instead.".into(), )); "__unsupported" } }; let virtual_expr = meth_call( rewritten_lhs.virtual_expr, binop_str, vec![rewritten_rhs.virtual_expr], &pos, ); let desugar_expr = v_meth_call( et::VISIT_BINOP, vec![ pos_expr, rewritten_lhs.desugar_expr, string_literal(pos.clone(), binop_str), rewritten_rhs.desugar_expr, ], &pos, ); RewriteResult { virtual_expr, desugar_expr, } } } // Source: MyDsl`!...` // Virtualized: ...->__exclamationMark(...) // Desugared: $0v->visitUnop(new ExprPos(...), ..., '__exclamationMark') Unop(unop) => { let (op, operand) = *unop; let rewritten_operand = rewrite_expr( temps, operand, visitor_name, errors, should_virtualize_functions, ); let op_str = match op { // Allow boolean not operator !$x Uop::Unot => "__exclamationMark", // Allow negation -$x (required for supporting negative literals -123) Uop::Uminus => "__negate", // Allow bitwise complement Uop::Utild => "__tilde", // Currently not allowed operators Uop::Uplus => { errors.push(( pos.clone(), "Expression trees do not support the unary plus operator.".into(), )); "__unsupported" } // Postfix ++ Uop::Upincr => "__postfixPlusPlus", // Prefix ++ Uop::Uincr => { errors.push(( pos.clone(), "Expression trees only support postfix increment operator `$x++`.".into(), )); "__unsupported" } // Postfix -- Uop::Updecr => "__postfixMinusMinus", // Prefix -- Uop::Udecr => { errors.push(( pos.clone(), "Expression trees only support postfix decrement operator `$x--`.".into(), )); "__unsupported" } Uop::Usilence => { errors.push(( pos.clone(), "Expression trees do not support the error suppression operator `@`." .into(), )); "__unsupported" } }; let virtual_expr = meth_call(rewritten_operand.virtual_expr, op_str, vec![], &pos); let desugar_expr = v_meth_call( et::VISIT_UNOP, vec![ pos_expr, rewritten_operand.desugar_expr, string_literal(pos.clone(), op_str), ], &pos, ); RewriteResult { virtual_expr, desugar_expr, } } // Source: MyDsl`... ? ... : ...` // Virtualized: ...->__bool() ? ... : ... 
// Desugared: $0v->visitTernary(new ExprPos(...), ..., ..., ...) Eif(eif) => { let (e1, e2o, e3) = *eif; let rewritten_e1 = rewrite_expr(temps, e1, visitor_name, errors, should_virtualize_functions); let rewritten_e2 = if let Some(e2) = e2o { rewrite_expr(temps, e2, visitor_name, errors, should_virtualize_functions) } else { errors.push(( pos.clone(), "Unsupported expression tree syntax: Elvis operator".into(), )); unchanged_result }; let rewritten_e3 = rewrite_expr(temps, e3, visitor_name, errors, should_virtualize_functions); let desugar_expr = v_meth_call( et::VISIT_TERNARY, vec![ pos_expr, rewritten_e1.desugar_expr, rewritten_e2.desugar_expr, rewritten_e3.desugar_expr, ], &pos, ); let virtual_expr = Expr( (), pos, Eif(Box::new(( boolify(rewritten_e1.virtual_expr), Some(rewritten_e2.virtual_expr), rewritten_e3.virtual_expr, ))), ); RewriteResult { virtual_expr, desugar_expr, } } // Source: MyDsl`...()` // Virtualized: (...->__unwrap())() // Desugared: $0v->visitCall(new ExprPos(...), ..., vec[]) Call(call) => { let ast::CallExpr { func: recv, targs, args, unpacked_arg: variadic, } = *call; if variadic.is_some() { errors.push(( pos.clone(), "Expression trees do not support variadic calls.".into(), )); } if !targs.is_empty() { errors.push(( pos.clone(), "Expression trees do not support function calls with generics.".into(), )); } match &recv.2 { // Don't transform calls to `hh_show`. Id(sid) if is_typechecker_fun_name(&sid.1) => { let call_e = Expr::new( (), pos, Call(Box::new(ast::CallExpr { func: recv, targs, args, unpacked_arg: variadic, })), ); return RewriteResult { desugar_expr: call_e.clone(), virtual_expr: call_e, }; } _ => {} } let mut args_without_inout = vec![]; for arg in args { match arg { (ParamKind::Pnormal, e) => args_without_inout.push(e), (ParamKind::Pinout(_), Expr(_, p, _)) => errors.push(( p, "Expression trees do not support `inout` function calls.".into(), )), } } let (virtual_args, desugar_args) = rewrite_exprs( temps, args_without_inout, visitor_name, errors, should_virtualize_functions, ); match recv.2 { // Source: MyDsl`foo()` // Virtualized: (MyDsl::symbolType($0fpXX)->__unwrap())() // Desugared: $0v->visitCall(new ExprPos(...), $0v->visitGlobalFunction(new ExprPos(...), $0fpXX), vec[]) Id(sid) => { let len = temps.global_function_pointers.len(); temps.global_function_pointers.push(global_func_ptr(&sid)); let temp_variable = temp_function_pointer_lvar(&recv.1, len); let desugar_expr = v_meth_call( et::VISIT_CALL, vec![ pos_expr.clone(), v_meth_call( et::VISIT_GLOBAL_FUNCTION, vec![pos_expr, temp_variable.clone()], &pos, ), vec_literal(desugar_args), ], &pos, ); let virtual_expr = Expr( (), pos.clone(), Call(Box::new(ast::CallExpr { func: _virtualize_call( static_meth_call( visitor_name, et::SYMBOL_TYPE, vec![temp_variable], &pos, ), &pos, should_virtualize_functions, ), targs: vec![], args: build_args(virtual_args), unpacked_arg: None, })), ); RewriteResult { virtual_expr, desugar_expr, } } // Source: MyDsl`Foo::bar()` // Virtualized: (MyDsl::symbolType($0smXX)->__unwrap())() // Desugared: $0v->visitCall(new ExprPos(...), $0v->visitStaticMethod(new ExprPos(...), $0smXX, vec[]) ClassConst(cc) => { let (cid, s) = *cc; if let ClassId_::CIexpr(Expr(_, _, Id(sid))) = &cid.2 { if sid.1 == classes::PARENT || sid.1 == classes::SELF || sid.1 == classes::STATIC { errors.push(( pos, "Static method calls in expression trees require explicit class names.".into(), )); return unchanged_result; } } else { errors.push(( pos, "Expression trees only support function calls and 
static method calls on named classes.".into(), )); return unchanged_result; }; let len = temps.static_method_pointers.len(); temps .static_method_pointers .push(static_meth_ptr(&recv.1, &cid, &s)); let temp_variable = temp_static_method_lvar(&recv.1, len); let desugar_expr = v_meth_call( et::VISIT_CALL, vec![ pos_expr.clone(), v_meth_call( et::VISIT_STATIC_METHOD, vec![pos_expr, temp_variable.clone()], &pos, ), vec_literal(desugar_args), ], &pos, ); let virtual_expr = Expr( (), pos.clone(), Call(Box::new(ast::CallExpr { func: _virtualize_call( static_meth_call( visitor_name, et::SYMBOL_TYPE, vec![temp_variable], &pos, ), &pos, should_virtualize_functions, ), targs: vec![], args: build_args(virtual_args), unpacked_arg: None, })), ); RewriteResult { virtual_expr, desugar_expr, } } // Source: MyDsl`$x->bar()` // Virtualized: $x->bar() // Desugared: $0v->visitCall($0v->visitMethodCall(new ExprPos(...), $0v->visitLocal(new ExprPos(...), '$x'), 'bar'), vec[]) ObjGet(og) if og.3 == ast::PropOrMethod::IsMethod => { errors.push(( pos, "Expression trees do not support calling instance methods".into(), )); unchanged_result } _ => { let rewritten_recv = rewrite_expr( temps, Expr((), recv.1, recv.2), visitor_name, errors, should_virtualize_functions, ); let desugar_expr = v_meth_call( et::VISIT_CALL, vec![ pos_expr, rewritten_recv.desugar_expr, vec_literal(desugar_args), ], &pos, ); let virtual_expr = Expr( (), pos.clone(), Call(Box::new(ast::CallExpr { func: _virtualize_call( rewritten_recv.virtual_expr, &pos, should_virtualize_functions, ), targs: vec![], args: build_args(virtual_args), unpacked_arg: None, })), ); RewriteResult { virtual_expr, desugar_expr, } } } } // Source: MyDsl`($x) ==> { ... }` // Virtualized: ($x) ==> { ...; return MyDsl::voidType(); } // if no `return expr;` statements. // Desugared: $0v->visitLambda(new ExprPos(...), vec['$x'], vec[...]). Lfun(lf) => { let mut fun_ = lf.0; match &fun_ { aast::Fun_ { // Allow a plain function that isn't async. fun_kind: ast::FunKind::FSync, body: _, span: _, doc_comment: _, ret: _, annotation: (), params: _, user_attributes: _, // The function should not use any of these newer features. readonly_this: None, readonly_ret: None, ctxs: None, unsafe_ctxs: None, external: false, } => {} _ => { errors.push(( pos.clone(), "Expression trees only support simple lambdas, without features like `async`, generators or capabilities." .into(), )); } } let mut param_names = Vec::with_capacity(fun_.params.len()); for param in &fun_.params { if param.expr.is_some() { errors.push(( param.pos.clone(), "Expression trees do not support parameters with default values.".into(), )); } param_names.push(string_literal(param.pos.clone(), &param.name)); } let body = std::mem::take(&mut fun_.body.fb_ast.0); let should_append_return = only_void_return(&body); let (mut virtual_body_stmts, desugar_body) = rewrite_stmts( temps, body, visitor_name, errors, should_virtualize_functions, ); if should_append_return { virtual_body_stmts.push(Stmt( pos.clone(), aast::Stmt_::Return(Box::new(Some(static_meth_call( visitor_name, et::VOID_TYPE, vec![], &pos, )))), )); } let desugar_expr = v_meth_call( et::VISIT_LAMBDA, vec![ pos_expr, vec_literal(param_names), vec_literal(desugar_body), ], &pos, ); fun_.body.fb_ast = ast::Block(virtual_body_stmts); let virtual_expr = _virtualize_lambda( visitor_name, Expr((), pos.clone(), Lfun(Box::new((fun_, vec![])))), &pos, should_virtualize_functions, ); RewriteResult { virtual_expr, desugar_expr, } } // Source: MyDsl`${ ... 
}` // Virtualized to `${ ... }` // Desugared to `$0v->splice(new ExprPos(...), '$var_name', ...)` ETSplice(e) => { if let Err(err) = check_nested_splice(&e) { errors.push(err); }; let len = temps.splices.len(); let expr_pos = e.1.clone(); temps.splices.push(*e); let temp_variable = temp_splice_lvar(&expr_pos, len); let temp_variable_string = string_literal(expr_pos, &temp_splice_lvar_string(len)); let desugar_expr = v_meth_call( et::SPLICE, vec![pos_expr, temp_variable_string, temp_variable.clone()], &pos, ); let virtual_expr = Expr((), pos, ETSplice(Box::new(temp_variable))); RewriteResult { virtual_expr, desugar_expr, } } // Source: MyDsl`(...)->foo` // Virtualized to: `(...)->foo` // Desugared to `$0v->visitPropertyAccess(new ExprPos(...), ...), 'foo')` ObjGet(og) => { let (e1, e2, null_flavor, is_prop_call) = *og; if null_flavor == OgNullFlavor::OGNullsafe { errors.push(( pos.clone(), "Expression Trees do not support nullsafe property access".into(), )); } let rewritten_e1 = rewrite_expr(temps, e1, visitor_name, errors, should_virtualize_functions); let id = if let Id(id) = &e2.2 { string_literal(id.0.clone(), &id.1) } else { errors.push(( pos.clone(), "Expression trees only support named property access.".into(), )); e2.clone() }; let desugar_expr = v_meth_call( et::VISIT_PROPERTY_ACCESS, vec![pos_expr, rewritten_e1.desugar_expr, id], &pos, ); let virtual_expr = Expr( (), pos, ObjGet(Box::new(( rewritten_e1.virtual_expr, e2, null_flavor, is_prop_call, ))), ); RewriteResult { virtual_expr, desugar_expr, } } // Source: MyDsl`<foo my-attr="stuff">text <foo-child/> </foo>` // Virtualized: <foo my-attr={MyDsl::stringType()}>{MyDsl::stringType()} <foo-child/> </foo> // Desugared: // $0v->visitXhp( // new ExprPos(...), // :foo::class, // dict["my-attr" => $0v->visitString(...)], // vec[ // $0v->visitString(..., "text ")], // $0v->visitXhp(..., :foo-child::class, ...), // ], // ) Xml(xml) => { let (hint, attrs, children) = *xml; let mut virtual_attrs = vec![]; let mut desugar_attrs = vec![]; for attr in attrs { match attr { aast::XhpAttribute::XhpSimple(xs) => { let (attr_name_pos, attr_name) = xs.name.clone(); let dict_key = Expr::new((), attr_name_pos, Expr_::String(BString::from(attr_name))); let rewritten_attr_expr = rewrite_expr( temps, xs.expr, visitor_name, errors, should_virtualize_functions, ); desugar_attrs.push((dict_key, rewritten_attr_expr.desugar_expr)); virtual_attrs.push(aast::XhpAttribute::XhpSimple(aast::XhpSimple { expr: rewritten_attr_expr.virtual_expr, ..xs })) } aast::XhpAttribute::XhpSpread(e) => { errors.push(( e.1, "Expression trees do not support attribute spread syntax.".into(), )); } } } let (virtual_children, desugar_children) = rewrite_exprs( temps, children, visitor_name, errors, should_virtualize_functions, ); // Construct :foo::class. let hint_pos = hint.0.clone(); let hint_class = Expr_::ClassConst(Box::new(( ClassId( (), hint_pos.clone(), ClassId_::CIexpr(Expr::new( (), hint_pos.clone(), Expr_::Id(Box::new(ast_defs::Id(hint_pos.clone(), hint.1.clone()))), )), ), (hint_pos, "class".to_string()), ))); let virtual_expr = Expr( (), pos.clone(), Xml(Box::new((hint, virtual_attrs, virtual_children))), ); let desugar_expr = v_meth_call( et::VISIT_XHP, vec![ pos_expr, Expr((), pos.clone(), hint_class), dict_literal(&pos, desugar_attrs), vec_literal(desugar_children), ], &pos, ); RewriteResult { virtual_expr, desugar_expr, } } ClassConst(_) => { errors.push(( pos, "Expression trees do not support directly referencing class consts. 
Consider splicing values defined outside the scope of an Expression Tree using ${...}.".into(), )); unchanged_result } Efun(_) => { errors.push(( pos, "Expression trees do not support PHP lambdas. Consider using Hack lambdas `() ==> {}` instead.".into(), )); unchanged_result } ExpressionTree(_) => { errors.push(( pos, "Expression trees may not be nested. Consider splicing Expression trees together using `${}`.".into() )); unchanged_result } _ => { errors.push((pos, "Unsupported expression tree syntax.".into())); unchanged_result } } } fn rewrite_exprs( temps: &mut Temporaries, exprs: Vec<Expr>, visitor_name: &str, errors: &mut Vec<(Pos, String)>, should_virtualize_functions: bool, ) -> (Vec<Expr>, Vec<Expr>) { let mut virtual_results = Vec::with_capacity(exprs.len()); let mut desugar_results = Vec::with_capacity(exprs.len()); for expr in exprs { let rewritten_expr = rewrite_expr( temps, expr, visitor_name, errors, should_virtualize_functions, ); virtual_results.push(rewritten_expr.virtual_expr); desugar_results.push(rewritten_expr.desugar_expr); } (virtual_results, desugar_results) } fn rewrite_stmts( temps: &mut Temporaries, stmts: Vec<Stmt>, visitor_name: &str, errors: &mut Vec<(Pos, String)>, should_virtualize_functions: bool, ) -> (Vec<Stmt>, Vec<Expr>) { let mut virtual_results = Vec::with_capacity(stmts.len()); let mut desugar_results = Vec::with_capacity(stmts.len()); for stmt in stmts { let (virtual_stmt, desugared_expr) = rewrite_stmt( temps, stmt, visitor_name, errors, should_virtualize_functions, ); virtual_results.push(virtual_stmt); if let Some(desugared_expr) = desugared_expr { desugar_results.push(desugared_expr); } } (virtual_results, desugar_results) } fn rewrite_stmt( temps: &mut Temporaries, s: Stmt, visitor_name: &str, errors: &mut Vec<(Pos, String)>, should_virtualize_functions: bool, ) -> (Stmt, Option<Expr>) { use aast::Stmt_::*; let unchanged_result = (s.clone(), None); let Stmt(pos, stmt_) = s; let pos_expr = exprpos(&pos); match stmt_ { Expr(e) => { let result = rewrite_expr(temps, *e, visitor_name, errors, should_virtualize_functions); ( Stmt(pos, Expr(Box::new(result.virtual_expr))), Some(result.desugar_expr), ) } Return(e) => match *e { // Source: MyDsl`return ...;` // Virtualized: return ...; // Desugared: $0v->visitReturn(new ExprPos(...), $0v->...) Some(e) => { let result = rewrite_expr(temps, e, visitor_name, errors, should_virtualize_functions); let desugar_expr = v_meth_call(et::VISIT_RETURN, vec![pos_expr, result.desugar_expr], &pos); let virtual_stmt = Stmt(pos, Return(Box::new(Some(result.virtual_expr)))); (virtual_stmt, Some(desugar_expr)) } // Source: MyDsl`return;` // Virtualized: return MyDsl::voidType(); // Desugared: $0v->visitReturn(new ExprPos(...), null) None => { let desugar_expr = v_meth_call( et::VISIT_RETURN, vec![pos_expr, null_literal(pos.clone())], &pos, ); let virtual_void_expr = static_meth_call(visitor_name, et::VOID_TYPE, vec![], &pos); let virtual_stmt = Stmt(pos, Return(Box::new(Some(virtual_void_expr)))); (virtual_stmt, Some(desugar_expr)) } }, // Source: MyDsl`if (...) 
{...} else {...}` // Virtualized: if (...->__bool())) {...} else {...} // Desugared: $0v->visitIf(new ExprPos(...), $0v->..., vec[...], vec[...]) If(if_stmt) => { let (cond_expr, then_block, else_block) = *if_stmt; let rewritten_cond = rewrite_expr( temps, cond_expr, visitor_name, errors, should_virtualize_functions, ); let (virtual_then_stmts, desugar_then) = rewrite_stmts( temps, then_block.0, visitor_name, errors, should_virtualize_functions, ); let (virtual_else_stmts, desugar_else) = rewrite_stmts( temps, else_block.0, visitor_name, errors, should_virtualize_functions, ); let desugar_expr = v_meth_call( et::VISIT_IF, vec![ pos_expr, rewritten_cond.desugar_expr, vec_literal(desugar_then), vec_literal(desugar_else), ], &pos, ); let virtual_stmt = Stmt( pos, If(Box::new(( boolify(rewritten_cond.virtual_expr), ast::Block(virtual_then_stmts), ast::Block(virtual_else_stmts), ))), ); (virtual_stmt, Some(desugar_expr)) } // Source: MyDsl`while (...) {...}` // Virtualized: while (...->__bool()) {...} // Desugared: $0v->visitWhile(new ExprPos(...), $0v->..., vec[...]) While(w) => { let (cond, body) = *w; let rewritten_cond = rewrite_expr( temps, cond, visitor_name, errors, should_virtualize_functions, ); let (virtual_body_stmts, desugar_body) = rewrite_stmts( temps, body.0, visitor_name, errors, should_virtualize_functions, ); let desugar_expr = v_meth_call( et::VISIT_WHILE, vec![ pos_expr, rewritten_cond.desugar_expr, vec_literal(desugar_body), ], &pos, ); let virtual_stmt = Stmt( pos, While(Box::new(( boolify(rewritten_cond.virtual_expr), ast::Block(virtual_body_stmts), ))), ); (virtual_stmt, Some(desugar_expr)) } // Source: MyDsl`for (...; ...; ...) {...}` // Virtualized: for (...; ...->__bool(); ...) {...} // Desugared: $0v->visitFor(new ExprPos(...), vec[...], ..., vec[...], vec[...]) For(w) => { let (init, cond, incr, body) = *w; let (virtual_init_exprs, desugar_init_exprs) = rewrite_exprs( temps, init, visitor_name, errors, should_virtualize_functions, ); let (virtual_cond_option, desugar_cond_expr) = match cond { Some(cond) => { let rewritten_cond = rewrite_expr( temps, cond, visitor_name, errors, should_virtualize_functions, ); ( Some(boolify(rewritten_cond.virtual_expr)), rewritten_cond.desugar_expr, ) } None => (None, null_literal(pos.clone())), }; let (virtual_incr_exprs, desugar_incr_exprs) = rewrite_exprs( temps, incr, visitor_name, errors, should_virtualize_functions, ); let (virtual_body_stmts, desugar_body) = rewrite_stmts( temps, body.0, visitor_name, errors, should_virtualize_functions, ); let desugar_expr = v_meth_call( et::VISIT_FOR, vec![ pos_expr, vec_literal(desugar_init_exprs), desugar_cond_expr, vec_literal(desugar_incr_exprs), vec_literal(desugar_body), ], &pos, ); let virtual_stmt = Stmt( pos, For(Box::new(( virtual_init_exprs, virtual_cond_option, virtual_incr_exprs, ast::Block(virtual_body_stmts), ))), ); (virtual_stmt, Some(desugar_expr)) } // Source: MyDsl`break;` // Virtualized: break; // Desugared: $0v->visitBreak(new ExprPos(...)) Break => { let desugar_expr = v_meth_call(et::VISIT_BREAK, vec![pos_expr], &pos); let virtual_stmt = Stmt(pos, Break); (virtual_stmt, Some(desugar_expr)) } // Source: MyDsl`continue;` // Virtualized: continue; // Desugared: $0v->visitContinue(new ExprPos(...)) Continue => { let desugar_expr = v_meth_call(et::VISIT_CONTINUE, vec![pos_expr], &pos); let virtual_stmt = Stmt(pos, Continue); (virtual_stmt, Some(desugar_expr)) } Noop => (Stmt(pos, Noop), None), // Unsupported operators Do(_) => { errors.push(( pos, "Expression trees do not 
support `do while` loops. Consider using a `while` loop instead.".into(), )); unchanged_result } Switch(_) => { errors.push(( pos, "Expression trees do not support `switch` statements. Consider using `if`/`else if`/`else` instead.".into(), )); unchanged_result } Foreach(_) => { errors.push(( pos, "Expression trees do not support `foreach` loops. Consider using a `for` loop or a `while` loop instead.".into(), )); unchanged_result } _ => { errors.push(( pos, "Expression trees do not support this statement syntax.".into(), )); unchanged_result } } } fn hint_name(hint: &aast::Hint) -> Result<String, (Pos, String)> { if let Hint_::Happly(id, _) = &*hint.1 { Ok(id.1.clone()) } else { Err(( hint.0.clone(), "Could not determine the visitor type for this Expression Tree".into(), )) } } fn immediately_invoked_lambda( pos: &Pos, stmts: Vec<Stmt>, captured_arguments: Vec<((String, Pos), Expr)>, ) -> Expr { let (params_name_pos, call_args): (Vec<(String, Pos)>, Vec<Expr>) = captured_arguments.into_iter().unzip(); let fun_params = params_name_pos .into_iter() .map(|(name, pos): (String, Pos)| -> ast::FunParam { ast::FunParam { annotation: (), type_hint: ast::TypeHint((), None), is_variadic: false, pos, name, expr: None, callconv: ParamKind::Pnormal, readonly: None, user_attributes: Default::default(), visibility: None, } }) .collect(); let call_args = call_args .into_iter() .map(|e: Expr| -> (ParamKind, Expr) { (ParamKind::Pnormal, e) }) .collect(); let func_body = ast::FuncBody { fb_ast: ast::Block(stmts), }; let fun_ = wrap_fun_(func_body, fun_params, pos.clone()); let lambda_expr = Expr::new((), pos.clone(), Expr_::mk_lfun(fun_, vec![])); Expr::new( (), pos.clone(), Expr_::Call(Box::new(ast::CallExpr { func: lambda_expr, targs: vec![], args: call_args, unpacked_arg: None, })), ) } /// Is this a typechecker pseudo function like `hh_show` that /// shouldn't be desugared? fn is_typechecker_fun_name(name: &str) -> bool { strip_ns(name) == strip_ns(pseudo_functions::HH_SHOW) || strip_ns(name) == strip_ns(pseudo_functions::HH_EXPECT) || strip_ns(name) == strip_ns(pseudo_functions::HH_EXPECT_EQUIVALENT) || strip_ns(name) == strip_ns(pseudo_functions::HH_SHOW_ENV) } fn strip_ns(name: &str) -> &str { match name.chars().next() { Some('\\') => &name[1..], _ => name, } } fn _virtualize_call(e: Expr, pos: &Pos, should_virtualize_functions: bool) -> Expr { if should_virtualize_functions { meth_call(e, "__unwrap", vec![], pos) } else { e } } fn _virtualize_lambda( visitor_name: &str, e: Expr, pos: &Pos, should_virtualize_functions: bool, ) -> Expr { if should_virtualize_functions { static_meth_call(visitor_name, et::LAMBDA_TYPE, vec![e], pos) } else { e } } /// Return a shape literal that describes the values inside this /// expression tree literal. For example, given the expression tree: /// /// $et = Code`${ $x } + foo() + Bar::baz()`; /// /// The metadata is: /// /// shape( /// // Simplified: We actually use a temporary variable whose value is $x.
/// 'splices' => dict['$0splice0' => $x], /// /// 'functions' => vec[foo<>], /// 'static_methods' => vec[Bar::baz<>], /// ) fn maketree_metadata( pos: &Pos, splices: &[Expr], functions: &[Expr], static_methods: &[Expr], ) -> Expr { let key_value_pairs = splices .iter() .enumerate() .map(|(i, expr)| { let key = Expr::new( (), expr.1.clone(), Expr_::String(BString::from(temp_splice_lvar_string(i))), ); let value = temp_splice_lvar(&expr.1, i); (key, value) }) .collect(); let splices_dict = dict_literal(pos, key_value_pairs); let function_vars = functions .iter() .enumerate() .map(|(i, expr)| temp_function_pointer_lvar(&expr.1, i)) .collect(); let functions_vec = vec_literal_with_pos(pos, function_vars); let static_method_vars = static_methods .iter() .enumerate() .map(|(i, expr)| temp_static_method_lvar(&expr.1, i)) .collect(); let static_method_vec = vec_literal_with_pos(pos, static_method_vars); shape_literal( pos, vec![ ("splices", splices_dict), ("functions", functions_vec), ("static_methods", static_method_vec), ], ) } fn global_func_ptr(sid: &Sid) -> Expr { let pos = sid.0.clone(); Expr::new( (), pos, Expr_::FunctionPointer(Box::new((ast::FunctionPtrId::FPId(sid.clone()), vec![]))), ) } fn static_meth_ptr(pos: &Pos, cid: &ClassId, meth: &Pstring) -> Expr { Expr::new( (), pos.clone(), Expr_::FunctionPointer(Box::new(( aast::FunctionPtrId::FPClassConst(cid.clone(), meth.clone()), vec![], ))), ) }
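To see the two-track shape of rewrite_expr in isolation, here is a self-contained toy model. It is an illustrative sketch only: it uses plain strings in place of the real oxidized AST types, and it mirrors just the Int and `+` Binop cases documented in the comments above, where each node yields a virtualized expression for typing and a desugared visitor call for runtime.

enum Toy {
    Int(i64),
    Add(Box<Toy>, Box<Toy>),
}

struct Rewritten {
    virtual_expr: String,
    desugar_expr: String,
}

fn rewrite(e: &Toy, visitor: &str) -> Rewritten {
    match e {
        // Mirrors the Int arm: virtualized `MyDsl::intType()`,
        // desugared `$0v->visitInt(...)`.
        Toy::Int(i) => Rewritten {
            virtual_expr: format!("{}::intType()", visitor),
            desugar_expr: format!("$0v->visitInt({})", i),
        },
        // Mirrors the `+` Binop arm: virtualized `lhs->__plus(rhs)`,
        // desugared `$0v->visitBinop(lhs, '__plus', rhs)`.
        Toy::Add(lhs, rhs) => {
            let (l, r) = (rewrite(lhs, visitor), rewrite(rhs, visitor));
            Rewritten {
                virtual_expr: format!("{}->__plus({})", l.virtual_expr, r.virtual_expr),
                desugar_expr: format!(
                    "$0v->visitBinop({}, '__plus', {})",
                    l.desugar_expr, r.desugar_expr
                ),
            }
        }
    }
}

fn main() {
    // MyDsl`1 + 2`, in toy form.
    let e = Toy::Add(Box::new(Toy::Int(1)), Box::new(Toy::Int(2)));
    let r = rewrite(&e, "MyDsl");
    println!("virtualized: {}", r.virtual_expr);
    println!("desugared:   {}", r.desugar_expr);
}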
Rust
hhvm/hphp/hack/src/parser/lowerer/lib.rs
// Copyright (c) 2019, Facebook, Inc. // All rights reserved. // // This source code is licensed under the MIT license found in the // LICENSE file in the "hack" directory of this source tree. #![feature(box_patterns)] #[macro_use] extern crate lazy_static; mod desugar_expression_tree; mod lowerer; mod modifier; mod scour_comment; pub use lowerer::lower; pub use lowerer::Env; pub use scour_comment::ScourComment;
Rust
hhvm/hphp/hack/src/parser/lowerer/lowerer.rs
// Copyright (c) 2019, Facebook, Inc. // All rights reserved. // // This source code is licensed under the MIT license found in the // LICENSE file in the "hack" directory of this source tree. use std::cell::Ref; use std::cell::RefCell; use std::cell::RefMut; use std::matches; use std::mem; use std::rc::Rc; use std::slice::Iter; use std::str::FromStr; use std::sync::Arc; use bstr::BString; use bstr::B; use bumpalo::Bump; use escaper::*; use hash::HashMap; use hash::HashSet; use itertools::Either; use itertools::Itertools; use lint_rust::LintError; use naming_special_names_rust as sn; use naming_special_names_rust::classes as special_classes; use naming_special_names_rust::literal; use naming_special_names_rust::special_functions; use naming_special_names_rust::special_idents; use naming_special_names_rust::typehints as special_typehints; use naming_special_names_rust::user_attributes as special_attrs; use ocaml_helper::int_of_string_opt; use ocaml_helper::parse_int; use ocaml_helper::ParseIntError; use oxidized::aast; use oxidized::aast::Binop; use oxidized::aast_defs::ClassReq; use oxidized::aast_defs::DocComment; use oxidized::aast_visitor::AstParams; use oxidized::aast_visitor::Node; use oxidized::aast_visitor::Visitor; use oxidized::ast; use oxidized::ast::Expr; use oxidized::ast::Expr_; use oxidized::errors::Error as HHError; use oxidized::errors::Naming; use oxidized::errors::NastCheck; use oxidized::file_info; use oxidized::global_options::GlobalOptions; use oxidized::namespace_env::Env as NamespaceEnv; use oxidized::pos::Pos; use parser_core_types::indexed_source_text::IndexedSourceText; use parser_core_types::lexable_token::LexablePositionedToken; use parser_core_types::lexable_token::LexableToken; use parser_core_types::source_text::SourceText; use parser_core_types::syntax::SyntaxValueWithKind; use parser_core_types::syntax_by_ref::positioned_token::PositionedToken; use parser_core_types::syntax_by_ref::positioned_token::TokenFactory as PositionedTokenFactory; use parser_core_types::syntax_by_ref::positioned_value::PositionedValue; use parser_core_types::syntax_by_ref::syntax::Syntax; use parser_core_types::syntax_by_ref::syntax_variant_generated::SyntaxVariant::*; use parser_core_types::syntax_by_ref::syntax_variant_generated::*; use parser_core_types::syntax_error; use parser_core_types::syntax_kind; use parser_core_types::syntax_trait::SyntaxTrait; use parser_core_types::token_factory::TokenMutator; use parser_core_types::token_kind::TokenKind as TK; use regex::bytes::Regex; use thiserror::Error; use crate::desugar_expression_tree::desugar; use crate::modifier; fn unescape_single(s: &str) -> Result<BString, escaper::InvalidString> { Ok(escaper::unescape_single(s)?.into()) } fn unescape_nowdoc(s: &str) -> Result<BString, escaper::InvalidString> { Ok(escaper::unescape_nowdoc(s)?.into()) } #[derive(Debug, Clone, Copy, Eq, PartialEq)] pub enum LiftedAwaitKind { LiftedFromStatement, LiftedFromConcurrent, } type LiftedAwaitExprs = Vec<(Option<ast::Lid>, ast::Expr)>; #[derive(Debug, Clone)] pub struct LiftedAwaits { pub awaits: LiftedAwaitExprs, lift_kind: LiftedAwaitKind, } #[derive(Debug, Clone, Copy, Eq, PartialEq)] pub enum ExprLocation { TopLevel, MemberSelect, InDoubleQuotedString, AsStatement, RightOfAssignment, RightOfAssignmentInUsingStatement, RightOfReturn, UsingStatement, CallReceiver, } #[derive(Debug, Clone, Copy, Eq, PartialEq)] pub enum SuspensionKind { SKSync, SKAsync, } #[derive(Copy, Clone, Eq, PartialEq)] pub enum TokenOp { Skip, Noop, LeftTrim(usize), 
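    // Trim counts for LeftTrim/RightTrim are computed in prep_string2 below,
    // when stripping opening/closing quotes and heredoc markers off the
    // tokens of an interpolated string.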
RightTrim(usize), } #[derive(Debug)] pub struct FunHdr { suspension_kind: SuspensionKind, readonly_this: Option<ast::ReadonlyKind>, name: ast::Sid, constrs: Vec<ast::WhereConstraintHint>, type_parameters: Vec<ast::Tparam>, parameters: Vec<ast::FunParam>, contexts: Option<ast::Contexts>, unsafe_contexts: Option<ast::Contexts>, readonly_return: Option<ast::ReadonlyKind>, return_type: Option<ast::Hint>, internal: bool, } impl FunHdr { fn make_empty(env: &Env<'_>) -> Self { Self { suspension_kind: SuspensionKind::SKSync, readonly_this: None, name: ast::Id(env.mk_none_pos(), String::from("<ANONYMOUS>")), constrs: vec![], type_parameters: vec![], parameters: vec![], contexts: None, unsafe_contexts: None, readonly_return: None, return_type: None, internal: false, } } } #[derive(Debug)] pub struct State { // bool represents reification pub cls_generics: HashMap<String, bool>, // fn_generics also used for methods; maps are separate due to shadowing pub fn_generics: HashMap<String, bool>, pub in_static_method: bool, pub parent_maybe_reified: bool, /// Parsing errors emitted during lowering. Note that most parsing /// errors are emitted in the initial FFP parse. pub parsing_errors: Vec<(Pos, String)>, /// hh_errors captures errors after parsing, naming, nast, etc. pub hh_errors: Vec<HHError>, pub lint_errors: Vec<LintError>, pub doc_comments: Vec<Option<DocComment>>, pub local_id_counter: isize, // TODO(hrust): this check is to avoid crash in Ocaml. // Remove it after all Ocaml callers are eliminated. pub exp_recursion_depth: usize, } const EXP_RECURSION_LIMIT: usize = 30_000; #[derive(Clone)] pub struct Env<'a> { pub codegen: bool, quick_mode: bool, /// Show errors even in quick mode. /// Hotfix until we can properly set up saved states to surface parse errors during /// typechecking properly. pub show_all_errors: bool, file_mode: file_info::Mode, pub top_level_statements: bool, /* Whether we are (still) considering TLSs*/ // Cache none pos, lazy_static doesn't allow Rc. 
pos_none: Pos, pub empty_ns_env: Arc<NamespaceEnv>, pub saw_yield: bool, /* Information flowing back up */ pub lifted_awaits: Option<LiftedAwaits>, pub tmp_var_counter: isize, pub indexed_source_text: &'a IndexedSourceText<'a>, pub parser_options: &'a GlobalOptions, pub token_factory: PositionedTokenFactory<'a>, pub arena: &'a Bump, state: Rc<RefCell<State>>, } impl<'a> Env<'a> { pub fn make( codegen: bool, quick_mode: bool, show_all_errors: bool, mode: file_info::Mode, indexed_source_text: &'a IndexedSourceText<'a>, parser_options: &'a GlobalOptions, namespace_env: Arc<NamespaceEnv>, token_factory: PositionedTokenFactory<'a>, arena: &'a Bump, ) -> Self { Env { codegen, quick_mode, show_all_errors, file_mode: mode, top_level_statements: true, saw_yield: false, lifted_awaits: None, tmp_var_counter: 1, indexed_source_text, parser_options, pos_none: Pos::NONE, empty_ns_env: namespace_env, token_factory, arena, state: Rc::new(RefCell::new(State { cls_generics: HashMap::default(), fn_generics: HashMap::default(), in_static_method: false, parent_maybe_reified: false, parsing_errors: vec![], doc_comments: vec![], local_id_counter: 1, hh_errors: vec![], lint_errors: vec![], exp_recursion_depth: 0, })), } } fn file_mode(&self) -> file_info::Mode { self.file_mode } fn should_surface_error(&self) -> bool { !self.quick_mode || self.show_all_errors } fn is_typechecker(&self) -> bool { !self.codegen } fn codegen(&self) -> bool { self.codegen } fn source_text(&self) -> &SourceText<'a> { self.indexed_source_text.source_text() } fn cls_generics_mut(&mut self) -> RefMut<'_, HashMap<String, bool>> { RefMut::map(self.state.borrow_mut(), |s| &mut s.cls_generics) } fn fn_generics_mut(&mut self) -> RefMut<'_, HashMap<String, bool>> { RefMut::map(self.state.borrow_mut(), |s| &mut s.fn_generics) } pub fn clear_generics(&mut self) { let mut s = self.state.borrow_mut(); s.cls_generics = HashMap::default(); s.fn_generics = HashMap::default(); } // avoids returning a reference to the env pub fn get_reification(&self, id: &str) -> Option<bool> { let s = self.state.borrow(); if let Some(reif) = s.fn_generics.get(id) { Some(*reif) } else { s.cls_generics.get(id).copied() } } fn in_static_method(&mut self) -> RefMut<'_, bool> { RefMut::map(self.state.borrow_mut(), |s| &mut s.in_static_method) } fn parent_maybe_reified(&mut self) -> RefMut<'_, bool> { RefMut::map(self.state.borrow_mut(), |s| &mut s.parent_maybe_reified) } pub fn parsing_errors(&mut self) -> RefMut<'_, Vec<(Pos, String)>> { RefMut::map(self.state.borrow_mut(), |s| &mut s.parsing_errors) } pub fn hh_errors(&mut self) -> RefMut<'_, Vec<HHError>> { RefMut::map(self.state.borrow_mut(), |s| &mut s.hh_errors) } pub fn lint_errors(&mut self) -> RefMut<'_, Vec<LintError>> { RefMut::map(self.state.borrow_mut(), |s| &mut s.lint_errors) } fn top_docblock(&self) -> Ref<'_, Option<DocComment>> { Ref::map(self.state.borrow(), |s| { s.doc_comments.last().unwrap_or(&None) }) } fn exp_recursion_depth(&self) -> RefMut<'_, usize> { RefMut::map(self.state.borrow_mut(), |s| &mut s.exp_recursion_depth) } fn next_local_id(&self) -> isize { let mut id = RefMut::map(self.state.borrow_mut(), |s| &mut s.local_id_counter); *id += 1; *id } fn push_docblock(&mut self, doc_comment: Option<DocComment>) { RefMut::map(self.state.borrow_mut(), |s| &mut s.doc_comments).push(doc_comment) } fn pop_docblock(&mut self) { RefMut::map(self.state.borrow_mut(), |s| &mut s.doc_comments).pop(); } fn make_tmp_var_name(&mut self) -> String { let name = String::from(special_idents::TMP_VAR_PREFIX) + 
&self.tmp_var_counter.to_string(); self.tmp_var_counter += 1; name } fn mk_none_pos(&self) -> Pos { self.pos_none.clone() } fn clone_and_unset_toplevel_if_toplevel<'b, 'c>( e: &'b mut Env<'c>, ) -> impl AsMut<Env<'c>> + 'b { if e.top_level_statements { let mut cloned = e.clone(); cloned.top_level_statements = false; Either::Left(cloned) } else { Either::Right(e) } } } impl<'a> AsMut<Env<'a>> for Env<'a> { fn as_mut(&mut self) -> &mut Env<'a> { self } } type Result<T, E = Error> = std::result::Result<T, E>; #[derive(Debug, Error, PartialEq)] pub enum Error { #[error( "missing case in {expecting:?}.\n - pos: {pos:?}\n - unexpected: '{node_name:?}'\n - kind: {kind:?}\n" )] MissingSyntax { expecting: String, pos: Pos, node_name: String, kind: syntax_kind::SyntaxKind, }, #[error("{message}")] ParsingError { message: String, pos: Pos }, } fn emit_error<'a>(error: Error, env: &mut Env<'a>) { // Don't emit multiple parsing errors during lowering. Once we've // seen one parsing error, later parsing errors are rarely // meaningful. if !env.parsing_errors().is_empty() { return; } match error { Error::MissingSyntax { expecting, pos, node_name, .. } => { let msg = syntax_error::lowering_parsing_error(&node_name, &expecting); env.parsing_errors().push((pos, msg.to_string())); } Error::ParsingError { message, pos } => { env.parsing_errors().push((pos, message)); } } } type S<'arena> = &'arena Syntax<'arena, PositionedToken<'arena>, PositionedValue<'arena>>; fn p_pos<'a>(node: S<'a>, env: &Env<'_>) -> Pos { node.position_exclusive(env.indexed_source_text) .map_or_else(|| env.mk_none_pos(), Into::into) } fn raise_parsing_error<'a>(node: S<'a>, env: &mut Env<'a>, msg: &str) { let pos = p_pos(node, env); raise_parsing_error_(pos, env, msg) } fn raise_parsing_error_pos(pos: &Pos, env: &mut Env<'_>, msg: &str) { raise_parsing_error_(pos.clone(), env, msg) } fn raise_parsing_error_(pos: Pos, env: &mut Env<'_>, msg: &str) { if env.should_surface_error() || env.codegen() { env.parsing_errors().push((pos, String::from(msg))) } } fn raise_hh_error(env: &mut Env<'_>, err: HHError) { env.hh_errors().push(err); } fn raise_lint_error(env: &mut Env<'_>, err: LintError) { env.lint_errors().push(err); } fn parsing_error<N>(msg: impl Into<String>, pos: Pos) -> Result<N> { Err(Error::ParsingError { message: msg.into(), pos, }) } fn text<'a>(node: S<'a>, env: &Env<'_>) -> String { String::from(node.text(env.source_text())) } fn text_str<'b, 'a>(node: S<'a>, env: &'b Env<'_>) -> &'b str { node.text(env.source_text()) } fn lowering_error(env: &mut Env<'_>, pos: &Pos, text: &str, syntax_kind: &str) { if env.is_typechecker() && env.parsing_errors().is_empty() { raise_parsing_error_pos( pos, env, &syntax_error::lowering_parsing_error(text, syntax_kind), ) } } fn raise_missing_syntax(expecting: &str, node: S<'_>, env: &mut Env<'_>) { let pos = p_pos(node, env); let text = text(node, env); lowering_error(env, &pos, &text, expecting); } fn missing_syntax<'a, N>(expecting: &str, node: S<'a>, env: &mut Env<'a>) -> Result<N> { let text = text(node, env); Err(Error::MissingSyntax { expecting: String::from(expecting), pos: p_pos(node, env), node_name: text, kind: node.kind(), }) } fn map_optional<'a, F, R>(node: S<'a>, env: &mut Env<'a>, p: F) -> Result<Option<R>> where F: FnOnce(S<'a>, &mut Env<'a>) -> Result<R>, { match &node.children { Missing => Ok(None), _ => p(node, env).map(Some), } } fn map_optional_emit_error<'a, F, R>(node: S<'a>, env: &mut Env<'a>, p: F) -> Option<R> where F: FnOnce(S<'a>, &mut Env<'a>) -> Result<R>, { match 
&node.children { Missing => None, _ => match p(node, env) { Ok(v) => Some(v), Err(e) => { emit_error(e, env); None } }, } } fn pos_module_name<'a>(node: S<'a>, env: &mut Env<'a>) -> Result<ast::Sid> { if let ModuleName(c) = &node.children { if let SyntaxList(l) = &c.parts.children { let p = p_pos(node, env); let mut s = String::with_capacity(node.width()); for i in l.iter() { match &i.children { ListItem(li) => { s += text_str(&li.item, env); s += text_str(&li.separator, env); } _ => s += text_str(i, env), } } return Ok(ast::Id(p, s)); } } missing_syntax("module name", node, env) } fn pos_qualified_name<'a>(node: S<'a>, env: &mut Env<'a>) -> Result<ast::Sid> { if let QualifiedName(c) = &node.children { if let SyntaxList(l) = &c.parts.children { let p = p_pos(node, env); let mut s = String::with_capacity(node.width()); for i in l.iter() { match &i.children { ListItem(li) => { s += text_str(&li.item, env); s += text_str(&li.separator, env); } _ => s += text_str(i, env), } } return Ok(ast::Id(p, s)); } } missing_syntax("qualified name", node, env) } fn pos_name<'a>(node: S<'a>, env: &mut Env<'a>) -> Result<ast::Sid> { pos_name_(node, env, None) } fn lid_from_pos_name<'a>(pos: Pos, name: S<'a>, env: &mut Env<'a>) -> Result<ast::Lid> { let name = pos_name(name, env)?; Ok(ast::Lid::new(pos, name.1)) } fn lid_from_name<'a>(name: S<'a>, env: &mut Env<'a>) -> Result<ast::Lid> { let name = pos_name(name, env)?; Ok(ast::Lid::new(name.0, name.1)) } fn p_pstring<'a>(node: S<'a>, env: &mut Env<'a>) -> Result<ast::Pstring> { p_pstring_(node, env, None) } fn p_pstring_<'a>( node: S<'a>, env: &mut Env<'a>, drop_prefix: Option<char>, ) -> Result<ast::Pstring> { let ast::Id(p, id) = pos_name_(node, env, drop_prefix)?; Ok((p, id)) } fn drop_prefix(s: &str, prefix: char) -> &str { if !s.is_empty() && s.starts_with(prefix) { &s[1..] } else { s } } fn pos_name_<'a>(node: S<'a>, env: &mut Env<'a>, drop_prefix_c: Option<char>) -> Result<ast::Sid> { match &node.children { QualifiedName(_) => pos_qualified_name(node, env), SimpleTypeSpecifier(c) => pos_name_(&c.specifier, env, drop_prefix_c), _ => { let mut name = node.text(env.indexed_source_text.source_text()); if let Some(prefix) = drop_prefix_c { name = drop_prefix(name, prefix); } let p = p_pos(node, env); Ok(ast::Id(p, String::from(name))) } } } fn mk_str<'a, F>(node: S<'a>, env: &mut Env<'a>, mut content: &str, unescaper: F) -> BString where F: Fn(&str) -> Result<BString, InvalidString>, { if let Some('b') = content.chars().next() { content = content.get(1..).unwrap(); } let len = content.len(); let no_quotes_result = extract_unquoted_string(content, 0, len); match no_quotes_result { Ok(no_quotes) => { let result = unescaper(&no_quotes); match result { Ok(s) => s, Err(_) => { raise_parsing_error( node, env, &format!("Malformed string literal <<{}>>", &no_quotes), ); BString::from("") } } } Err(_) => { raise_parsing_error( node, env, &format!("Malformed string literal <<{}>>", &content), ); BString::from("") } } } fn unesc_dbl(s: &str) -> Result<BString, InvalidString> { let unesc_s = unescape_double(s)?; if unesc_s == B("''") || unesc_s == B("\"\"") { Ok(BString::from("")) } else { Ok(unesc_s) } } // TODO: return Cow<[u8]> fn unesc_xhp(s: &[u8]) -> Vec<u8> { lazy_static! 
{ static ref WHITESPACE: Regex = Regex::new("[\x20\t\n\r\x0c]+").unwrap(); } WHITESPACE.replace_all(s, &b" "[..]).into_owned() } fn unesc_xhp_attr(s: &[u8]) -> Vec<u8> { // TODO: change unesc_dbl to &[u8] -> BString let r = get_quoted_content(s); let r = unsafe { std::str::from_utf8_unchecked(r) }; unesc_dbl(r).unwrap().into() } fn get_quoted_content(s: &[u8]) -> &[u8] { lazy_static! { static ref QUOTED: Regex = Regex::new(r#"^[\x20\t\n\r\x0c]*"((?:.|\n)*)""#).unwrap(); } QUOTED .captures(s) .and_then(|c| c.get(1)) .map_or(s, |m| m.as_bytes()) } fn token_kind<'a>(node: S<'a>) -> Option<TK> { match &node.children { Token(t) => Some(t.kind()), _ => None, } } fn check_valid_reified_hint<'a>(env: &mut Env<'a>, node: S<'a>, hint: &ast::Hint) { struct Checker<F: FnMut(&String)>(F); impl<'ast, F: FnMut(&String)> Visitor<'ast> for Checker<F> { type Params = AstParams<(), ()>; fn object(&mut self) -> &mut dyn Visitor<'ast, Params = Self::Params> { self } fn visit_hint(&mut self, c: &mut (), h: &ast::Hint) -> Result<(), ()> { match h.1.as_ref() { ast::Hint_::Happly(id, _) => { self.0(&id.1); } ast::Hint_::Haccess(_, ids) => { ids.iter().for_each(|id| self.0(&id.1)); } _ => {} } h.recurse(c, self) } } if *env.in_static_method() { let f = |id: &String| { fail_if_invalid_reified_generic(node, env, id); }; let mut visitor = Checker(f); visitor.visit_hint(&mut (), hint).unwrap(); } } fn p_closure_parameter<'a>( node: S<'a>, env: &mut Env<'a>, ) -> Result<(ast::Hint, Option<ast::HfParamInfo>)> { match &node.children { ClosureParameterTypeSpecifier(c) => { let kind = p_param_kind(&c.call_convention, env)?; let readonlyness = map_optional(&c.readonly, env, p_readonly)?; let info = Some(ast::HfParamInfo { kind, readonlyness }); let hint = p_hint(&c.type_, env)?; Ok((hint, info)) } _ => missing_syntax("closure parameter", node, env), } } fn map_shape_expression_field<'a, F, R>( node: S<'a>, env: &mut Env<'a>, f: F, ) -> Result<(ast::ShapeFieldName, R)> where F: Fn(S<'a>, &mut Env<'a>) -> Result<R>, { match &node.children { FieldInitializer(c) => { let name = p_shape_field_name(&c.name, env)?; let value = f(&c.value, env)?; Ok((name, value)) } _ => missing_syntax("shape field", node, env), } } fn p_shape_field_name<'a>(node: S<'a>, env: &mut Env<'a>) -> Result<ast::ShapeFieldName> { use ast::ShapeFieldName::*; let is_valid_shape_literal = |t: &PositionedToken<'a>| { let is_str = t.kind() == TK::SingleQuotedStringLiteral || t.kind() == TK::DoubleQuotedStringLiteral; let text = t.text(env.source_text()); let is_empty = text == "\'\'" || text == "\"\""; is_str && !is_empty }; if let LiteralExpression(c) = &node.children { if let Token(t) = &c.expression.children { if is_valid_shape_literal(t) { let ast::Id(p, n) = pos_name(node, env)?; let unescp = if t.kind() == TK::SingleQuotedStringLiteral { unescape_single } else { unesc_dbl }; let str_ = mk_str(node, env, &n, unescp); if int_of_string_opt(&str_).is_some() { raise_parsing_error(node, env, &syntax_error::shape_field_int_like_string) } return Ok(SFlitStr((p, str_))); } } } match &node.children { ScopeResolutionExpression(c) => Ok(SFclassConst( pos_name(&c.qualifier, env)?, p_pstring(&c.name, env)?, )), _ => { raise_parsing_error(node, env, &syntax_error::invalid_shape_field_name); let ast::Id(p, n) = pos_name(node, env)?; Ok(SFlitStr((p, mk_str(node, env, &n, unesc_dbl)))) } } } fn p_shape_field<'a>(node: S<'a>, env: &mut Env<'a>) -> Result<ast::ShapeFieldInfo> { match &node.children { FieldSpecifier(c) => { let optional = !c.question.is_missing(); let name = 
p_shape_field_name(&c.name, env)?; let hint = p_hint(&c.type_, env)?; Ok(ast::ShapeFieldInfo { optional, hint, name, }) } _ => { let (name, hint) = map_shape_expression_field(node, env, p_hint)?; Ok(ast::ShapeFieldInfo { optional: false, name, hint, }) } } } fn p_targ<'a>(node: S<'a>, env: &mut Env<'a>) -> Result<ast::Targ> { Ok(ast::Targ((), p_hint(node, env)?)) } fn p_unary_hint<'a>(kw: S<'a>, ty: S<'a>, env: &mut Env<'a>) -> Result<ast::Hint_> { Ok(ast::Hint_::Happly( pos_name(kw, env)?, could_map(ty, env, p_hint)?, )) } fn p_binary_hint<'a>(kw: S<'a>, key: S<'a>, ty: S<'a>, env: &mut Env<'a>) -> Result<ast::Hint_> { let kw = pos_name(kw, env)?; let key = p_hint(key, env)?; let value = p_hint(ty, env)?; Ok(ast::Hint_::Happly(kw, vec![key, value])) } fn p_hint_<'a>(node: S<'a>, env: &mut Env<'a>) -> Result<ast::Hint_> { use ast::Hint_::*; match &node.children { Token(token) if token.kind() == TK::Variable => { let ast::Id(_pos, name) = pos_name(node, env)?; Ok(Hvar(name)) } /* Dirty hack; CastExpression can have type represented by token */ Token(_) | SimpleTypeSpecifier(_) | QualifiedName(_) => { let ast::Id(pos, name) = pos_name(node, env)?; if "integer".eq_ignore_ascii_case(&name) { raise_hh_error( env, Naming::bad_builtin_type(pos.clone(), &name, special_typehints::INT), ); } else if "boolean".eq_ignore_ascii_case(&name) { raise_hh_error( env, Naming::bad_builtin_type(pos.clone(), &name, special_typehints::BOOL), ); } else if "double".eq_ignore_ascii_case(&name) || "real".eq_ignore_ascii_case(&name) { raise_hh_error( env, Naming::bad_builtin_type(pos.clone(), &name, special_typehints::FLOAT), ); } if env.file_mode() != file_info::Mode::Mhhi && !env.codegen() { let sn = strip_ns(&name); if sn.starts_with(sn::coeffects::CONTEXTS) || sn.starts_with(sn::coeffects::CAPABILITIES) { raise_parsing_error(node, env, &syntax_error::direct_coeffects_reference); } } if name == "_" { Ok(Hwildcard) } else { Ok(Happly(ast::Id(pos, name), vec![])) } } ShapeTypeSpecifier(c) => { let allows_unknown_fields = !c.ellipsis.is_missing(); /* if last element lacks a separator and ellipsis is present, error */ if allows_unknown_fields { if let SyntaxList(items) = &c.fields.children { if let Some(ListItem(item)) = items.last().map(|i| &i.children) { if item.separator.is_missing() { raise_parsing_error( node, env, &syntax_error::shape_type_ellipsis_without_trailing_comma, ) } } } } let field_map = could_map(&c.fields, env, p_shape_field)?; let mut set = HashSet::default(); for f in field_map.iter() { if !set.insert(f.name.get_name()) { raise_hh_error(env, Naming::fd_name_already_bound(f.name.get_pos().clone())); } } Ok(Hshape(ast::NastShapeInfo { allows_unknown_fields, field_map, })) } TupleTypeSpecifier(c) => Ok(Htuple(could_map(&c.types, env, p_hint)?)), UnionTypeSpecifier(c) => Ok(Hunion(could_map(&c.types, env, p_hint)?)), IntersectionTypeSpecifier(c) => Ok(Hintersection(could_map(&c.types, env, p_hint)?)), KeysetTypeSpecifier(c) => Ok(Happly( pos_name(&c.keyword, env)?, could_map(&c.type_, env, p_hint)?, )), VectorTypeSpecifier(c) => p_unary_hint(&c.keyword, &c.type_, env), ClassnameTypeSpecifier(c) => p_unary_hint(&c.keyword, &c.type_, env), TupleTypeExplicitSpecifier(c) => p_unary_hint(&c.keyword, &c.types, env), VarrayTypeSpecifier(c) => p_unary_hint(&c.keyword, &c.type_, env), DarrayTypeSpecifier(c) => p_binary_hint(&c.keyword, &c.key, &c.value, env), DictionaryTypeSpecifier(c) => p_unary_hint(&c.keyword, &c.members, env), GenericTypeSpecifier(c) => { let name = pos_name(&c.class_type, env)?; let args = 
&c.argument_list; let type_args = match &args.children { TypeArguments(c) => could_map(&c.types, env, p_hint)?, _ => missing_syntax("generic type arguments", args, env)?, }; Ok(Happly(name, type_args)) } NullableTypeSpecifier(c) => Ok(Hoption(p_hint(&c.type_, env)?)), LikeTypeSpecifier(c) => Ok(Hlike(p_hint(&c.type_, env)?)), SoftTypeSpecifier(c) => Ok(Hsoft(p_hint(&c.type_, env)?)), ClosureTypeSpecifier(c) => { let (param_list, variadic_hints): (Vec<S<'a>>, Vec<S<'a>>) = c .parameter_list .syntax_node_to_list_skip_separator() .partition(|n| match &n.children { VariadicParameter(_) => false, _ => true, }); let (type_hints, info) = param_list .iter() .map(|p| p_closure_parameter(p, env)) .collect::<Result<Vec<_>, _>>()? .into_iter() .unzip(); let variadic_hints = variadic_hints .iter() .map(|v| match &v.children { VariadicParameter(c) => { if c.type_.is_missing() { raise_parsing_error(v, env, "Cannot use ... without a typehint"); } Ok(Some(p_hint(&c.type_, env)?)) } _ => panic!("expect variadic parameter"), }) .collect::<Result<Vec<_>, _>>()?; if variadic_hints.len() > 1 { return parsing_error( format!( "{} variadic parameters found. There should be no more than one.", variadic_hints.len() ), p_pos(&c.parameter_list, env), ); } let ctxs = p_contexts( &c.contexts, env, Some(( "A closure type hint cannot have a polymorphic context", true, )), ); Ok(Hfun(ast::HintFun { is_readonly: map_optional(&c.readonly_keyword, env, p_readonly)?, param_tys: type_hints, param_info: info, variadic_ty: variadic_hints.into_iter().next().unwrap_or(None), ctxs, return_ty: p_hint(&c.return_type, env)?, is_readonly_return: map_optional(&c.readonly_return, env, p_readonly)?, })) } AttributizedSpecifier(c) => { let attrs = p_user_attribute(&c.attribute_spec, env)?; let hint = p_hint(&c.type_, env)?; if attrs.iter().any(|attr| attr.name.1 != special_attrs::SOFT) { raise_parsing_error(node, env, &syntax_error::only_soft_allowed); } Ok(*soften_hint(&attrs, hint).1) } FunctionCtxTypeSpecifier(c) => { let ast::Id(_p, n) = pos_name(&c.variable, env)?; Ok(HfunContext(n)) } TypeConstant(c) => { let child = pos_name(&c.right_type, env)?; match p_hint_(&c.left_type, env)? 
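            // Lower the base of a type-constant access (e.g. `Foo::T`) first;
            // a chained access (`Foo::T1::T2`) extends the Haccess id list below.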
{
                Haccess(root, mut cs) => {
                    cs.push(child);
                    Ok(Haccess(root, cs))
                }
                Hvar(n) => {
                    let pos = p_pos(&c.left_type, env);
                    let root = ast::Hint::new(pos, Hvar(n));
                    Ok(Haccess(root, vec![child]))
                }
                Happly(ty, param) => {
                    if param.is_empty() {
                        let root = ast::Hint::new(ty.0.clone(), Happly(ty, param));
                        Ok(Haccess(root, vec![child]))
                    } else {
                        missing_syntax("type constant base", node, env)
                    }
                }
                _ => missing_syntax("type constant base", node, env),
            }
        }
        ReifiedTypeArgument(_) => {
            raise_parsing_error(node, env, &syntax_error::invalid_reified);
            missing_syntax("reified type", node, env)
        }
        TypeRefinement(c) => Ok(ast::Hint_::Hrefinement(
            p_hint(&c.type_, env)?,
            could_map(&c.members, env, p_refinement_member)?,
        )),
        _ => missing_syntax("type hint", node, env),
    }
}

fn p_hint<'a>(node: S<'a>, env: &mut Env<'a>) -> Result<ast::Hint> {
    let hint_ = p_hint_(node, env)?;
    let pos = p_pos(node, env);
    let hint = ast::Hint::new(pos, hint_);
    check_valid_reified_hint(env, node, &hint);
    Ok(hint)
}

fn p_refinement_member<'a>(node: S<'a>, env: &mut Env<'a>) -> Result<ast::Refinement> {
    match &node.children {
        TypeInRefinement(c) => Ok(ast::Refinement::Rtype(
            pos_name(&c.name, env)?,
            if c.type_.is_missing() {
                let (lower, upper) = p_tconstraints_into_lower_and_upper(&c.constraints, env);
                ast::TypeRefinement::TRloose(ast::TypeRefinementBounds { lower, upper })
            } else {
                ast::TypeRefinement::TRexact(p_hint(&c.type_, env)?)
            },
        )),
        CtxInRefinement(c) => {
            let name = pos_name(&c.name, env)?;
            if c.ctx_list.is_missing() {
                let (lower, upper) = p_ctx_constraints(&c.constraints, env)?;
                Ok(ast::Refinement::Rctx(
                    name,
                    ast::CtxRefinement::CRloose(ast::CtxRefinementBounds { lower, upper }),
                ))
            } else if let Some(hint) = p_context_list_to_intersection(
                &c.ctx_list,
                env,
                "Refinement members cannot alias polymorphic contexts",
            ) {
                Ok(ast::Refinement::Rctx(
                    name,
                    ast::CtxRefinement::CRexact(hint),
                ))
            } else {
                missing_syntax("refinement member's bound(s)", node, env)
            }
        }
        _ => missing_syntax("refinement member", node, env),
    }
}

fn p_simple_initializer<'a>(node: S<'a>, env: &mut Env<'a>) -> Result<ast::Expr> {
    match &node.children {
        SimpleInitializer(c) => p_expr(&c.value, env),
        _ => missing_syntax("simple initializer", node, env),
    }
}

fn p_member<'a>(node: S<'a>, env: &mut Env<'a>) -> Result<(ast::Expr, ast::Expr)> {
    match &node.children {
        ElementInitializer(c) => Ok((p_expr(&c.key, env)?, p_expr(&c.value, env)?)),
        _ => missing_syntax("darray intrinsic expression element", node, env),
    }
}

fn expand_type_args<'a>(ty: S<'a>, env: &mut Env<'a>) -> Result<Vec<ast::Hint>> {
    match &ty.children {
        TypeArguments(c) => could_map(&c.types, env, p_hint),
        _ => Ok(vec![]),
    }
}

fn p_afield<'a>(node: S<'a>, env: &mut Env<'a>) -> Result<ast::Afield> {
    match &node.children {
        ElementInitializer(c) => Ok(ast::Afield::AFkvalue(
            p_expr(&c.key, env)?,
            p_expr(&c.value, env)?,
        )),
        _ => Ok(ast::Afield::AFvalue(p_expr(node, env)?)),
    }
}

fn p_field<'a>(node: S<'a>, env: &mut Env<'a>) -> Result<ast::Field, Error> {
    match &node.children {
        ElementInitializer(c) => Ok(ast::Field(p_expr(&c.key, env)?, p_expr(&c.value, env)?)),
        _ => missing_syntax("key-value collection element", node, env),
    }
}

// We lower readonly lambda declarations by making the inner lambda have readonly_this.
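// For example, `readonly () ==> $x` sets readonly_this = Some(Readonly) on the
// inner Lfun/Efun rather than wrapping the lambda in a ReadonlyExpr node.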
fn process_readonly_expr(mut e: ast::Expr) -> Expr_ {
    match &mut e {
        ast::Expr(_, _, Expr_::Efun(ref mut efun)) if efun.fun.readonly_this.is_none() => {
            efun.fun.readonly_this = Some(ast::ReadonlyKind::Readonly);
            e.2
        }
        ast::Expr(_, _, Expr_::Lfun(ref mut l)) if l.0.readonly_this.is_none() => {
            l.0.readonly_this = Some(ast::ReadonlyKind::Readonly);
            e.2
        }
        _ => Expr_::mk_readonly_expr(e),
    }
}

fn check_intrinsic_type_arg_varity<'a>(
    node: S<'a>,
    env: &mut Env<'a>,
    tys: Vec<ast::Hint>,
) -> Option<ast::CollectionTarg> {
    let count = tys.len();
    let mut tys = tys.into_iter();
    match count {
        2 => Some(ast::CollectionTarg::CollectionTKV(
            ast::Targ((), tys.next().unwrap()),
            ast::Targ((), tys.next().unwrap()),
        )),
        1 => Some(ast::CollectionTarg::CollectionTV(ast::Targ(
            (),
            tys.next().unwrap(),
        ))),
        0 => None,
        _ => {
            raise_parsing_error(node, env, &syntax_error::collection_intrinsic_many_typeargs);
            None
        }
    }
}

fn p_import_flavor<'a>(node: S<'a>, env: &mut Env<'a>) -> Result<ast::ImportFlavor> {
    use ast::ImportFlavor::*;
    match token_kind(node) {
        Some(TK::Include) => Ok(Include),
        Some(TK::Require) => Ok(Require),
        Some(TK::Include_once) => Ok(IncludeOnce),
        Some(TK::Require_once) => Ok(RequireOnce),
        _ => missing_syntax("import flavor", node, env),
    }
}

fn p_null_flavor<'a>(node: S<'a>, env: &mut Env<'a>) -> Result<ast::OgNullFlavor> {
    use ast::OgNullFlavor::*;
    match token_kind(node) {
        Some(TK::QuestionMinusGreaterThan) => Ok(OGNullsafe),
        Some(TK::MinusGreaterThan) => Ok(OGNullthrows),
        _ => missing_syntax("null flavor", node, env),
    }
}

fn wrap_unescaper<F>(s: &str, unescaper: F, err_pos: Pos) -> Result<BString>
where
    F: FnOnce(&str) -> Result<BString, InvalidString>,
{
    unescaper(s).map_err(|e| Error::ParsingError {
        message: e.msg,
        pos: err_pos,
    })
}

fn fail_if_invalid_class_creation<'a>(node: S<'a>, env: &mut Env<'a>, id: impl AsRef<str>) {
    let id = id.as_ref();
    let is_in_static_method = *env.in_static_method();
    if is_in_static_method
        && ((id == special_classes::SELF && env.cls_generics_mut().values().any(|reif| *reif))
            || (id == special_classes::PARENT && *env.parent_maybe_reified()))
    {
        raise_parsing_error(node, env, &syntax_error::static_method_reified_obj_creation);
    }
}

fn fail_if_invalid_reified_generic<'a>(node: S<'a>, env: &mut Env<'a>, id: impl AsRef<str>) {
    let is_in_static_method = *env.in_static_method();
    if is_in_static_method && *env.cls_generics_mut().get(id.as_ref()).unwrap_or(&false) {
        raise_parsing_error(
            node,
            env,
            &syntax_error::cls_reified_generic_in_static_method,
        );
    }
}

fn rfind(s: &[u8], mut i: usize, c: u8) -> Option<usize> {
    if i >= s.len() {
        return None;
    }
    i += 1;
    while i > 0 {
        i -= 1;
        if s[i] == c {
            return Some(i);
        }
    }
    None
}

fn prep_string2<'a>(
    nodes: &'a [Syntax<'a, PositionedToken<'a>, PositionedValue<'a>>],
    env: &mut Env<'a>,
) -> Result<(TokenOp, TokenOp)> {
    use TokenOp::*;
    let is_quote = |c| c == b'\"' || c == b'`';
    let start_is_quote = |s: &[u8]| {
        (!s.is_empty() && is_quote(s[0])) || (s.len() > 1 && (s[0] == b'b' && s[1] == b'\"'))
    };
    let last_is_quote = |s: &[u8]| !s.is_empty() && is_quote(s[s.len() - 1]);
    let is_heredoc = |s: &[u8]| (s.len() > 3 && &s[0..3] == b"<<<");
    let mut nodes = nodes.iter();
    let first = nodes.next();
    match first.map(|n| &n.children) {
        Some(Token(t)) => {
            let raise = |env| {
                raise_parsing_error(first.unwrap(), env, "Malformed String2 SyntaxList");
            };
            let text = t.text_raw(env.source_text());
            if start_is_quote(text) {
                let first_token_op = match text[0] {
                    b'b' if text.len() > 2 => LeftTrim(2),
                    _ if is_quote(text[0]) && text.len() > 1 => LeftTrim(1),
                    _ => Skip,
                };
                if let Some(Token(t)) = nodes.last().map(|n| &n.children) {
                    let last_text = t.text_raw(env.source_text());
                    if last_is_quote(last_text) {
                        let last_token_op = match last_text.len() {
                            n if n > 1 => RightTrim(1),
                            _ => Skip,
                        };
                        return Ok((first_token_op, last_token_op));
                    }
                }
                raise(env);
                Ok((first_token_op, Noop))
            } else if is_heredoc(text) {
                let trim_size = text
                    .iter()
                    .position(|c| *c == b'\n')
                    .ok_or_else(|| Error::ParsingError {
                        message: String::from("newline not found"),
                        pos: p_pos(first.unwrap(), env),
                    })?
                    + 1;
                let first_token_op = match trim_size {
                    _ if trim_size == text.len() => Skip,
                    _ => LeftTrim(trim_size),
                };
                if let Some(Token(t)) = nodes.last().map(|n| &n.children) {
                    let text = t.text_raw(env.source_text());
                    let len = text.len();
                    if len != 0 {
                        let n = (match rfind(text, len - 2, b'\n') {
                            Some(n) => Ok(n),
                            None => Err(Error::ParsingError {
                                message: String::from("newline not found"),
                                pos: p_pos(first.unwrap(), env),
                            }),
                        })?;
                        let last_token_op = match n {
                            0 => Skip,
                            _ => RightTrim(len - n),
                        };
                        return Ok((first_token_op, last_token_op));
                    }
                }
                raise(env);
                Ok((first_token_op, Noop))
            } else {
                Ok((Noop, Noop))
            }
        }
        _ => Ok((Noop, Noop)),
    }
}

fn process_token_op<'a>(env: &mut Env<'a>, op: TokenOp, node: S<'a>) -> Result<Option<S<'a>>> {
    use TokenOp::*;
    match op {
        LeftTrim(n) => match &node.children {
            Token(t) => {
                let token = env.token_factory.trim_left(t, n);
                let node = env.arena.alloc(Syntax::make_token(token));
                Ok(Some(node))
            }
            _ => missing_syntax("token in operator", node, env),
        },
        RightTrim(n) => match &node.children {
            Token(t) => {
                let token = env.token_factory.trim_right(t, n);
                let node = env.arena.alloc(Syntax::make_token(token));
                Ok(Some(node))
            }
            _ => missing_syntax("token in operator", node, env),
        },
        _ => Ok(None),
    }
}

fn p_string2<'a>(
    nodes: &'a [Syntax<'a, PositionedToken<'a>, PositionedValue<'a>>],
    env: &mut Env<'a>,
) -> Result<Vec<ast::Expr>> {
    use TokenOp::*;
    let (head_op, tail_op) = prep_string2(nodes, env)?;
    let mut result = Vec::with_capacity(nodes.len());
    let mut i = 0;
    let last = nodes.len() - 1;
    while i <= last {
        let op = match i {
            0 => head_op,
            _ if i == last => tail_op,
            _ => Noop,
        };
        if op == Skip {
            i += 1;
            continue;
        }
        let node = process_token_op(env, op, &nodes[i])?;
        let node = node.unwrap_or(&nodes[i]);
        if token_kind(node) == Some(TK::Dollar) && i < last {
            if let EmbeddedBracedExpression(_) = &nodes[i + 1].children {
                raise_parsing_error(&nodes[i + 1], env, &syntax_error::outside_dollar_str_interp);
                result.push(p_expr_with_loc(
                    ExprLocation::InDoubleQuotedString,
                    &nodes[i + 1],
                    env,
                    None,
                )?);
                i += 2;
                continue;
            }
        }
        result.push(p_expr_with_loc(
            ExprLocation::InDoubleQuotedString,
            node,
            env,
            None,
        )?);
        i += 1;
    }
    Ok(result)
}

fn p_expr<'a>(node: S<'a>, env: &mut Env<'a>) -> Result<ast::Expr> {
    p_expr_with_loc(ExprLocation::TopLevel, node, env, None)
}

fn p_expr_for_function_call_arguments<'a>(
    node: S<'a>,
    env: &mut Env<'a>,
) -> Result<(ast::ParamKind, ast::Expr)> {
    match &node.children {
        DecoratedExpression(DecoratedExpressionChildren {
            decorator,
            expression,
        }) if token_kind(decorator) == Some(TK::Inout) => Ok((
            ast::ParamKind::Pinout(p_pos(decorator, env)),
            p_expr(expression, env)?,
        )),
        _ => Ok((ast::ParamKind::Pnormal, p_expr(node, env)?)),
    }
}

fn p_expr_for_normal_argument<'a>(
    node: S<'a>,
    env: &mut Env<'a>,
) -> Result<(ast::ParamKind, ast::Expr)> {
    Ok((ast::ParamKind::Pnormal, p_expr(node, env)?))
}

fn p_expr_with_loc<'a>(
    location: ExprLocation,
    node: S<'a>,
    env: &mut Env<'a>,
    parent_pos: Option<Pos>,
) -> Result<ast::Expr> {
    // We use location=CallReceiver to set PropOrMethod::IsMethod on ObjGet,
    // but only if it is the immediate node.
    let location = match (location, &node.children) {
        (
            ExprLocation::CallReceiver,
            MemberSelectionExpression(_)
            | SafeMemberSelectionExpression(_)
            | EmbeddedMemberSelectionExpression(_)
            | ScopeResolutionExpression(_),
        ) => location,
        (ExprLocation::CallReceiver, _) => ExprLocation::TopLevel,
        (_, _) => location,
    };
    match &node.children {
        BracedExpression(c) => {
            // Either a dynamic method lookup on a dynamic value:
            //   $foo->{$meth_name}();
            // or an XHP splice.
            //   <p id={$id}>hello</p>;
            // In both cases, unwrap, consistent with parentheses.
            p_expr_with_loc(location, &c.expression, env, parent_pos)
        }
        ParenthesizedExpression(c) => p_expr_with_loc(location, &c.expression, env, parent_pos),
        _ => {
            let pos = p_pos(node, env);
            let expr_ = p_expr_recurse(location, node, env, parent_pos)?;
            Ok(ast::Expr::new((), pos, expr_))
        }
    }
}

fn p_expr_lit<'a>(
    location: ExprLocation,
    _parent: S<'a>,
    expr: S<'a>,
    env: &mut Env<'a>,
) -> Result<Expr_> {
    match &expr.children {
        Token(_) => {
            let s = expr.text(env.indexed_source_text.source_text());
            let check_lint_err = |e: &mut Env<'a>, s: &str, expected: &str| {
                if !e.codegen() && s != expected {
                    raise_lint_error(e, LintError::lowercase_constant(p_pos(expr, e), s));
                }
            };
            match (location, token_kind(expr)) {
                (ExprLocation::InDoubleQuotedString, _) if env.codegen() => {
                    Ok(Expr_::String(mk_str(expr, env, s, unesc_dbl)))
                }
                (_, Some(TK::DecimalLiteral))
                | (_, Some(TK::OctalLiteral))
                | (_, Some(TK::HexadecimalLiteral))
                | (_, Some(TK::BinaryLiteral)) => {
                    let s = s.replace('_', "");
                    match parse_int(&s) {
                        Err(ParseIntError::OutOfRange) => {
                            raise_parsing_error(expr, env, &syntax_error::out_of_int_range(&s));
                        }
                        Err(ParseIntError::InvalidDigit(int_kind)) => {
                            raise_parsing_error(
                                expr,
                                env,
                                &syntax_error::invalid_integer_digit(int_kind),
                            );
                            missing_syntax(&format!("{}", int_kind), expr, env)?;
                        }
                        Err(ParseIntError::Empty) => {
                            missing_syntax("int literal", expr, env)?;
                        }
                        Ok(_) => {}
                    }
                    Ok(Expr_::Int(s))
                }
                (_, Some(TK::FloatingLiteral)) => {
                    // f64::from_str accepts more strings than Hack does; invalid
                    // Hack float literals are caught in the lexer.
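                    // As with int literals above, `_` digit separators are
                    // stripped before the literal is validated.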
                    let s = s.replace('_', "");
                    if f64::from_str(&s).is_err() {
                        raise_parsing_error(expr, env, &syntax_error::out_of_float_range(&s))
                    }
                    Ok(Expr_::Float(s))
                }
                (_, Some(TK::SingleQuotedStringLiteral)) => {
                    Ok(Expr_::String(mk_str(expr, env, s, unescape_single)))
                }
                (_, Some(TK::DoubleQuotedStringLiteral)) => {
                    Ok(Expr_::String(mk_str(expr, env, s, unescape_double)))
                }
                (_, Some(TK::HeredocStringLiteral)) => {
                    Ok(Expr_::String(mk_str(expr, env, s, unescape_heredoc)))
                }
                (_, Some(TK::NowdocStringLiteral)) => {
                    Ok(Expr_::String(mk_str(expr, env, s, unescape_nowdoc)))
                }
                (_, Some(TK::NullLiteral)) => {
                    check_lint_err(env, s, literal::NULL);
                    Ok(Expr_::Null)
                }
                (_, Some(TK::BooleanLiteral)) => {
                    if s.eq_ignore_ascii_case(literal::FALSE) {
                        check_lint_err(env, s, literal::FALSE);
                        Ok(Expr_::False)
                    } else if s.eq_ignore_ascii_case(literal::TRUE) {
                        check_lint_err(env, s, literal::TRUE);
                        Ok(Expr_::True)
                    } else {
                        missing_syntax(&format!("boolean (not: {})", s), expr, env)
                    }
                }
                _ => missing_syntax("literal", expr, env),
            }
        }
        SyntaxList(ts) => Ok(Expr_::String2(p_string2(ts, env)?)),
        _ => missing_syntax("literal expression", expr, env),
    }
}

fn p_expr_recurse<'a>(
    location: ExprLocation,
    node: S<'a>,
    env: &mut Env<'a>,
    parent_pos: Option<Pos>,
) -> Result<Expr_> {
    if *env.exp_recursion_depth() >= EXP_RECURSION_LIMIT {
        Err(Error::ParsingError {
            message: "Expression recursion limit reached".into(),
            pos: parent_pos.unwrap_or_else(|| env.mk_none_pos()),
        })
    } else {
        *env.exp_recursion_depth() += 1;
        let r = stack_limit::maybe_grow(|| p_expr_impl(location, node, env, parent_pos));
        *env.exp_recursion_depth() -= 1;
        r
    }
}

fn split_args_vararg<'a>(
    arg_list_node: S<'a>,
    e: &mut Env<'a>,
) -> Result<(Vec<(ast::ParamKind, ast::Expr)>, Option<ast::Expr>)> {
    let mut arg_list: Vec<_> = arg_list_node.syntax_node_to_list_skip_separator().collect();
    if let Some(last_arg) = arg_list.last() {
        if let DecoratedExpression(c) = &last_arg.children {
            if token_kind(&c.decorator) == Some(TK::DotDotDot) {
                let _ = arg_list.pop();
                let args: Result<Vec<_>, _> = arg_list
                    .iter()
                    .map(|a| p_expr_for_function_call_arguments(a, e))
                    .collect();
                let args = args?;
                let vararg = p_expr(&c.expression, e)?;
                return Ok((args, Some(vararg)));
            }
        }
    }
    Ok((
        could_map(arg_list_node, e, p_expr_for_function_call_arguments)?,
        None,
    ))
}

fn p_expr_impl<'a>(
    location: ExprLocation,
    node: S<'a>,
    env: &mut Env<'a>,
    parent_pos: Option<Pos>,
) -> Result<Expr_> {
    let pos = match parent_pos {
        Some(pos) => pos,
        None => p_pos(node, env),
    };
    match &node.children {
        LambdaExpression(c) => p_lambda_expression(c, env, pos),
        BracedExpression(c) => p_expr_recurse(location, &c.expression, env, None),
        EmbeddedBracedExpression(c) => p_expr_recurse(location, &c.expression, env, Some(pos)),
        ParenthesizedExpression(c) => p_expr_recurse(location, &c.expression, env, None),
        DictionaryIntrinsicExpression(c) => {
            let ty_args = expand_type_args(&c.explicit_type, env)?;
            let hints = if ty_args.len() == 2 {
                let mut tys = ty_args.into_iter();
                Some((
                    ast::Targ((), tys.next().unwrap()),
                    ast::Targ((), tys.next().unwrap()),
                ))
            } else if ty_args.is_empty() {
                None
            } else {
                raise_parsing_error(
                    &c.explicit_type,
                    env,
                    "`dict` takes exactly two type arguments",
                );
                None
            };
            Ok(Expr_::mk_key_val_collection(
                (p_pos(&c.keyword, env), aast::KvcKind::Dict),
                hints,
                could_map(&c.members, env, p_field)?,
            ))
        }
        KeysetIntrinsicExpression(c) => {
            let mut ty_args = expand_type_args(&c.explicit_type, env)?;
            let hint = if ty_args.len() == 1 {
                Some(ast::Targ((), ty_args.pop().unwrap()))
            } else if ty_args.is_empty() {
                None
            } else
{ raise_parsing_error( &c.explicit_type, env, "`keyset` takes exactly one type argument", ); None }; Ok(Expr_::mk_val_collection( (p_pos(&c.keyword, env), aast::VcKind::Keyset), hint, could_map(&c.members, env, p_expr)?, )) } VectorIntrinsicExpression(c) => { let mut ty_args = expand_type_args(&c.explicit_type, env)?; let hint = if ty_args.len() == 1 { Some(ast::Targ((), ty_args.pop().unwrap())) } else if ty_args.is_empty() { None } else { raise_parsing_error( &c.explicit_type, env, "`vec` takes exactly one type argument", ); None }; Ok(Expr_::mk_val_collection( (p_pos(&c.keyword, env), aast::VcKind::Vec), hint, could_map(&c.members, env, p_expr)?, )) } CollectionLiteralExpression(c) => p_collection_literal_expr(node, c, env), VarrayIntrinsicExpression(c) => p_varray_intrinsic_expr(node, c, env), DarrayIntrinsicExpression(c) => p_darray_intrinsic_expr(node, c, env), ListExpression(c) => p_list_expr(node, c, env), EvalExpression(c) => p_special_call(&c.keyword, &c.argument, env), IssetExpression(c) => p_special_call(&c.keyword, &c.argument_list, env), TupleExpression(c) => p_tuple_expr(c, env), FunctionCallExpression(c) => p_function_call_expr(c, env), FunctionPointerExpression(c) => p_function_pointer_expr(node, c, env), QualifiedName(_) => p_qualified_name(node, env, location), VariableExpression(c) => p_variable_expr(c, env, pos), PipeVariableExpression(_) => p_pipe_variable_expr(pos), InclusionExpression(c) => p_inclusion_expr(c, env), MemberSelectionExpression(c) => p_obj_get(location, &c.object, &c.operator, &c.name, env), SafeMemberSelectionExpression(c) => { p_obj_get(location, &c.object, &c.operator, &c.name, env) } EmbeddedMemberSelectionExpression(c) => { p_obj_get(location, &c.object, &c.operator, &c.name, env) } PrefixUnaryExpression(_) | PostfixUnaryExpression(_) | DecoratedExpression(_) => { p_pre_post_unary_decorated_expr(node, env, pos, location) } BinaryExpression(c) => p_binary_expr(c, env, pos, location), Token(t) => p_token(node, t, env, location), YieldExpression(c) => p_yield_expr(node, c, env, pos, location), ScopeResolutionExpression(c) => p_scope_resolution_expr(node, c, env, pos, location), CastExpression(c) => p_cast_expr(c, env), PrefixedCodeExpression(c) => p_prefixed_code_expr(c, env), ETSpliceExpression(c) => p_et_splice_expr(&c.expression, env, location), ConditionalExpression(c) => p_conditional_expr(c, env), SubscriptExpression(c) => p_subscript_expr(c, env), EmbeddedSubscriptExpression(c) => p_embedded_subscript_expr(c, env, location), ShapeExpression(c) => p_shape_expr(c, env), ObjectCreationExpression(c) => p_expr_recurse(location, &c.object, env, Some(pos)), ConstructorCall(c) => p_constructor_call(node, c, env, pos), GenericTypeSpecifier(c) => p_generic_type_specifier(c, env), LiteralExpression(c) => p_expr_lit(location, node, &c.expression, env), PrefixedStringExpression(c) => p_prefixed_string_expr(node, c, env), IsExpression(c) => p_is_expr(&c.left_operand, &c.right_operand, env), AsExpression(c) => p_as_expr(&c.left_operand, &c.right_operand, env, false), NullableAsExpression(c) => p_as_expr(&c.left_operand, &c.right_operand, env, true), UpcastExpression(c) => p_upcast_expr(&c.left_operand, &c.right_operand, env), AnonymousFunction(c) => p_anonymous_function(node, c, env), AwaitableCreationExpression(c) => p_awaitable_creation_expr(c, env, pos), XHPExpression(c) if c.open.is_xhp_open() => p_xhp_expr(c, env), EnumClassLabelExpression(c) => p_enum_class_label_expr(c, env), PackageExpression(p) => p_package_expr(p, env), _ => { 
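            // Fallback: surface a parse error for unrecognized expression
            // syntax and lower to a null expression so lowering can continue.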
raise_missing_syntax("expression", node, env); Ok(Expr_::Null) } } } fn p_lambda_expression<'a>( c: &'a LambdaExpressionChildren<'_, PositionedToken<'_>, PositionedValue<'_>>, env: &mut Env<'a>, pos: Pos, ) -> Result<Expr_> { let suspension_kind = mk_suspension_kind(&c.async_); let (params, (ctxs, unsafe_ctxs), readonly_ret, ret) = match &c.signature.children { LambdaSignature(c) => { let params = could_map(&c.parameters, env, p_fun_param)?; let readonly_ret = map_optional(&c.readonly_return, env, p_readonly)?; let ctxs = p_contexts( &c.contexts, env, // TODO(coeffects) Lambdas may be able to support this:: contexts Some((&syntax_error::lambda_effect_polymorphic("A lambda"), false)), ); let unsafe_ctxs = ctxs.clone(); let ret = map_optional(&c.type_, env, p_hint)?; (params, (ctxs, unsafe_ctxs), readonly_ret, ret) } Token(_) => { let ast::Id(p, n) = pos_name(&c.signature, env)?; ( vec![ast::FunParam { annotation: (), type_hint: ast::TypeHint((), None), is_variadic: false, pos: p, name: n, expr: None, callconv: ast::ParamKind::Pnormal, readonly: None, user_attributes: Default::default(), visibility: None, }], (None, None), None, None, ) } _ => missing_syntax("lambda signature", &c.signature, env)?, }; let (body, yield_) = if !c.body.is_compound_statement() { map_yielding(&c.body, env, p_function_body)? } else { let mut env1 = Env::clone_and_unset_toplevel_if_toplevel(env); map_yielding(&c.body, env1.as_mut(), p_function_body)? }; let external = c.body.is_external(); let fun = ast::Fun_ { span: pos, readonly_this: None, // filled in by mk_unop annotation: (), readonly_ret, ret: ast::TypeHint((), ret), body: ast::FuncBody { fb_ast: body }, fun_kind: mk_fun_kind(suspension_kind, yield_), params, ctxs, unsafe_ctxs, user_attributes: p_user_attributes(&c.attribute_spec, env), external, doc_comment: None, }; Ok(Expr_::mk_lfun(fun, vec![])) } fn p_collection_literal_expr<'a>( node: S<'a>, c: &'a CollectionLiteralExpressionChildren<'_, PositionedToken<'_>, PositionedValue<'_>>, env: &mut Env<'a>, ) -> Result<Expr_> { let (collection_name, hints) = match &c.name.children { SimpleTypeSpecifier(c) => (pos_name(&c.specifier, env)?, None), GenericTypeSpecifier(c) => { let hints = expand_type_args(&c.argument_list, env)?; let hints = check_intrinsic_type_arg_varity(node, env, hints); (pos_name(&c.class_type, env)?, hints) } _ => (pos_name(&c.name, env)?, None), }; Ok(Expr_::mk_collection( collection_name, hints, could_map(&c.initializers, env, p_afield)?, )) } fn p_varray_intrinsic_expr<'a>( node: S<'a>, c: &'a VarrayIntrinsicExpressionChildren<'_, PositionedToken<'_>, PositionedValue<'_>>, env: &mut Env<'a>, ) -> Result<Expr_> { let hints = expand_type_args(&c.explicit_type, env)?; let hints = check_intrinsic_type_arg_varity(node, env, hints); let targ = match hints { Some(ast::CollectionTarg::CollectionTV(ty)) => Some(ty), None => None, _ => missing_syntax("VarrayIntrinsicExpression type args", node, env)?, }; Ok(Expr_::mk_varray(targ, could_map(&c.members, env, p_expr)?)) } fn p_darray_intrinsic_expr<'a>( node: S<'a>, c: &'a DarrayIntrinsicExpressionChildren<'_, PositionedToken<'_>, PositionedValue<'_>>, env: &mut Env<'a>, ) -> Result<Expr_> { let hints = expand_type_args(&c.explicit_type, env)?; let hints = check_intrinsic_type_arg_varity(node, env, hints); match hints { Some(ast::CollectionTarg::CollectionTKV(tk, tv)) => Ok(Expr_::mk_darray( Some((tk, tv)), could_map(&c.members, env, p_member)?, )), None => Ok(Expr_::mk_darray( None, could_map(&c.members, env, p_member)?, )), _ => 
missing_syntax("DarrayIntrinsicExpression type args", node, env), } } fn p_list_expr<'a>( _node: S<'a>, c: &'a ListExpressionChildren<'_, PositionedToken<'_>, PositionedValue<'_>>, env: &mut Env<'a>, ) -> Result<Expr_> { /* TODO: Or tie in with other intrinsics and post-process to List */ let p_binder_or_ignore = |n: S<'a>, e: &mut Env<'a>| -> Result<ast::Expr> { match &n.children { Missing => Ok(Expr::new((), e.mk_none_pos(), Expr_::Omitted)), _ => p_expr(n, e), } }; Ok(Expr_::List(could_map(&c.members, env, p_binder_or_ignore)?)) } fn p_tuple_expr<'a>( c: &'a TupleExpressionChildren<'_, PositionedToken<'_>, PositionedValue<'_>>, env: &mut Env<'a>, ) -> Result<Expr_> { Ok(Expr_::mk_tuple(could_map(&c.items, env, p_expr)?)) } fn p_function_call_expr<'a>( c: &'a FunctionCallExpressionChildren<'_, PositionedToken<'_>, PositionedValue<'_>>, env: &mut Env<'a>, ) -> Result<Expr_> { let recv = &c.receiver; let args = &c.argument_list; let targs = match (&recv.children, &c.type_args.children) { (_, TypeArguments(c)) => could_map(&c.types, env, p_targ)?, /* TODO might not be needed */ (GenericTypeSpecifier(c), _) => match &c.argument_list.children { TypeArguments(c) => could_map(&c.types, env, p_targ)?, _ => vec![], }, _ => vec![], }; // Mark expression as CallReceiver so that we can correctly set // PropOrMethod field in ObjGet and ClassGet let recv = p_expr_with_loc(ExprLocation::CallReceiver, recv, env, None)?; let (args, varargs) = split_args_vararg(args, env)?; Ok(Expr_::mk_call(ast::CallExpr { func: recv, targs, args, unpacked_arg: varargs, })) } fn p_function_pointer_expr<'a>( node: S<'a>, c: &'a FunctionPointerExpressionChildren<'_, PositionedToken<'_>, PositionedValue<'_>>, env: &mut Env<'a>, ) -> Result<Expr_> { let targs = match &c.type_args.children { TypeArguments(c) => could_map(&c.types, env, p_targ)?, _ => vec![], }; let recv = p_expr(&c.receiver, env)?; match &recv.2 { Expr_::Id(id) => Ok(Expr_::mk_function_pointer( aast::FunctionPtrId::FPId(*(id.to_owned())), targs, )), Expr_::ClassConst(c) => { if let aast::ClassId_::CIexpr(Expr(_, _, Expr_::Id(_))) = (c.0).2 { Ok(Expr_::mk_function_pointer( aast::FunctionPtrId::FPClassConst(c.0.to_owned(), c.1.to_owned()), targs, )) } else { raise_parsing_error(node, env, &syntax_error::function_pointer_bad_recv); missing_syntax("function or static method", node, env) } } _ => { raise_parsing_error(node, env, &syntax_error::function_pointer_bad_recv); missing_syntax("function or static method", node, env) } } } fn p_qualified_name<'a>(node: S<'a>, env: &mut Env<'a>, location: ExprLocation) -> Result<Expr_> { match location { ExprLocation::InDoubleQuotedString => { let ast::Id(_, n) = pos_qualified_name(node, env)?; Ok(Expr_::String(n.into())) } _ => Ok(Expr_::mk_id(pos_qualified_name(node, env)?)), } } fn p_variable_expr<'a>( c: &'a VariableExpressionChildren<'_, PositionedToken<'_>, PositionedValue<'_>>, env: &mut Env<'a>, pos: Pos, ) -> Result<Expr_> { Ok(Expr_::mk_lvar(lid_from_pos_name(pos, &c.expression, env)?)) } fn p_pipe_variable_expr(pos: Pos) -> Result<Expr_> { Ok(Expr_::mk_lvar(mk_lid( pos, special_idents::DOLLAR_DOLLAR.into(), ))) } fn p_inclusion_expr<'a>( c: &'a InclusionExpressionChildren<'_, PositionedToken<'_>, PositionedValue<'_>>, env: &mut Env<'a>, ) -> Result<Expr_> { Ok(Expr_::mk_import( p_import_flavor(&c.require, env)?, p_expr(&c.filename, env)?, )) } fn p_pre_post_unary_decorated_expr<'a>( node: S<'a>, env: &mut Env<'a>, pos: Pos, location: ExprLocation, ) -> Result<Expr_> { let (operand, op, postfix) = match 
&node.children {
        PrefixUnaryExpression(c) => (&c.operand, &c.operator, false),
        PostfixUnaryExpression(c) => (&c.operand, &c.operator, true),
        DecoratedExpression(c) => (&c.expression, &c.decorator, false),
        _ => missing_syntax("unary expr", node, env)?,
    };
    // FFP does not distinguish between ++$i and $i++ on the level of token
    // kind annotation. Prevent duplication by switching on `postfix` for the
    // two operators for which the AST /does/ differentiate between fixities.
    use ast::Uop::*;
    let mk_unop = |op, e| Ok(Expr_::mk_unop(op, e));
    let op_kind = token_kind(op);
    if let Some(TK::At) = op_kind {
        if env.parser_options.po_disallow_silence {
            raise_parsing_error(op, env, &syntax_error::no_silence);
        }
        if env.codegen() {
            let expr = p_expr(operand, env)?;
            mk_unop(Usilence, expr)
        } else {
            let expr = p_expr_with_loc(ExprLocation::TopLevel, operand, env, Some(pos))?;
            Ok(expr.2)
        }
    } else {
        let expr = p_expr(operand, env)?;
        match op_kind {
            Some(TK::PlusPlus) if postfix => mk_unop(Upincr, expr),
            Some(TK::MinusMinus) if postfix => mk_unop(Updecr, expr),
            Some(TK::PlusPlus) => mk_unop(Uincr, expr),
            Some(TK::MinusMinus) => mk_unop(Udecr, expr),
            Some(TK::Exclamation) => mk_unop(Unot, expr),
            Some(TK::Tilde) => mk_unop(Utild, expr),
            Some(TK::Plus) => mk_unop(Uplus, expr),
            Some(TK::Minus) => mk_unop(Uminus, expr),
            Some(TK::Await) => Ok(lift_await(pos, expr, env, location)),
            Some(TK::Readonly) => Ok(process_readonly_expr(expr)),
            Some(TK::Clone) => Ok(Expr_::mk_clone(expr)),
            Some(TK::Print) => Ok(Expr_::mk_call(ast::CallExpr {
                func: Expr::new(
                    (),
                    pos.clone(),
                    Expr_::mk_id(ast::Id(pos, special_functions::ECHO.into())),
                ),
                targs: vec![],
                args: vec![(ast::ParamKind::Pnormal, expr)],
                unpacked_arg: None,
            })),
            Some(TK::Dollar) => {
                raise_parsing_error(op, env, &syntax_error::invalid_variable_name);
                Ok(Expr_::Omitted)
            }
            _ => missing_syntax("unary operator", node, env),
        }
    }
}

fn p_binary_expr<'a>(
    c: &'a BinaryExpressionChildren<'_, PositionedToken<'_>, PositionedValue<'_>>,
    env: &mut Env<'a>,
    pos: Pos,
    location: ExprLocation,
) -> Result<Expr_> {
    use ExprLocation::*;
    let left = p_expr_with_loc(ExprLocation::TopLevel, &c.left_operand, env, None)?;
    let rlocation = match (token_kind(&c.operator), location) {
        (Some(TK::Equal), AsStatement) => RightOfAssignment,
        (Some(TK::Equal), UsingStatement) => RightOfAssignmentInUsingStatement,
        _ => TopLevel,
    };
    let right = p_expr_with_loc(rlocation, &c.right_operand, env, None)?;
    p_bop(pos, &c.operator, left, right, env)
}

fn p_token<'a>(
    node: S<'a>,
    t: &'a PositionedToken<'_>,
    env: &mut Env<'a>,
    location: ExprLocation,
) -> Result<Expr_> {
    use ExprLocation::*;
    match (location, t.kind()) {
        (MemberSelect, TK::Variable) => mk_lvar(node, env),
        (InDoubleQuotedString, TK::HeredocStringLiteral)
        | (InDoubleQuotedString, TK::HeredocStringLiteralHead)
        | (InDoubleQuotedString, TK::HeredocStringLiteralTail) => Ok(Expr_::String(
            wrap_unescaper(text_str(node, env), unescape_heredoc, p_pos(node, env))?,
        )),
        (InDoubleQuotedString, _) => Ok(Expr_::String(wrap_unescaper(
            text_str(node, env),
            unesc_dbl,
            p_pos(node, env),
        )?)),
        (MemberSelect, _)
        | (TopLevel, _)
        | (AsStatement, _)
        | (UsingStatement, _)
        | (RightOfAssignment, _)
        | (RightOfAssignmentInUsingStatement, _)
        | (RightOfReturn, _)
        | (CallReceiver, _) => Ok(Expr_::mk_id(pos_name(node, env)?)),
    }
}

fn p_yield_expr<'a>(
    node: S<'a>,
    c: &'a YieldExpressionChildren<'_, PositionedToken<'_>, PositionedValue<'_>>,
    env: &mut Env<'a>,
    pos: Pos,
    location: ExprLocation,
) -> Result<Expr_> {
    use ExprLocation::*;
    env.saw_yield = true;
    if location !=
AsStatement && location != RightOfAssignment && location != RightOfAssignmentInUsingStatement { raise_parsing_error(node, env, &syntax_error::invalid_yield); } if c.operand.is_missing() { Ok(Expr_::mk_yield(ast::Afield::AFvalue(Expr::new( (), pos, Expr_::Null, )))) } else { Ok(Expr_::mk_yield(p_afield(&c.operand, env)?)) } } fn p_scope_resolution_expr<'a>( node: S<'a>, c: &'a ScopeResolutionExpressionChildren<'_, PositionedToken<'_>, PositionedValue<'_>>, env: &mut Env<'a>, pos: Pos, location: ExprLocation, ) -> Result<Expr_> { let qual = p_expr(&c.qualifier, env)?; if let Expr_::Id(id) = &qual.2 { fail_if_invalid_reified_generic(node, env, &id.1); } match &c.name.children { Token(token) if token.kind() == TK::Variable => { if location == ExprLocation::CallReceiver { let ast::Id(p, name) = pos_name(&c.name, env)?; Ok(Expr_::mk_class_get( ast::ClassId((), pos, ast::ClassId_::CIexpr(qual)), ast::ClassGetExpr::CGexpr(Expr( (), p.clone(), Expr_::mk_lvar(ast::Lid::new(p, name)), )), ast::PropOrMethod::IsMethod, )) } else { let ast::Id(p, name) = pos_name(&c.name, env)?; Ok(Expr_::mk_class_get( ast::ClassId((), pos, ast::ClassId_::CIexpr(qual)), ast::ClassGetExpr::CGstring((p, name)), ast::PropOrMethod::IsProp, )) } } _ => { let Expr(_, p, expr_) = p_expr(&c.name, env)?; match expr_ { Expr_::String(id) => Ok(Expr_::mk_class_const( ast::ClassId((), pos, ast::ClassId_::CIexpr(qual)), ( p.clone(), String::from_utf8(id.into()).map_err(|e| Error::ParsingError { message: e.to_string(), pos: p, })?, ), )), Expr_::Id(id) => { let ast::Id(p, n) = *id; Ok(Expr_::mk_class_const( ast::ClassId((), pos, ast::ClassId_::CIexpr(qual)), (p, n), )) } Expr_::Lvar(id) if location != ExprLocation::CallReceiver => { let ast::Lid(p, (_, n)) = *id; Ok(Expr_::mk_class_get( ast::ClassId((), pos, ast::ClassId_::CIexpr(qual)), ast::ClassGetExpr::CGstring((p, n)), ast::PropOrMethod::IsProp, )) } _ => Ok(Expr_::mk_class_get( ast::ClassId((), pos, ast::ClassId_::CIexpr(qual)), ast::ClassGetExpr::CGexpr(Expr((), p, expr_)), match location { ExprLocation::CallReceiver => ast::PropOrMethod::IsMethod, _ => ast::PropOrMethod::IsProp, }, )), } } } } fn p_cast_expr<'a>( c: &'a CastExpressionChildren<'_, PositionedToken<'_>, PositionedValue<'_>>, env: &mut Env<'a>, ) -> Result<Expr_> { Ok(Expr_::mk_cast( p_hint(&c.type_, env)?, p_expr(&c.operand, env)?, )) } fn p_prefixed_code_expr<'a>( c: &'a PrefixedCodeExpressionChildren<'_, PositionedToken<'_>, PositionedValue<'_>>, env: &mut Env<'a>, ) -> Result<Expr_> { let src_expr = if !c.body.is_compound_statement() { p_expr(&c.body, env)? 
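        // (A compound-statement body is instead wrapped in a zero-argument
        // lambda and immediately invoked; see the else branch below.)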
} else { let pos = p_pos(&c.body, env); // Take the body and create a no argument lambda expression let (body, yield_) = map_yielding(&c.body, env, p_function_body)?; let external = c.body.is_external(); let fun = ast::Fun_ { span: pos.clone(), readonly_this: None, // filled in by mk_unop annotation: (), readonly_ret: None, ret: ast::TypeHint((), None), body: ast::FuncBody { fb_ast: body }, fun_kind: mk_fun_kind(SuspensionKind::SKSync, yield_), params: vec![], ctxs: None, unsafe_ctxs: None, user_attributes: ast::UserAttributes(vec![]), external, doc_comment: None, }; let recv = ast::Expr::new((), pos.clone(), Expr_::mk_lfun(fun, vec![])); // Immediately invoke the lambda by wrapping in a call expression node let expr = Expr_::mk_call(ast::CallExpr { func: recv, targs: vec![], args: vec![], unpacked_arg: None, }); ast::Expr::new((), pos, expr) }; let hint = p_hint(&c.prefix, env)?; let desugar_result = desugar(&hint, src_expr, env); for (pos, msg) in desugar_result.errors { raise_parsing_error_pos(&pos, env, &msg); } Ok(desugar_result.expr.2) } fn p_et_splice_expr<'a>(expr: S<'a>, env: &mut Env<'a>, location: ExprLocation) -> Result<Expr_> { let inner_pos = p_pos(expr, env); let inner_expr_ = p_expr_recurse(location, expr, env, None)?; let inner_expr = ast::Expr::new((), inner_pos, inner_expr_); Ok(Expr_::ETSplice(Box::new(inner_expr))) } fn p_conditional_expr<'a>( c: &'a ConditionalExpressionChildren<'_, PositionedToken<'_>, PositionedValue<'_>>, env: &mut Env<'a>, ) -> Result<Expr_> { let alter = p_expr(&c.alternative, env)?; let consequence = map_optional(&c.consequence, env, p_expr)?; let condition = p_expr(&c.test, env)?; Ok(Expr_::mk_eif(condition, consequence, alter)) } fn p_subscript_expr<'a>( c: &'a SubscriptExpressionChildren<'_, PositionedToken<'_>, PositionedValue<'_>>, env: &mut Env<'a>, ) -> Result<Expr_> { Ok(Expr_::mk_array_get( p_expr(&c.receiver, env)?, map_optional(&c.index, env, p_expr)?, )) } fn p_embedded_subscript_expr<'a>( c: &'a EmbeddedSubscriptExpressionChildren<'_, PositionedToken<'_>, PositionedValue<'_>>, env: &mut Env<'a>, location: ExprLocation, ) -> Result<Expr_> { Ok(Expr_::mk_array_get( p_expr(&c.receiver, env)?, map_optional(&c.index, env, |n, e| p_expr_with_loc(location, n, e, None))?, )) } fn p_constructor_call<'a>( node: S<'a>, c: &'a ConstructorCallChildren<'_, PositionedToken<'_>, PositionedValue<'_>>, env: &mut Env<'a>, pos: Pos, ) -> Result<Expr_> { let (args, varargs) = split_args_vararg(&c.argument_list, env)?; let (e, hl) = match &c.type_.children { GenericTypeSpecifier(c) => { let name = pos_name(&c.class_type, env)?; let hints = match &c.argument_list.children { TypeArguments(c) => could_map(&c.types, env, p_targ)?, _ => missing_syntax("generic type arguments", &c.argument_list, env)?, }; (mk_id_expr(name), hints) } SimpleTypeSpecifier(_) => { let name = pos_name(&c.type_, env)?; (mk_id_expr(name), vec![]) } _ => (p_expr(&c.type_, env)?, vec![]), }; if let Expr_::Id(name) = &e.2 { fail_if_invalid_reified_generic(node, env, &name.1); fail_if_invalid_class_creation(node, env, &name.1); } Ok(Expr_::mk_new( ast::ClassId((), pos, ast::ClassId_::CIexpr(e)), hl, args.into_iter().map(|(_, e)| e).collect(), varargs, (), )) } fn p_generic_type_specifier<'a>( c: &'a GenericTypeSpecifierChildren<'_, PositionedToken<'_>, PositionedValue<'_>>, env: &mut Env<'a>, ) -> Result<Expr_> { if !c.argument_list.is_missing() { raise_parsing_error(&c.argument_list, env, &syntax_error::targs_not_allowed) } Ok(Expr_::mk_id(pos_name(&c.class_type, env)?)) } fn 
p_shape_expr<'a>(
    c: &'a ShapeExpressionChildren<'_, PositionedToken<'_>, PositionedValue<'_>>,
    env: &mut Env<'a>,
) -> Result<Expr_> {
    Ok(Expr_::Shape(could_map(&c.fields, env, |n, e| {
        map_shape_expression_field(n, e, p_expr)
    })?))
}

fn p_prefixed_string_expr<'a>(
    node: S<'a>,
    c: &'a PrefixedStringExpressionChildren<'_, PositionedToken<'_>, PositionedValue<'_>>,
    env: &mut Env<'a>,
) -> Result<Expr_> {
    /* Temporarily allow only `re`-prefixed strings */
    let name_text = text(&c.name, env);
    if name_text != "re" {
        raise_parsing_error(node, env, &syntax_error::non_re_prefix);
    }
    Ok(Expr_::mk_prefixed_string(name_text, p_expr(&c.str, env)?))
}

fn p_is_expr<'a>(left: S<'a>, right: S<'a>, env: &mut Env<'a>) -> Result<Expr_> {
    Ok(Expr_::mk_is(p_expr(left, env)?, p_hint(right, env)?))
}

fn p_as_expr<'a>(left: S<'a>, right: S<'a>, env: &mut Env<'a>, nullable: bool) -> Result<Expr_> {
    Ok(Expr_::mk_as(
        p_expr(left, env)?,
        p_hint(right, env)?,
        nullable,
    ))
}

fn p_upcast_expr<'a>(left: S<'a>, right: S<'a>, env: &mut Env<'a>) -> Result<Expr_> {
    Ok(Expr_::mk_upcast(p_expr(left, env)?, p_hint(right, env)?))
}

fn p_use_var<'a>(n: S<'a>, e: &mut Env<'a>) -> Result<ast::CaptureLid> {
    match &n.children {
        Token(_) => {
            let lid = mk_name_lid(n, e)?;
            Ok(ast::CaptureLid((), lid))
        }
        _ => missing_syntax("use variable", n, e),
    }
}

fn p_anonymous_function<'a>(
    node: S<'a>,
    c: &'a AnonymousFunctionChildren<'_, PositionedToken<'_>, PositionedValue<'_>>,
    env: &mut Env<'a>,
) -> Result<Expr_> {
    let ctxs = p_contexts(
        &c.ctx_list,
        env,
        // TODO(coeffects) Anonymous functions may be able to support this:: contexts
        Some((
            &syntax_error::lambda_effect_polymorphic("An anonymous function"),
            false,
        )),
    );
    let unsafe_ctxs = ctxs.clone();
    let p_use = |n: S<'a>, e: &mut Env<'a>| match &n.children {
        AnonymousFunctionUseClause(c) => could_map(&c.variables, e, p_use_var),
        _ => Ok(vec![]),
    };
    let suspension_kind = mk_suspension_kind(&c.async_keyword);
    let (body, yield_) = {
        let mut env1 = Env::clone_and_unset_toplevel_if_toplevel(env);
        map_yielding(&c.body, env1.as_mut(), p_function_body)?
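        // map_yielding also reports whether the body used `yield`; that flag
        // selects the FunKind via mk_fun_kind below.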
}; let doc_comment = extract_docblock(node, env).or_else(|| env.top_docblock().clone()); let user_attributes = p_user_attributes(&c.attribute_spec, env); let external = c.body.is_external(); let params = could_map(&c.parameters, env, p_fun_param)?; let fun = ast::Fun_ { span: p_pos(node, env), readonly_this: None, // set in process_readonly_expr annotation: (), readonly_ret: map_optional(&c.readonly_return, env, p_readonly)?, ret: ast::TypeHint((), map_optional(&c.type_, env, p_hint)?), body: ast::FuncBody { fb_ast: body }, fun_kind: mk_fun_kind(suspension_kind, yield_), params, ctxs, unsafe_ctxs, user_attributes, external, doc_comment, }; let use_ = p_use(&c.use_, env).unwrap_or_else(|_| vec![]); Ok(Expr_::mk_efun(ast::Efun { fun, use_, closure_class_name: None, })) } fn p_awaitable_creation_expr<'a>( c: &'a AwaitableCreationExpressionChildren<'_, PositionedToken<'_>, PositionedValue<'_>>, env: &mut Env<'a>, pos: Pos, ) -> Result<Expr_> { let suspension_kind = mk_suspension_kind(&c.async_); let (blk, yld) = map_yielding(&c.compound_statement, env, p_function_body)?; let user_attributes = p_user_attributes(&c.attribute_spec, env); let external = c.compound_statement.is_external(); let body = ast::Fun_ { span: pos.clone(), annotation: (), readonly_this: None, // set in process_readonly_expr readonly_ret: None, // TODO: awaitable creation expression ret: ast::TypeHint((), None), body: ast::FuncBody { fb_ast: if blk.is_empty() { let pos = p_pos(&c.compound_statement, env); ast::Block(vec![ast::Stmt::noop(pos)]) } else { blk }, }, fun_kind: mk_fun_kind(suspension_kind, yld), params: vec![], ctxs: None, // TODO(T70095684) unsafe_ctxs: None, // TODO(T70095684) user_attributes, external, doc_comment: None, }; Ok(Expr_::mk_call(ast::CallExpr { func: Expr::new((), pos, Expr_::mk_lfun(body, vec![])), targs: vec![], args: vec![], unpacked_arg: None, })) } fn p_xhp_expr<'a>( c: &'a XHPExpressionChildren<'_, PositionedToken<'_>, PositionedValue<'_>>, env: &mut Env<'a>, ) -> Result<Expr_> { if let XHPOpen(c1) = &c.open.children { let name = pos_name(&c1.name, env)?; let attrs = could_map(&c1.attributes, env, p_xhp_attr)?; let exprs = aggregate_xhp_tokens(env, &c.body)? 
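            // aggregate_xhp_tokens has merged adjacent XHP body tokens and
            // dropped XHP comments; lower each remaining node.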
.iter() .map(|n| p_xhp_embedded(n, env, unesc_xhp)) .collect::<Result<Vec<_>, _>>()?; let id = if env.empty_ns_env.disable_xhp_element_mangling { ast::Id(name.0, name.1) } else { ast::Id(name.0, String::from(":") + &name.1) }; Ok(Expr_::mk_xml( // TODO: update pos_name to support prefix id, attrs, exprs, )) } else { missing_syntax("XHP open", &c.open, env) } } fn p_enum_class_label_expr<'a>( c: &'a EnumClassLabelExpressionChildren<'_, PositionedToken<'_>, PositionedValue<'_>>, env: &mut Env<'a>, ) -> Result<Expr_> { use syntax_kind::SyntaxKind; /* Foo#Bar can be the following: * - short version: Foo is None/missing and we only have #Bar * - Foo is a name -> fully qualified Foo#Bar */ let ast::Id(_label_pos, label_name) = pos_name(&c.expression, env)?; let qual = if c.qualifier.is_missing() { None } else { let name = pos_name(&c.qualifier, env)?; Some(name) }; match c.qualifier.kind() { SyntaxKind::Missing => {} SyntaxKind::QualifiedName => {} SyntaxKind::Token(TK::Name) => {} _ => raise_parsing_error( &c.qualifier, env, &syntax_error::invalid_enum_class_label_qualifier, ), }; Ok(Expr_::mk_enum_class_label(qual, label_name)) } fn p_package_expr<'a>( p: &'a PackageExpressionChildren<'_, PositionedToken<'_>, PositionedValue<'_>>, env: &mut Env<'a>, ) -> Result<Expr_> { let id = pos_name(&p.name, env)?; Ok(Expr_::mk_package(id)) } fn mk_lid(p: Pos, s: String) -> ast::Lid { ast::Lid(p, (0, s)) } fn mk_name_lid<'a>(name: S<'a>, env: &mut Env<'a>) -> Result<ast::Lid> { let name = pos_name(name, env)?; Ok(mk_lid(name.0, name.1)) } fn mk_lvar<'a>(name: S<'a>, env: &mut Env<'a>) -> Result<Expr_> { Ok(Expr_::mk_lvar(mk_name_lid(name, env)?)) } fn mk_id_expr(name: ast::Sid) -> ast::Expr { ast::Expr::new((), name.0.clone(), Expr_::mk_id(name)) } fn p_special_call<'a>(recv: S<'a>, args: S<'a>, e: &mut Env<'a>) -> Result<Expr_> { // Mark expression as CallReceiver so that we can correctly set // PropOrMethod field in ObjGet and ClassGet let recv = p_expr_with_loc(ExprLocation::CallReceiver, recv, e, None)?; let (args, varargs) = split_args_vararg(args, e)?; Ok(Expr_::mk_call(ast::CallExpr { func: recv, targs: vec![], args, unpacked_arg: varargs, })) } fn p_obj_get<'a>( location: ExprLocation, recv: S<'a>, op: S<'a>, name: S<'a>, e: &mut Env<'a>, ) -> Result<Expr_> { if recv.is_object_creation_expression() && !e.codegen() { raise_parsing_error(recv, e, &syntax_error::invalid_constructor_method_call); } let recv = p_expr(recv, e)?; let name = p_expr_with_loc(ExprLocation::MemberSelect, name, e, None)?; let op = p_null_flavor(op, e)?; Ok(Expr_::mk_obj_get( recv, name, op, match location { ExprLocation::CallReceiver => ast::PropOrMethod::IsMethod, _ => ast::PropOrMethod::IsProp, }, )) } fn p_xhp_embedded<'a, F>(node: S<'a>, env: &mut Env<'a>, escaper: F) -> Result<ast::Expr> where F: FnOnce(&[u8]) -> Vec<u8>, { if let Some(kind) = token_kind(node) { if env.codegen() && TK::XHPStringLiteral == kind { let p = p_pos(node, env); /* for XHP string literals (attribute values) just extract value from quotes and decode HTML entities */ let text = html_entities::decode(get_quoted_content(node.full_text(env.source_text()))); Ok(ast::Expr::new((), p, Expr_::make_string(text))) } else if env.codegen() && TK::XHPBody == kind { let p = p_pos(node, env); /* for XHP body - only decode HTML entities */ let text = html_entities::decode(&unesc_xhp(node.full_text(env.source_text()))); Ok(ast::Expr::new((), p, Expr_::make_string(text))) } else { let p = p_pos(node, env); let s = escaper(node.full_text(env.source_text())); 
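            // Outside codegen, the caller-supplied escaper has already been
            // applied to the raw node text above.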
Ok(ast::Expr::new((), p, Expr_::make_string(s))) } } else { p_expr(node, env) } } fn p_xhp_attr<'a>(node: S<'a>, env: &mut Env<'a>) -> Result<ast::XhpAttribute> { match &node.children { XHPSimpleAttribute(c) => { let attr_expr = &c.expression; let name = p_pstring(&c.name, env)?; let expr = if attr_expr.is_braced_expression() && env.file_mode() == file_info::Mode::Mhhi && !env.codegen() { ast::Expr::new((), env.mk_none_pos(), Expr_::Null) } else { p_xhp_embedded(attr_expr, env, unesc_xhp_attr)? }; let xhp_simple = ast::XhpSimple { name, type_: (), expr, }; Ok(ast::XhpAttribute::XhpSimple(xhp_simple)) } XHPSpreadAttribute(c) => { let expr = p_xhp_embedded(&c.expression, env, unesc_xhp)?; Ok(ast::XhpAttribute::XhpSpread(expr)) } _ => missing_syntax("XHP attribute", node, env), } } fn aggregate_xhp_tokens<'a>(env: &mut Env<'a>, nodes: S<'a>) -> Result<Vec<S<'a>>> { let nodes = nodes.syntax_node_to_list_skip_separator(); let mut state = (None, None, vec![]); // (start, end, result) let mut combine = |state: &mut (Option<S<'a>>, Option<S<'a>>, Vec<S<'a>>)| { match (state.0, state.1) { (Some(s), None) => state.2.push(s), (Some(s), Some(e)) => { let token = env .token_factory .concatenate(s.get_token().unwrap(), e.get_token().unwrap()); let node = env.arena.alloc(Syntax::make_token(token)); state.2.push(node) } _ => {} } state.0 = None; state.1 = None; Ok(()) }; for n in nodes { match &n.children { Token(t) if t.kind() == TK::XHPComment => { if state.0.is_some() { combine(&mut state)?; } } Token(_) => { if state.0.is_none() { state.0 = Some(n) } else { state.1 = Some(n) } } _ => { combine(&mut state)?; state.2.push(n); } } } combine(&mut state)?; Ok(state.2) } fn p_bop<'a>( pos: Pos, node: S<'a>, lhs: ast::Expr, rhs: ast::Expr, env: &mut Env<'a>, ) -> Result<Expr_> { use ast::Bop::*; let mk = |bop, lhs, rhs| Ok(Expr_::mk_binop(Binop { bop, lhs, rhs })); let mk_eq = |op, lhs, rhs| { Ok(Expr_::mk_binop(Binop { bop: Eq(Some(Box::new(op))), lhs, rhs, })) }; match token_kind(node) { Some(TK::Equal) => mk(Eq(None), lhs, rhs), Some(TK::Bar) => mk(Bar, lhs, rhs), Some(TK::Ampersand) => mk(Amp, lhs, rhs), Some(TK::Plus) => mk(Plus, lhs, rhs), Some(TK::Minus) => mk(Minus, lhs, rhs), Some(TK::Star) => mk(Star, lhs, rhs), Some(TK::Carat) => mk(Xor, lhs, rhs), Some(TK::Slash) => mk(Slash, lhs, rhs), Some(TK::Dot) => mk(Dot, lhs, rhs), Some(TK::Percent) => mk(Percent, lhs, rhs), Some(TK::LessThan) => mk(Lt, lhs, rhs), Some(TK::GreaterThan) => mk(Gt, lhs, rhs), Some(TK::EqualEqual) => mk(Eqeq, lhs, rhs), Some(TK::LessThanEqual) => mk(Lte, lhs, rhs), Some(TK::GreaterThanEqual) => mk(Gte, lhs, rhs), Some(TK::StarStar) => mk(Starstar, lhs, rhs), Some(TK::ExclamationEqual) => mk(Diff, lhs, rhs), Some(TK::BarEqual) => mk_eq(Bar, lhs, rhs), Some(TK::PlusEqual) => mk_eq(Plus, lhs, rhs), Some(TK::MinusEqual) => mk_eq(Minus, lhs, rhs), Some(TK::StarEqual) => mk_eq(Star, lhs, rhs), Some(TK::StarStarEqual) => mk_eq(Starstar, lhs, rhs), Some(TK::SlashEqual) => mk_eq(Slash, lhs, rhs), Some(TK::DotEqual) => mk_eq(Dot, lhs, rhs), Some(TK::PercentEqual) => mk_eq(Percent, lhs, rhs), Some(TK::CaratEqual) => mk_eq(Xor, lhs, rhs), Some(TK::AmpersandEqual) => mk_eq(Amp, lhs, rhs), Some(TK::BarBar) => mk(Barbar, lhs, rhs), Some(TK::AmpersandAmpersand) => mk(Ampamp, lhs, rhs), Some(TK::LessThanLessThan) => mk(Ltlt, lhs, rhs), Some(TK::GreaterThanGreaterThan) => mk(Gtgt, lhs, rhs), Some(TK::EqualEqualEqual) => mk(Eqeqeq, lhs, rhs), Some(TK::LessThanLessThanEqual) => mk_eq(Ltlt, lhs, rhs), Some(TK::GreaterThanGreaterThanEqual) => 
mk_eq(Gtgt, lhs, rhs),
        Some(TK::ExclamationEqualEqual) => mk(Diff2, lhs, rhs),
        Some(TK::LessThanEqualGreaterThan) => mk(Cmp, lhs, rhs),
        Some(TK::QuestionQuestion) => mk(QuestionQuestion, lhs, rhs),
        Some(TK::QuestionQuestionEqual) => mk_eq(QuestionQuestion, lhs, rhs),
        /* The ugly duckling: in the FFP, `|>` is parsed as a
         * `BinaryOperator`, whereas the typed AST has separate constructors for
         * Pipe and Binop. This is why we don't just project onto a
         * `bop`, but an `expr -> expr -> expr_`. */
        Some(TK::BarGreaterThan) => {
            let lid =
                ast::Lid::from_counter(pos, env.next_local_id(), special_idents::DOLLAR_DOLLAR);
            Ok(Expr_::mk_pipe(lid, lhs, rhs))
        }
        Some(TK::QuestionColon) => Ok(Expr_::mk_eif(lhs, None, rhs)),
        _ => missing_syntax("binary operator", node, env),
    }
}

fn p_exprs_with_loc<'a>(n: S<'a>, e: &mut Env<'a>) -> Result<(Pos, Vec<ast::Expr>)> {
    Ok((
        p_pos(n, e),
        could_map(n, e, |n, e| {
            p_expr_with_loc(ExprLocation::UsingStatement, n, e, None)
        })?,
    ))
}

fn p_stmt_list_<'a>(
    pos: &Pos,
    mut nodes: Iter<'_, S<'a>>,
    env: &mut Env<'a>,
) -> Result<Vec<ast::Stmt>> {
    let mut r = vec![];
    loop {
        match nodes.next() {
            Some(n) => match &n.children {
                UsingStatementFunctionScoped(c) => {
                    let body = p_stmt_list_(pos, nodes, env)?;
                    let f = |e: &mut Env<'a>| {
                        Ok(ast::Stmt::new(
                            pos.clone(),
                            ast::Stmt_::mk_using(ast::UsingStmt {
                                is_block_scoped: false,
                                has_await: !c.await_keyword.is_missing(),
                                exprs: p_exprs_with_loc(&c.expression, e)?,
                                block: ast::Block(body),
                            }),
                        ))
                    };
                    let using = lift_awaits_in_statement_(Either::Right(pos), env, f)?;
                    r.push(using);
                    break Ok(r);
                }
                _ => {
                    r.push(p_stmt(n, env)?);
                }
            },
            _ => break Ok(r),
        }
    }
}

fn handle_loop_body<'a>(pos: Pos, node: S<'a>, env: &mut Env<'a>) -> Result<ast::Stmt> {
    let list: Vec<_> = node.syntax_node_to_list_skip_separator().collect();
    let blk: ast::Block = p_stmt_list_(&pos, list.iter(), env)?
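        // Drop no-op statements; an empty body is normalized to a single
        // explicit noop below.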
.into_iter() .filter(|stmt| !stmt.1.is_noop()) .collect(); let body = if blk.is_empty() { ast::Block(vec![mk_noop(env)]) } else { blk }; Ok(ast::Stmt::new(pos, ast::Stmt_::mk_block(body))) } fn is_simple_assignment_await_expression<'a>(node: S<'a>) -> bool { match &node.children { ParenthesizedExpression(c) => is_simple_assignment_await_expression(&c.expression), BracedExpression(c) => is_simple_assignment_await_expression(&c.expression), BinaryExpression(c) => { token_kind(&c.operator) == Some(TK::Equal) && is_simple_await_expression(&c.right_operand) } _ => false, } } fn is_simple_await_expression<'a>(node: S<'a>) -> bool { match &node.children { ParenthesizedExpression(c) => is_simple_await_expression(&c.expression), BracedExpression(c) => is_simple_await_expression(&c.expression), PrefixUnaryExpression(c) => token_kind(&c.operator) == Some(TK::Await), _ => false, } } fn with_new_nonconcurrent_scope<'a, F, R>(env: &mut Env<'a>, f: F) -> R where F: FnOnce(&mut Env<'a>) -> R, { let saved_lifted_awaits = env.lifted_awaits.take(); let result = f(env); env.lifted_awaits = saved_lifted_awaits; result } fn with_new_concurrent_scope<'a, F, R>(env: &mut Env<'a>, f: F) -> Result<(LiftedAwaitExprs, R)> where F: FnOnce(&mut Env<'a>) -> Result<R>, { let saved_lifted_awaits = env.lifted_awaits.replace(LiftedAwaits { awaits: vec![], lift_kind: LiftedAwaitKind::LiftedFromConcurrent, }); let result = f(env); let lifted_awaits = mem::replace(&mut env.lifted_awaits, saved_lifted_awaits); let result = result?; let awaits = match lifted_awaits { Some(la) => process_lifted_awaits(la, env)?, None => parsing_error("lifted awaits should not be None", env.mk_none_pos())?, }; Ok((awaits, result)) } fn process_lifted_awaits<'a>( mut awaits: LiftedAwaits, env: &mut Env<'a>, ) -> Result<LiftedAwaitExprs> { for await_ in awaits.awaits.iter() { if (await_.1).1.is_none() { return parsing_error("none pos in lifted awaits", env.mk_none_pos()); } } awaits .awaits .sort_unstable_by(|a1, a2| Pos::cmp(&(a1.1).1, &(a2.1).1)); Ok(awaits.awaits) } fn clear_statement_scope<'a, F, R>(env: &mut Env<'a>, f: F) -> R where F: FnOnce(&mut Env<'a>) -> R, { use LiftedAwaitKind::*; match &env.lifted_awaits { Some(LiftedAwaits { lift_kind, .. }) if *lift_kind == LiftedFromStatement => { let saved_lifted_awaits = env.lifted_awaits.take(); let result = f(env); env.lifted_awaits = saved_lifted_awaits; result } _ => f(env), } } fn lift_awaits_in_statement<'a, F>(node: S<'a>, env: &mut Env<'a>, f: F) -> Result<ast::Stmt> where F: FnOnce(&mut Env<'a>) -> Result<ast::Stmt>, { lift_awaits_in_statement_(Either::Left(node), env, f) } fn strip_parens<'a>(node: S<'a>) -> S<'a> { match node.children { ParenthesizedExpression(c) => strip_parens(&c.expression), BracedExpression(c) => strip_parens(&c.expression), _ => node, } } fn lift_awaits_in_statement_<'a, F>( pos: Either<S<'a>, &Pos>, env: &mut Env<'a>, f: F, ) -> Result<ast::Stmt> where F: FnOnce(&mut Env<'a>) -> Result<ast::Stmt>, { use LiftedAwaitKind::*; let (lifted_awaits, result) = match env.lifted_awaits { Some(LiftedAwaits { lift_kind, .. }) if lift_kind == LiftedFromConcurrent => { (None, f(env)?) 
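            // Already inside a concurrent scope: leave the awaits for the
            // enclosing lift rather than wrapping again here.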
} _ => { let saved = env.lifted_awaits.replace(LiftedAwaits { awaits: vec![], lift_kind: LiftedFromStatement, }); let result = f(env); let lifted_awaits = mem::replace(&mut env.lifted_awaits, saved); let result = result?; (lifted_awaits, result) } }; if let Some(lifted_awaits) = lifted_awaits { if !lifted_awaits.awaits.is_empty() { let awaits = process_lifted_awaits(lifted_awaits, env)?; let pos = match pos { Either::Left(n) => p_pos(strip_parens(n), env), Either::Right(p) => p.clone(), }; return Ok(ast::Stmt::new( pos, ast::Stmt_::mk_awaitall(awaits, ast::Block(vec![result])), )); } } Ok(result) } fn lift_await<'a>( parent_pos: Pos, expr: ast::Expr, env: &mut Env<'a>, location: ExprLocation, ) -> Expr_ { use ExprLocation::AsStatement; use ExprLocation::RightOfAssignmentInUsingStatement; use ExprLocation::UsingStatement; match (&env.lifted_awaits, location) { (_, UsingStatement) | (_, RightOfAssignmentInUsingStatement) | (None, _) => { Expr_::mk_await(expr) } (Some(_), _) => { if location != AsStatement { let name = env.make_tmp_var_name(); let lid = ast::Lid::new(parent_pos, name.clone()); let await_lid = ast::Lid::new(expr.1.clone(), name); let await_ = (Some(await_lid), expr); if let Some(aw) = env.lifted_awaits.as_mut() { aw.awaits.push(await_) } Expr_::mk_lvar(lid) } else { if let Some(aw) = env.lifted_awaits.as_mut() { aw.awaits.push((None, expr)) } Expr_::Null } } } } fn p_stmt<'a>(node: S<'a>, env: &mut Env<'a>) -> Result<ast::Stmt> { clear_statement_scope(env, |e| { let docblock = extract_docblock(node, e); e.push_docblock(docblock); let result = p_stmt_(node, e); e.pop_docblock(); result }) } fn p_stmt_<'a>(node: S<'a>, env: &mut Env<'a>) -> Result<ast::Stmt> { let pos = p_pos(node, env); use ast::Stmt; use ast::Stmt_ as S_; let new = Stmt::new; match &node.children { SwitchStatement(c) => p_switch_stmt_(env, pos, c, node), MatchStatement(c) => p_match_stmt(env, pos, c, node), IfStatement(c) => p_if_stmt(env, pos, c, node), ExpressionStatement(c) => p_expression_stmt(env, pos, c, node), CompoundStatement(c) => handle_loop_body(pos, &c.statements, env), SyntaxList(_) => handle_loop_body(pos, node, env), ThrowStatement(c) => p_throw_stmt(env, pos, c, node), DoStatement(c) => p_do_stmt(env, pos, c, node), WhileStatement(c) => p_while_stmt(env, pos, c, node), UsingStatementBlockScoped(c) => p_using_statement_block_scoped_stmt(env, pos, c, node), UsingStatementFunctionScoped(c) => { p_using_statement_function_scoped_stmt(env, pos, c, node) } ForStatement(c) => p_for_stmt(env, pos, c, node), ForeachStatement(c) => p_foreach_stmt(env, pos, c, node), TryStatement(c) => p_try_stmt(env, pos, c, node), ReturnStatement(c) => p_return_stmt(env, pos, c, node), YieldBreakStatement(_) => { env.saw_yield = true; Ok(ast::Stmt::new(pos, ast::Stmt_::mk_yield_break())) } EchoStatement(c) => p_echo_stmt(env, pos, c, node), UnsetStatement(c) => p_unset_stmt(env, pos, c, node), BreakStatement(_) => Ok(new(pos, S_::Break)), ContinueStatement(_) => Ok(new(pos, S_::Continue)), ConcurrentStatement(c) => p_concurrent_stmt(env, pos, c, node), MarkupSection(_) => p_markup(node, env), DeclareLocalStatement(c) => p_declare_local_stmt(env, pos, c), _ => { raise_missing_syntax("statement", node, env); Ok(new(env.mk_none_pos(), S_::Noop)) } } } fn p_while_stmt<'a>( env: &mut Env<'a>, pos: Pos, c: &'a WhileStatementChildren<'a, PositionedToken<'a>, PositionedValue<'a>>, _node: S<'a>, ) -> Result<ast::Stmt> { use ast::Stmt; use ast::Stmt_ as S_; let new = Stmt::new; Ok(new( pos, S_::mk_while(p_expr(&c.condition, 
env)?, p_block(true, &c.body, env)?), )) } fn p_throw_stmt<'a>( env: &mut Env<'a>, pos: Pos, c: &'a ThrowStatementChildren<'a, PositionedToken<'a>, PositionedValue<'a>>, node: S<'a>, ) -> Result<ast::Stmt> { use ast::Stmt; use ast::Stmt_ as S_; let new = Stmt::new; lift_awaits_in_statement(node, env, |e| { Ok(new(pos, S_::mk_throw(p_expr(&c.expression, e)?))) }) } fn p_try_stmt<'a>( env: &mut Env<'a>, pos: Pos, c: &'a TryStatementChildren<'a, PositionedToken<'a>, PositionedValue<'a>>, _node: S<'a>, ) -> Result<ast::Stmt> { use ast::Stmt; use ast::Stmt_ as S_; let new = Stmt::new; Ok(new( pos, S_::mk_try( p_block(false, &c.compound_statement, env)?, could_map(&c.catch_clauses, env, |n, e| match &n.children { CatchClause(c) => Ok(ast::Catch( pos_name(&c.type_, e)?, lid_from_name(&c.variable, e)?, p_block(true, &c.body, e)?, )), _ => missing_syntax("catch clause", n, e), })?, match &c.finally_clause.children { FinallyClause(c) => p_finally_block(false, &c.body, env)?, _ => Default::default(), }, ), )) } fn p_concurrent_stmt<'a>( env: &mut Env<'a>, _pos: Pos, c: &'a ConcurrentStatementChildren<'a, PositionedToken<'a>, PositionedValue<'a>>, _node: S<'a>, ) -> Result<ast::Stmt> { use ast::Stmt; use ast::Stmt_ as S_; let new = Stmt::new; let keyword_pos = p_pos(&c.keyword, env); if env.parser_options.po_unwrap_concurrent { return p_stmt(&c.statement, env); } let (lifted_awaits, Stmt(stmt_pos, stmt)) = with_new_concurrent_scope(env, |e| p_stmt(&c.statement, e))?; let stmt = match stmt { S_::Block(stmts) => { use ast::Bop::Eq; /* Reuse tmp vars from lifted_awaits, this is safe because there will * always be more awaits with tmp vars than statements with assignments. */ let mut tmp_vars = lifted_awaits .iter() .filter_map(|lifted_await| lifted_await.0.as_ref().map(|x| &x.1)); let mut body_stmts = vec![]; let mut assign_stmts = vec![]; for n in stmts.into_iter() { if !n.is_assign_expr() && !n.is_declare_local_stmt() { body_stmts.push(n); continue; } match n { Stmt(p1, S_::Expr(expr)) => { if let Expr((), p2, Expr_::Binop(bop)) = *expr { if let Binop { bop: Eq(op), lhs: e1, rhs: e2, } = *bop { if let Some(tv) = tmp_vars.next() { let tmp_n = Expr::mk_lvar(&e2.1, &(tv.1)); if tmp_n.lvar_name() != e2.lvar_name() { let new_n = new( p1.clone(), S_::mk_expr(Expr::new( (), p2.clone(), Expr_::mk_binop(Binop { bop: Eq(None), lhs: tmp_n.clone(), rhs: e2.clone(), }), )), ); body_stmts.push(new_n); } let assign_stmt = new( p1, S_::mk_expr(Expr::new( (), p2, Expr_::mk_binop(Binop { bop: Eq(op), lhs: e1, rhs: tmp_n, }), )), ); assign_stmts.push(assign_stmt); } else { raise_parsing_error_pos( &stmt_pos, env, &syntax_error::statement_without_await_in_concurrent_block, ); assign_stmts.push(Stmt( p1, S_::Expr(Box::new(Expr( (), p2, Expr_::Binop(Box::new(Binop { bop: Eq(op), lhs: e1, rhs: e2, })), ))), )) } } } } Stmt(p1, S_::DeclareLocal(box (id, hint, Some(e2)))) => { if let Some(tv) = tmp_vars.next() { let tmp_n = Expr::mk_lvar(&e2.1, &(tv.1)); if tmp_n.lvar_name() != e2.lvar_name() { let new_n = new( p1.clone(), S_::mk_expr(Expr::new( (), p1.clone(), Expr_::mk_binop(Binop { bop: Eq(None), lhs: tmp_n.clone(), rhs: e2.clone(), }), )), ); body_stmts.push(new_n); } let assign_stmt = Stmt(p1, S_::DeclareLocal(Box::new((id, hint, Some(tmp_n))))); assign_stmts.push(assign_stmt); } else { raise_parsing_error_pos( &stmt_pos, env, &syntax_error::statement_without_await_in_concurrent_block, ); assign_stmts .push(Stmt(p1, S_::DeclareLocal(Box::new((id, hint, Some(e2)))))) } } Stmt(p1, S_::DeclareLocal(box (id, hint, None))) => 
{ let assign_stmt = Stmt(p1, S_::DeclareLocal(Box::new((id, hint, None)))); assign_stmts.push(assign_stmt); } _ => raise_missing_syntax("assignment statement", &c.keyword, env), } } body_stmts.append(&mut assign_stmts); new(stmt_pos, S_::mk_block(ast::Block(body_stmts))) } _ => missing_syntax("block in concurrent", &c.keyword, env)?, }; Ok(new( keyword_pos, S_::mk_awaitall(lifted_awaits, ast::Block(vec![stmt])), )) } fn p_unset_stmt<'a>( env: &mut Env<'a>, pos: Pos, c: &'a UnsetStatementChildren<'a, PositionedToken<'a>, PositionedValue<'a>>, node: S<'a>, ) -> Result<ast::Stmt> { use ast::Stmt; use ast::Stmt_ as S_; let new = Stmt::new; let f = |e: &mut Env<'a>| -> Result<ast::Stmt> { let args = could_map(&c.variables, e, p_expr_for_normal_argument)?; let unset = match &c.keyword.children { QualifiedName(_) | SimpleTypeSpecifier(_) | Token(_) => { let name = pos_name(&c.keyword, e)?; ast::Expr::new((), name.0.clone(), Expr_::mk_id(name)) } _ => missing_syntax("id", &c.keyword, e)?, }; Ok(new( pos.clone(), S_::mk_expr(ast::Expr::new( (), pos, Expr_::mk_call(ast::CallExpr { func: unset, targs: vec![], args, unpacked_arg: None, }), )), )) }; lift_awaits_in_statement(node, env, f) } fn p_echo_stmt<'a>( env: &mut Env<'a>, pos: Pos, c: &'a EchoStatementChildren<'a, PositionedToken<'a>, PositionedValue<'a>>, node: S<'a>, ) -> Result<ast::Stmt> { use ast::Stmt; use ast::Stmt_ as S_; let new = Stmt::new; let f = |e: &mut Env<'a>| -> Result<ast::Stmt> { let echo = match &c.keyword.children { QualifiedName(_) | SimpleTypeSpecifier(_) | Token(_) => { let name = pos_name(&c.keyword, e)?; ast::Expr::new((), name.0.clone(), Expr_::mk_id(name)) } _ => missing_syntax("id", &c.keyword, e)?, }; let args = could_map(&c.expressions, e, p_expr_for_normal_argument)?; Ok(new( pos.clone(), S_::mk_expr(ast::Expr::new( (), pos, Expr_::mk_call(ast::CallExpr { func: echo, targs: vec![], args, unpacked_arg: None, }), )), )) }; lift_awaits_in_statement(node, env, f) } fn p_return_stmt<'a>( env: &mut Env<'a>, pos: Pos, c: &'a ReturnStatementChildren<'a, PositionedToken<'a>, PositionedValue<'a>>, node: S<'a>, ) -> Result<ast::Stmt> { let f = |e: &mut Env<'a>| -> Result<ast::Stmt> { let expr = match &c.expression.children { Missing => None, _ => Some(p_expr_with_loc( ExprLocation::RightOfReturn, &c.expression, e, None, )?), }; Ok(ast::Stmt::new(pos, ast::Stmt_::mk_return(expr))) }; if is_simple_await_expression(&c.expression) { f(env) } else { lift_awaits_in_statement(node, env, f) } } fn p_foreach_stmt<'a>( env: &mut Env<'a>, pos: Pos, c: &'a ForeachStatementChildren<'a, PositionedToken<'a>, PositionedValue<'a>>, node: S<'a>, ) -> Result<ast::Stmt> { use ast::Stmt; use ast::Stmt_ as S_; let new = Stmt::new; let f = |e: &mut Env<'a>| -> Result<ast::Stmt> { let col = p_expr(&c.collection, e)?; let akw = match token_kind(&c.await_keyword) { Some(TK::Await) => Some(p_pos(&c.await_keyword, e)), _ => None, }; let value = p_expr(&c.value, e)?; let akv = match (akw, &c.key.children) { (Some(p), Missing) => ast::AsExpr::AwaitAsV(p, value), (None, Missing) => ast::AsExpr::AsV(value), (Some(p), _) => ast::AsExpr::AwaitAsKv(p, p_expr(&c.key, e)?, value), (None, _) => ast::AsExpr::AsKv(p_expr(&c.key, e)?, value), }; let blk = p_block(true, &c.body, e)?; Ok(new(pos, S_::mk_foreach(col, akv, blk))) }; lift_awaits_in_statement(node, env, f) } fn p_for_stmt<'a>( env: &mut Env<'a>, pos: Pos, c: &'a ForStatementChildren<'a, PositionedToken<'a>, PositionedValue<'a>>, node: S<'a>, ) -> Result<ast::Stmt> { use ast::Stmt; use ast::Stmt_ as S_; 
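    // Lower the initializer, condition, and end-of-loop expression lists, then
    // let lift_awaits_in_statement hoist any awaits out of the whole statement.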
let f = |e: &mut Env<'a>| -> Result<ast::Stmt> { let ini = could_map(&c.initializer, e, p_expr)?; let ctr = map_optional(&c.control, e, p_expr)?; let eol = could_map(&c.end_of_loop, e, p_expr)?; let blk = p_block(true, &c.body, e)?; Ok(Stmt::new(pos, S_::mk_for(ini, ctr, eol, blk))) }; lift_awaits_in_statement(node, env, f) } fn p_using_statement_function_scoped_stmt<'a>( env: &mut Env<'a>, pos: Pos, c: &'a UsingStatementFunctionScopedChildren<'a, PositionedToken<'a>, PositionedValue<'a>>, node: S<'a>, ) -> Result<ast::Stmt> { use ast::Stmt; use ast::Stmt_ as S_; let new = Stmt::new; let f = |e: &mut Env<'a>| -> Result<ast::Stmt> { Ok(new( pos, S_::mk_using(ast::UsingStmt { is_block_scoped: false, has_await: !&c.await_keyword.is_missing(), exprs: p_exprs_with_loc(&c.expression, e)?, block: ast::Block(vec![mk_noop(e)]), }), )) }; lift_awaits_in_statement(node, env, f) } fn p_using_statement_block_scoped_stmt<'a>( env: &mut Env<'a>, pos: Pos, c: &'a UsingStatementBlockScopedChildren<'a, PositionedToken<'a>, PositionedValue<'a>>, node: S<'a>, ) -> Result<ast::Stmt> { use ast::Stmt; use ast::Stmt_ as S_; let new = Stmt::new; let f = |e: &mut Env<'a>| -> Result<ast::Stmt> { Ok(new( pos, S_::mk_using(ast::UsingStmt { is_block_scoped: true, has_await: !&c.await_keyword.is_missing(), exprs: p_exprs_with_loc(&c.expressions, e)?, block: p_block(false, &c.body, e)?, }), )) }; lift_awaits_in_statement(node, env, f) } fn p_do_stmt<'a>( env: &mut Env<'a>, pos: Pos, c: &'a DoStatementChildren<'a, PositionedToken<'a>, PositionedValue<'a>>, _node: S<'a>, ) -> Result<ast::Stmt> { use ast::Stmt; use ast::Stmt_ as S_; let new = Stmt::new; Ok(new( pos, S_::mk_do( p_block(false /* remove noop */, &c.body, env)?, p_expr(&c.condition, env)?, ), )) } fn p_expression_stmt<'a>( env: &mut Env<'a>, pos: Pos, c: &'a ExpressionStatementChildren<'a, PositionedToken<'a>, PositionedValue<'a>>, node: S<'a>, ) -> Result<ast::Stmt> { use ast::Stmt; use ast::Stmt_ as S_; let new = Stmt::new; let expr = &c.expression; let f = |e: &mut Env<'a>| -> Result<ast::Stmt> { if expr.is_missing() { Ok(new(pos, S_::Noop)) } else { Ok(new( pos, S_::mk_expr(p_expr_with_loc(ExprLocation::AsStatement, expr, e, None)?), )) } }; if is_simple_assignment_await_expression(expr) || is_simple_await_expression(expr) { f(env) } else { lift_awaits_in_statement(node, env, f) } } fn p_if_stmt<'a>( env: &mut Env<'a>, pos: Pos, c: &'a IfStatementChildren<'a, PositionedToken<'a>, PositionedValue<'a>>, node: S<'a>, ) -> Result<ast::Stmt> { use ast::Stmt; use ast::Stmt_ as S_; let new = Stmt::new; let f = |env: &mut Env<'a>| -> Result<ast::Stmt> { let condition = p_expr(&c.condition, env)?; let statement = p_block(true /* remove noop */, &c.statement, env)?; let else_ = match &c.else_clause.children { ElseClause(c) => p_block(true, &c.statement, env)?, Missing => ast::Block(vec![mk_noop(env)]), _ => missing_syntax("else clause", &c.else_clause, env)?, }; Ok(new(pos, S_::mk_if(condition, statement, else_))) }; lift_awaits_in_statement(node, env, f) } fn p_switch_stmt_<'a>( env: &mut Env<'a>, pos: Pos, c: &'a SwitchStatementChildren<'a, PositionedToken<'a>, PositionedValue<'a>>, node: S<'a>, ) -> Result<ast::Stmt> { use ast::Stmt; use ast::Stmt_ as S_; let new = Stmt::new; let p_label = |n: S<'a>, e: &mut Env<'a>| -> Result<ast::GenCase> { match &n.children { CaseLabel(c) => Ok(aast::GenCase::Case(aast::Case( p_expr(&c.expression, e)?, Default::default(), ))), DefaultLabel(_) => Ok(aast::GenCase::Default(aast::DefaultCase( p_pos(n, e), Default::default(), ))), 
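            // The statements belonging to each label are attached later in
            // p_section, so both label arms start out with an empty body.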
_ => missing_syntax("switch label", n, e), } }; let p_section = |n: S<'a>, e: &mut Env<'a>| -> Result<Vec<ast::GenCase>> { match &n.children { SwitchSection(c) => { let mut blk = ast::Block(could_map(&c.statements, e, p_stmt)?); if !c.fallthrough.is_missing() { blk.push(new(e.mk_none_pos(), S_::Fallthrough)); } let mut labels = could_map(&c.labels, e, p_label)?; match labels.last_mut() { Some(aast::GenCase::Default(aast::DefaultCase(_, b))) => *b = blk, Some(aast::GenCase::Case(aast::Case(_, b))) => *b = blk, _ => raise_parsing_error(n, e, "Malformed block result"), } Ok(labels) } _ => missing_syntax("switch section", n, e), } }; let f = |env: &mut Env<'a>| -> Result<ast::Stmt> { let cases = itertools::concat(could_map(&c.sections, env, p_section)?); let last_is_default = matches!(cases.last(), Some(aast::GenCase::Default(_))); let (cases, mut defaults): (Vec<ast::Case>, Vec<ast::DefaultCase>) = cases.into_iter().partition_map(|case| match case { aast::GenCase::Case(x @ aast::Case(..)) => Either::Left(x), aast::GenCase::Default(x @ aast::DefaultCase(..)) => Either::Right(x), }); if defaults.len() > 1 { let aast::DefaultCase(pos, _) = &defaults[1]; raise_parsing_error_pos(pos, env, &syntax_error::multiple_defaults_in_switch); } let default = match defaults.pop() { Some(default @ aast::DefaultCase(..)) => { if last_is_default { Some(default) } else { let aast::DefaultCase(pos, _) = default; raise_parsing_error_pos(&pos, env, &syntax_error::default_switch_case_not_last); None } } None => None, }; Ok(new( pos, S_::mk_switch(p_expr(&c.expression, env)?, cases, default), )) }; lift_awaits_in_statement(node, env, f) } fn p_match_stmt<'a>( env: &mut Env<'a>, pos: Pos, c: &'a MatchStatementChildren<'a, PositionedToken<'a>, PositionedValue<'a>>, node: S<'a>, ) -> Result<ast::Stmt> { let f = |env: &mut Env<'a>| -> Result<ast::Stmt> { let expr = p_expr(&c.expression, env)?; let arms = could_map(&c.arms, env, p_match_stmt_arm)?; Ok(ast::Stmt::new( pos, ast::Stmt_::Match(Box::new(ast::StmtMatch { expr, arms })), )) }; lift_awaits_in_statement(node, env, f) } fn p_match_stmt_arm<'a>(node: S<'a>, env: &mut Env<'a>) -> Result<ast::StmtMatchArm> { let c = match &node.children { MatchStatementArm(c) => c, _ => return missing_syntax("match statement", node, env), }; let pat = p_pat(&c.pattern, env)?; let body = p_block(true /* remove noop */, &c.body, env)?; Ok(ast::StmtMatchArm { pat, body }) } fn p_pat<'a>(node: S<'a>, env: &mut Env<'a>) -> Result<ast::Pattern> { let pos = p_pos(node, env); match &node.children { VariablePattern(c) => p_variable_pat(env, pos, c), RefinementPattern(c) => p_refinement_pat(env, pos, c), ConstructorPattern(c) => { // Constructor patterns are not yet supported. Wildcard patterns // are, and they're represented in the CST as constructor patterns // with no members. 
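            // A nonempty member list means a real (still unsupported)
            // constructor pattern rather than a wildcard.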
if !c.members.is_missing() { raise_parsing_error(node, env, &syntax_error::destructuring_patterns_nyi); } if token_kind(&c.constructor) == Some(TK::Name) { let name = c.constructor.text(env.source_text()); if !name.starts_with('_') { raise_parsing_error_pos(&pos, env, &syntax_error::wildcard_underscore(name)); } } else { raise_parsing_error(node, env, &syntax_error::constructor_patterns_nyi); } Ok(ast::Pattern::PVar(Box::new(ast::PatVar { pos, id: None }))) } _ => missing_syntax("pattern", node, env), } } fn p_variable_or_wildcard<'a>( pos: Pos, name: S<'a>, env: &mut Env<'a>, ) -> Result<Option<ast::Lid>> { match token_kind(name) { Some(TK::Variable) => { raise_parsing_error_pos(&pos, env, &syntax_error::variable_patterns_nyi); Ok(Some(lid_from_pos_name(pos, name, env)?)) } Some(TK::Name) => { let name = name.text(env.source_text()); if !name.starts_with('_') { raise_parsing_error_pos(&pos, env, &syntax_error::wildcard_underscore(name)); } Ok(None) } _ => missing_syntax("variable or wildcard", name, env), } } fn p_variable_pat<'a>( env: &mut Env<'a>, pos: Pos, c: &'a VariablePatternChildren<'_, PositionedToken<'_>, PositionedValue<'_>>, ) -> Result<ast::Pattern> { Ok(ast::Pattern::PVar(Box::new(ast::PatVar { pos: pos.clone(), id: p_variable_or_wildcard(pos, &c.variable, env)?, }))) } fn p_refinement_pat<'a>( env: &mut Env<'a>, pos: Pos, c: &'a RefinementPatternChildren<'_, PositionedToken<'_>, PositionedValue<'_>>, ) -> Result<ast::Pattern> { Ok(ast::Pattern::PRefinement(Box::new(ast::PatRefinement { pos: pos.clone(), id: p_variable_or_wildcard(pos, &c.variable, env)?, hint: p_hint(&c.specifier, env)?, }))) } fn p_markup<'a>(node: S<'a>, env: &mut Env<'a>) -> Result<ast::Stmt> { match &node.children { MarkupSection(c) => { let markup_hashbang = &c.hashbang; let markup_suffix = &c.suffix; let pos = p_pos(node, env); let f = pos.filename(); let expected_suffix_offset = if markup_hashbang.is_missing() { 0 } else { markup_hashbang.width() + 1 /* for newline */ }; if (f.has_extension("hack") || f.has_extension("hackpartial")) && !(markup_suffix.is_missing()) { let ext = f.path().extension().unwrap(); // has_extension ensures this is a Some raise_parsing_error(node, env, &syntax_error::error1060(ext.to_str().unwrap())); } else if f.has_extension("php") && !markup_suffix.is_missing() && markup_suffix.offset() != Some(expected_suffix_offset) { raise_parsing_error(markup_suffix, env, &syntax_error::error1001); } let stmt_ = ast::Stmt_::mk_markup((pos.clone(), text(markup_hashbang, env))); Ok(ast::Stmt::new(pos, stmt_)) } _ => missing_syntax("XHP markup node", node, env), } } fn p_declare_local_stmt<'a>( env: &mut Env<'a>, pos: Pos, c: &'a DeclareLocalStatementChildren<'a, PositionedToken<'a>, PositionedValue<'a>>, ) -> Result<ast::Stmt> { use ast::Stmt; use ast::Stmt_ as S_; let var = lid_from_pos_name(pos.clone(), &c.variable, env)?; let hint = p_hint(&c.type_, env)?; if let SimpleInitializer(c) = c.initializer.children { let expr_tmp = p_expr(&c.value, env)?; Ok(Stmt::new( pos, S_::mk_declare_local(var, hint, Some(expr_tmp)), )) } else { assert!(c.initializer.is_missing()); Ok(Stmt::new(pos, S_::mk_declare_local(var, hint, None))) } } fn p_modifiers<'a, F: Fn(R, modifier::Kind) -> R, R>( on_kind: F, mut init: R, node: S<'a>, env: &mut Env<'a>, ) -> (modifier::KindSet, R) { let mut kind_set = modifier::KindSet::new(); for n in node.syntax_node_to_list_skip_separator() { let token_kind = token_kind(n).and_then(modifier::from_token_kind); match token_kind { Some(kind) => { kind_set.add(kind); init 
= on_kind(init, kind); } _ => { raise_missing_syntax("kind", n, env); } } } (kind_set, init) } fn p_kinds<'a>(node: S<'a>, env: &mut Env<'a>) -> modifier::KindSet { p_modifiers(|_, _| {}, (), node, env).0 } /// Apply `f` to every item in `node`, and build a vec of the values returned. fn could_map<'a, R, F>(node: S<'a>, env: &mut Env<'a>, f: F) -> Result<Vec<R>> where F: Fn(S<'a>, &mut Env<'a>) -> Result<R>, { let nodes = node.syntax_node_to_list_skip_separator(); let (min, _) = nodes.size_hint(); let mut v = Vec::with_capacity(min); for n in nodes { v.push(f(n, env)?); } Ok(v) } fn could_map_emit_error<'a, R, F>(node: S<'a>, env: &mut Env<'a>, f: F) -> Vec<R> where F: Fn(S<'a>, &mut Env<'a>) -> Result<R>, { let mut v = vec![]; for n in node.syntax_node_to_list_skip_separator() { match f(n, env) { Ok(value) => { v.push(value); } Err(e) => { emit_error(e, env); } } } v } fn p_visibility<'a>(node: S<'a>, env: &mut Env<'a>) -> Option<ast::Visibility> { let first_vis = |r: Option<ast::Visibility>, kind| r.or_else(|| modifier::to_visibility(kind)); p_modifiers(first_vis, None, node, env).1 } fn p_visibility_last_win<'a>(node: S<'a>, env: &mut Env<'a>) -> Option<ast::Visibility> { let last_vis = |r, kind| modifier::to_visibility(kind).or(r); p_modifiers(last_vis, None, node, env).1 } fn p_visibility_last_win_or<'a>( node: S<'a>, env: &mut Env<'a>, default: ast::Visibility, ) -> ast::Visibility { p_visibility_last_win(node, env).unwrap_or(default) } fn has_soft(attrs: &[ast::UserAttribute]) -> bool { attrs.iter().any(|attr| attr.name.1 == special_attrs::SOFT) } fn soften_hint(attrs: &[ast::UserAttribute], hint: ast::Hint) -> ast::Hint { if has_soft(attrs) { ast::Hint::new(hint.0.clone(), ast::Hint_::Hsoft(hint)) } else { hint } } fn strip_ns(name: &str) -> &str { match name.chars().next() { Some('\\') => &name[1..], _ => name, } } // The contexts `ctx $f`, `$a::C`, and `T::C` all depend on the ability to generate a backing tparam // on the function or method. The `this::C` context does not, so it may appear in more places. fn is_polymorphic_context<'a>(env: &mut Env<'a>, hint: &ast::Hint, ignore_this: bool) -> bool { use ast::Hint_::Haccess; use ast::Hint_::Happly; use ast::Hint_::HfunContext; use ast::Hint_::Hvar; match *hint.1 { HfunContext(_) => true, Haccess(ref root, _) => match &*root.1 { Happly(oxidized::ast::Id(_, id), _) => { let s = id.as_str(); /* TODO(coeffects) There is an opportunity to represent this structurally * in the AST if we refactor so generic hints lower as Habstr instead of * Happly, like we do in the direct decl parser. 
*/ (strip_ns(s) == sn::typehints::THIS && !ignore_this) || env.fn_generics_mut().contains_key(s) || env.cls_generics_mut().contains_key(s) } Hvar(_) => true, _ => false, }, _ => false, } } fn has_polymorphic_context<'a>(env: &mut Env<'a>, contexts: Option<&ast::Contexts>) -> bool { if let Some(ast::Contexts(_, ref context_hints)) = contexts { return context_hints .iter() .any(|c| is_polymorphic_context(env, c, false)); } else { false } } fn has_any_policied_context(contexts: Option<&ast::Contexts>) -> bool { if let Some(ast::Contexts(_, ref context_hints)) = contexts { return context_hints.iter().any(|hint| match &*hint.1 { ast::Hint_::Happly(ast::Id(_, id), _) => sn::coeffects::is_any_zoned(id), _ => false, }); } else { false } } fn has_any_policied_or_defaults_context(contexts: Option<&ast::Contexts>) -> bool { if let Some(ast::Contexts(_, ref context_hints)) = contexts { return context_hints.iter().any(|hint| match &*hint.1 { ast::Hint_::Happly(ast::Id(_, id), _) => sn::coeffects::is_any_zoned_or_defaults(id), _ => false, }); } else { true } } fn has_any_context(haystack: Option<&ast::Contexts>, needles: Vec<&str>) -> bool { if let Some(ast::Contexts(_, ref context_hints)) = haystack { return context_hints.iter().any(|hint| match &*hint.1 { ast::Hint_::Happly(ast::Id(_, id), _) => needles.iter().any(|&context| id == context), _ => false, }); } else { true } } fn contexts_cannot_access_ic(haystack: Option<&ast::Contexts>) -> bool { if let Some(ast::Contexts(_, ref context_hints)) = haystack { return context_hints.iter().all(|hint| match &*hint.1 { ast::Hint_::Happly(ast::Id(_, id), _) => { sn::coeffects::is_any_without_implicit_policy_or_unsafe(id) } _ => false, }); } else { false // no context list -> implicit [defaults] } } // For polymorphic context with form `ctx $f` // require that `(function (ts)[_]: t) $f` exists // rewrite as `(function (ts)[ctx $f]: t) $f` // add a type parameter named "T/[ctx $f]" fn rewrite_fun_ctx<'a>( env: &mut Env<'a>, tparams: &mut Vec<ast::Tparam>, hint: &mut ast::Hint, name: &str, ) { use ast::Hint_; use ast::ReifyKind; use ast::Variance; let mut invalid = |p| raise_parsing_error_pos(p, env, &syntax_error::ctx_fun_invalid_type_hint(name)); match *hint.1 { Hint_::Hfun(ref mut hf) => { if let Some(ast::Contexts(ref p, ref mut hl)) = &mut hf.ctxs { if let [ref mut h] = *hl.as_mut_slice() { if let Hint_::Hwildcard = &*h.1 { *h.1 = Hint_::HfunContext(name.to_string()); tparams.push(ast::Tparam { variance: Variance::Invariant, name: ast::Id(h.0.clone(), format!("T/[ctx {}]", name)), parameters: vec![], constraints: vec![], reified: ReifyKind::Erased, user_attributes: Default::default(), }); } else { invalid(&h.0); } } else { invalid(p); } } else { invalid(&hint.0); } } Hint_::Hlike(ref mut h) | Hint_::Hoption(ref mut h) => { rewrite_fun_ctx(env, tparams, h, name) } Hint_::Happly(ast::Id(_, ref type_name), ref mut targs) if type_name == special_typehints::SUPPORTDYN => { if let Some(ref mut h) = targs.first_mut() { rewrite_fun_ctx(env, tparams, h, name) } else { invalid(&hint.0) } } _ => invalid(&hint.0), } } fn rewrite_effect_polymorphism<'a>( env: &mut Env<'a>, params: &mut [ast::FunParam], tparams: &mut Vec<ast::Tparam>, contexts: Option<&ast::Contexts>, where_constraints: &mut Vec<ast::WhereConstraintHint>, ) { use ast::Hint; use ast::Hint_; use ast::ReifyKind; use ast::Variance; use Hint_::Haccess; use Hint_::Happly; use Hint_::HfunContext; use Hint_::Hvar; if !has_polymorphic_context(env, contexts) { return; } let ast::Contexts(ref _p, ref context_hints) 
= contexts.as_ref().unwrap(); let tp = |name, v| ast::Tparam { variance: Variance::Invariant, name, parameters: vec![], constraints: v, reified: ReifyKind::Erased, user_attributes: Default::default(), }; // For polymorphic context with form `$g::C` // if $g's type is not a type parameter // add one named "T/$g" constrained by $g's type // replace $g's type hint // let Tg denote $g's final type (must be a type parameter). // add a type parameter "T/[$g::C]" // add a where constraint T/[$g::C] = Tg :: C let rewrite_arg_ctx = |env: &mut Env<'a>, tparams: &mut Vec<ast::Tparam>, where_constraints: &mut Vec<ast::WhereConstraintHint>, hint: &mut Hint, param_pos: &Pos, name: &str, context_pos: &Pos, cst: &ast::Id| match *hint.1 { Happly(ast::Id(_, ref type_name), _) => { if !tparams.iter().any(|h| h.name.1 == *type_name) { // If the parameter is X $g, create tparam `T$g as X` and replace $g's type hint let id = ast::Id(param_pos.clone(), "T/".to_string() + name); tparams.push(tp( id.clone(), vec![(ast::ConstraintKind::ConstraintAs, hint.clone())], )); *hint = ast::Hint::new(param_pos.clone(), Happly(id, vec![])); }; let right = ast::Hint::new( context_pos.clone(), Haccess(hint.clone(), vec![cst.clone()]), ); let left_id = ast::Id(context_pos.clone(), format!("T/[{}::{}]", name, &cst.1)); tparams.push(tp(left_id.clone(), vec![])); let left = ast::Hint::new(context_pos.clone(), Happly(left_id, vec![])); where_constraints.push(ast::WhereConstraintHint( left, ast::ConstraintKind::ConstraintEq, right, )) } _ => raise_parsing_error_pos(&hint.0, env, &syntax_error::ctx_var_invalid_type_hint(name)), }; let mut hint_by_param: HashMap<&str, (&mut Option<ast::Hint>, &Pos, aast::IsVariadic)> = HashMap::default(); for param in params.iter_mut() { hint_by_param.insert( param.name.as_ref(), (&mut param.type_hint.1, &param.pos, param.is_variadic), ); } for context_hint in context_hints { match *context_hint.1 { HfunContext(ref name) => match hint_by_param.get_mut::<str>(name) { Some((hint_opt, param_pos, _is_variadic)) => match hint_opt { Some(_) if env.codegen() => {} Some(ref mut param_hint) => rewrite_fun_ctx(env, tparams, param_hint, name), None => raise_parsing_error_pos( param_pos, env, &syntax_error::ctx_var_missing_type_hint(name), ), }, None => raise_parsing_error_pos( &context_hint.0, env, &syntax_error::ctx_var_invalid_parameter(name), ), }, Haccess(ref root, ref csts) => { if let Hvar(ref name) = *root.1 { match hint_by_param.get_mut::<str>(name) { Some((hint_opt, param_pos, is_variadic)) => { if *is_variadic { raise_parsing_error_pos( param_pos, env, &syntax_error::ctx_var_variadic(name), ) } else { match hint_opt { Some(_) if env.codegen() => {} Some(ref mut param_hint) => { let mut rewrite = |h| { rewrite_arg_ctx( env, tparams, where_constraints, h, param_pos, name, &context_hint.0, &csts[0], ) }; match *param_hint.1 { Hint_::Hlike(ref mut h) => match *h.1 { Hint_::Hoption(ref mut hinner) => rewrite(hinner), _ => rewrite(h), }, Hint_::Hoption(ref mut h) => rewrite(h), _ => rewrite(param_hint), } } None => raise_parsing_error_pos( param_pos, env, &syntax_error::ctx_var_missing_type_hint(name), ), } } } None => raise_parsing_error_pos( &root.0, env, &syntax_error::ctx_var_invalid_parameter(name), ), } } else if let Happly(ast::Id(_, ref id), _) = *root.1 { // For polymorphic context with form `T::*::C` where `T` is a reified generic // add a type parameter "T/[T::*::C]" // add a where constraint T/[T::*::C] = T :: C let haccess_string = |id, csts: &Vec<aast::Sid>| { format!("{}::{}", id, 
csts.iter().map(|c| c.1.clone()).join("::")) }; match env.get_reification(id) { None => {} // not a generic Some(false) => raise_parsing_error_pos( &root.0, env, &syntax_error::ctx_generic_invalid(id, haccess_string(id, csts)), ), Some(true) if env.codegen() => {} Some(true) => { let left_id = ast::Id( context_hint.0.clone(), format!("T/[{}]", haccess_string(id, csts)), ); tparams.push(tp(left_id.clone(), vec![])); let left = ast::Hint::new(context_hint.0.clone(), Happly(left_id, vec![])); where_constraints.push(ast::WhereConstraintHint( left, ast::ConstraintKind::ConstraintEq, context_hint.clone(), )); } } } } _ => {} } } } fn p_fun_param_default_value<'a>(node: S<'a>, env: &mut Env<'a>) -> Result<Option<ast::Expr>> { match &node.children { SimpleInitializer(c) => map_optional(&c.value, env, p_expr), _ => Ok(None), } } fn p_param_kind<'a>(node: S<'a>, env: &mut Env<'a>) -> Result<ast::ParamKind> { match token_kind(node) { Some(TK::Inout) => Ok(ast::ParamKind::Pinout(p_pos(node, env))), None => Ok(ast::ParamKind::Pnormal), _ => missing_syntax("param kind", node, env), } } fn p_readonly<'a>(node: S<'a>, env: &mut Env<'a>) -> Result<ast::ReadonlyKind> { match token_kind(node) { Some(TK::Readonly) => Ok(ast::ReadonlyKind::Readonly), _ => missing_syntax("readonly", node, env), } } fn param_template<'a>(node: S<'a>, env: &Env<'_>) -> ast::FunParam { let pos = p_pos(node, env); ast::FunParam { annotation: (), type_hint: ast::TypeHint((), None), is_variadic: false, pos, name: text(node, env), expr: None, callconv: ast::ParamKind::Pnormal, readonly: None, user_attributes: Default::default(), visibility: None, } } fn p_fun_param<'a>(node: S<'a>, env: &mut Env<'a>) -> Result<ast::FunParam> { match &node.children { ParameterDeclaration(ParameterDeclarationChildren { attribute, visibility, call_convention, readonly, type_, name, default_value, }) => { let (is_variadic, name) = match &name.children { DecoratedExpression(DecoratedExpressionChildren { decorator, expression, }) => { let decorator = text_str(decorator, env); match &expression.children { DecoratedExpression(c) => { let nested_expression = &c.expression; let nested_decorator = text_str(&c.decorator, env); ( decorator == "..." || nested_decorator == "...", nested_expression, ) } _ => (decorator == "...", expression), } } _ => (false, name), }; let user_attributes = p_user_attributes(attribute, env); let pos = p_pos(name, env); let name = text(name, env); let hint = map_optional(type_, env, p_hint)?; let hint = hint.map(|h| soften_hint(&user_attributes, h)); if is_variadic && !user_attributes.is_empty() { raise_parsing_error( node, env, &syntax_error::no_attributes_on_variadic_parameter, ); } Ok(ast::FunParam { annotation: (), type_hint: ast::TypeHint((), hint), user_attributes, is_variadic, pos, name, expr: p_fun_param_default_value(default_value, env)?, callconv: p_param_kind(call_convention, env)?, readonly: map_optional(readonly, env, p_readonly)?, /* implicit field via constructor parameter. * This is always None except for constructors and the modifier * can be only Public or Protected or Private. */ visibility: p_visibility(visibility, env), }) } VariadicParameter(_) => { let mut param = param_template(node, env); param.is_variadic = true; Ok(param) } Token(_) if text_str(node, env) == "..." 
=> { let mut param = param_template(node, env); param.is_variadic = true; Ok(param) } _ => missing_syntax("function parameter", node, env), } } fn p_tconstraint_ty<'a>(node: S<'a>, env: &mut Env<'a>) -> Result<ast::Hint> { match &node.children { TypeConstraint(c) => p_hint(&c.type_, env), _ => missing_syntax("type constraint", node, env), } } fn p_tconstraint<'a>(node: S<'a>, env: &mut Env<'a>) -> Result<(ast::ConstraintKind, ast::Hint)> { match &node.children { TypeConstraint(c) => Ok(( match token_kind(&c.keyword) { Some(TK::As) => ast::ConstraintKind::ConstraintAs, Some(TK::Super) => ast::ConstraintKind::ConstraintSuper, Some(TK::Equal) => ast::ConstraintKind::ConstraintEq, _ => missing_syntax("constraint operator", &c.keyword, env)?, }, p_hint(&c.type_, env)?, )), _ => missing_syntax("type constraint", node, env), } } fn p_tparam<'a>(is_class: bool, node: S<'a>, env: &mut Env<'a>) -> Result<ast::Tparam> { match &node.children { TypeParameter(TypeParameterChildren { attribute_spec, reified, variance, name, param_params, constraints, }) => { let user_attributes = p_user_attributes(attribute_spec, env); let is_reified = !reified.is_missing(); let type_name = text(name, env); if is_class { env.cls_generics_mut().insert(type_name, is_reified); } else { // this is incorrect for type aliases, but it doesn't affect any check env.fn_generics_mut().insert(type_name, is_reified); } let variance = match token_kind(variance) { Some(TK::Plus) => ast::Variance::Covariant, Some(TK::Minus) => ast::Variance::Contravariant, _ => ast::Variance::Invariant, }; if is_reified && variance != ast::Variance::Invariant { raise_parsing_error(node, env, &syntax_error::non_invariant_reified_generic); } let reified = match (is_reified, has_soft(&user_attributes)) { (true, true) => ast::ReifyKind::SoftReified, (true, false) => ast::ReifyKind::Reified, _ => ast::ReifyKind::Erased, }; let parameters = p_tparam_l(is_class, param_params, env)?; Ok(ast::Tparam { variance, name: pos_name(name, env)?, parameters, constraints: could_map(constraints, env, p_tconstraint)?, reified, user_attributes, }) } _ => missing_syntax("type parameter", node, env), } } fn p_tparam_l<'a>(is_class: bool, node: S<'a>, env: &mut Env<'a>) -> Result<Vec<ast::Tparam>> { match &node.children { Missing => Ok(vec![]), TypeParameters(c) => could_map(&c.parameters, env, |n, e| p_tparam(is_class, n, e)), _ => missing_syntax("type parameter", node, env), } } /// Lowers multiple constraints into a hint pair (lower_bound, upper_bound) fn p_ctx_constraints<'a>( node: S<'a>, env: &mut Env<'a>, ) -> Result<(Option<ast::Hint>, Option<ast::Hint>)> { let constraints = could_map(node, env, |node, env| { if let ContextConstraint(c) = &node.children { if let Some(hint) = p_context_list_to_intersection( &c.ctx_list, env, "Contexts cannot be bounded by polymorphic contexts", ) { Ok(match token_kind(&c.keyword) { Some(TK::Super) => Either::Left(hint), Some(TK::As) => Either::Right(hint), _ => missing_syntax("constraint operator", &c.keyword, env)?, }) } else { missing_syntax("contexts", &c.keyword, env)? } } else { missing_syntax("context constraint", node, env)? 
} })?; let (super_constraint, as_constraint) = constraints.into_iter().partition_map(|x| x); let require_one = &mut |kind: &str, cs: Vec<_>| { if cs.len() > 1 { let msg = format!( "Multiple `{}` constraints on a ctx constant are not allowed", kind ); raise_parsing_error(node, env, &msg); } cs.into_iter().next() }; Ok(( require_one("super", super_constraint), require_one("as", as_constraint), )) } fn p_contexts<'a>( node: S<'a>, env: &mut Env<'a>, error_on_polymorphic: Option<(&str, bool)>, ) -> Option<ast::Contexts> { match &node.children { Missing => None, Contexts(c) => { let hints = could_map_emit_error(&c.types, env, |node, env| { let h = p_hint(node, env)?; if let Some((e, ignore_this)) = error_on_polymorphic { if is_polymorphic_context(env, &h, ignore_this) { raise_parsing_error(node, env, e); } } Ok(h) }); let pos = p_pos(node, env); let ctxs = ast::Contexts(pos, hints); Some(ctxs) } _ => { raise_missing_syntax("contexts", node, env); None } } } fn p_context_list_to_intersection<'a>( ctx_list: S<'a>, env: &mut Env<'a>, polymorphic_error: &str, ) -> Option<ast::Hint> { p_contexts(ctx_list, env, Some((polymorphic_error, false))) .map(|t| ast::Hint::new(t.0, ast::Hint_::Hintersection(t.1))) } fn p_fun_hdr<'a>(node: S<'a>, env: &mut Env<'a>) -> Result<FunHdr> { match &node.children { FunctionDeclarationHeader(FunctionDeclarationHeaderChildren { modifiers, name, where_clause, type_parameter_list, parameter_list, type_, contexts, readonly_return, .. }) => { if name.value.is_missing() { raise_parsing_error(name, env, &syntax_error::empty_method_name); } let kinds = p_kinds(modifiers, env); let has_async = kinds.has(modifier::ASYNC); let internal = kinds.has(modifier::INTERNAL); let readonly_this = if kinds.has(modifier::READONLY) { Some(ast::ReadonlyKind::Readonly) } else { None }; let readonly_ret = map_optional(readonly_return, env, p_readonly)?; let mut type_parameters = p_tparam_l(false, type_parameter_list, env)?; let mut parameters = match could_map(parameter_list, env, p_fun_param) { Ok(params) => params, Err(e) => { emit_error(e, env); vec![] } }; let contexts = p_contexts(contexts, env, None); let mut constrs = p_where_constraint(false, node, where_clause, env)?; rewrite_effect_polymorphism( env, &mut parameters, &mut type_parameters, contexts.as_ref(), &mut constrs, ); let return_type = map_optional(type_, env, p_hint)?; let suspension_kind = mk_suspension_kind_(has_async); let name = pos_name(name, env)?; let unsafe_contexts = contexts.clone(); Ok(FunHdr { suspension_kind, readonly_this, name, internal, constrs, type_parameters, parameters, contexts, unsafe_contexts, readonly_return: readonly_ret, return_type, }) } // TODO: this code seems to be dead, as the only callers of p_fun_hdr come from MethodishDeclaration and FunctionDeclaration LambdaSignature(LambdaSignatureChildren { parameters, contexts, type_, readonly_return, .. 
}) => { let readonly_ret = map_optional(readonly_return, env, p_readonly)?; let mut header = FunHdr::make_empty(env); header.parameters = could_map(parameters, env, p_fun_param)?; let contexts = p_contexts(contexts, env, None); let unsafe_contexts = contexts.clone(); header.contexts = contexts; header.unsafe_contexts = unsafe_contexts; header.return_type = map_optional(type_, env, p_hint)?; header.readonly_return = readonly_ret; Ok(header) } Token(_) => Ok(FunHdr::make_empty(env)), _ => missing_syntax("function header", node, env), } } fn p_fun_pos<'a>(node: S<'a>, env: &Env<'_>) -> Pos { let get_pos = |n: S<'a>, p: Pos| -> Pos { if let FunctionDeclarationHeader(c1) = &n.children { if !c1.keyword.is_missing() { return Pos::btw_nocheck(p_pos(&c1.keyword, env), p); } } p }; let p = p_pos(node, env); match &node.children { FunctionDeclaration(c) if env.codegen() => get_pos(&c.declaration_header, p), MethodishDeclaration(c) if env.codegen() => get_pos(&c.function_decl_header, p), _ => p, } } fn p_block<'a>(remove_noop: bool, node: S<'a>, env: &mut Env<'a>) -> Result<ast::Block> { let ast::Stmt(p, stmt_) = p_stmt(node, env)?; if let ast::Stmt_::Block(blk) = stmt_ { if remove_noop && blk.len() == 1 && blk[0].1.is_noop() { return Ok(Default::default()); } Ok(blk) } else { Ok(ast::Block(vec![ast::Stmt(p, stmt_)])) } } fn p_finally_block<'a>( remove_noop: bool, node: S<'a>, env: &mut Env<'a>, ) -> Result<ast::FinallyBlock> { let ast::Stmt(p, stmt_) = p_stmt(node, env)?; if let ast::Stmt_::Block(blk) = stmt_ { if remove_noop && blk.len() == 1 && blk[0].1.is_noop() { return Ok(Default::default()); } Ok(ast::FinallyBlock(blk.0)) } else { Ok(ast::FinallyBlock(vec![ast::Stmt(p, stmt_)])) } } fn mk_noop(env: &Env<'_>) -> ast::Stmt { ast::Stmt::noop(env.mk_none_pos()) } fn p_function_body<'a>(node: S<'a>, env: &mut Env<'a>) -> Result<ast::Block> { let mk_noop_result = |e: &Env<'_>| Ok(ast::Block(vec![mk_noop(e)])); let f = |e: &mut Env<'a>| -> Result<ast::Block> { match &node.children { Missing => Ok(Default::default()), CompoundStatement(c) => { let compound_statements = &c.statements.children; let compound_right_brace = &c.right_brace.children; match (compound_statements, compound_right_brace) { (Missing, Token(_)) => mk_noop_result(e), (SyntaxList(t), _) if t.len() == 1 && t[0].is_yield() => { e.saw_yield = true; mk_noop_result(e) } _ => { if !e.top_level_statements && ((e.file_mode() == file_info::Mode::Mhhi && !e.codegen()) || e.quick_mode) { mk_noop_result(e) } else { p_block(false /*remove noop*/, node, e) } } } } _ => { let f = |e: &mut Env<'a>| { let expr = p_expr(node, e)?; Ok(ast::Stmt::new( expr.1.clone(), ast::Stmt_::mk_return(Some(expr)), )) }; if is_simple_await_expression(node) { Ok(ast::Block(vec![f(e)?])) } else { Ok(ast::Block(vec![lift_awaits_in_statement(node, e, f)?])) } } } }; with_new_nonconcurrent_scope(env, f) } fn mk_suspension_kind<'a>(async_keyword: S<'a>) -> SuspensionKind { mk_suspension_kind_(!async_keyword.is_missing()) } fn mk_suspension_kind_(has_async: bool) -> SuspensionKind { if has_async { SuspensionKind::SKAsync } else { SuspensionKind::SKSync } } fn mk_fun_kind(suspension_kind: SuspensionKind, yield_: bool) -> ast::FunKind { use ast::FunKind::*; use SuspensionKind::*; match (suspension_kind, yield_) { (SKSync, true) => FGenerator, (SKAsync, true) => FAsyncGenerator, (SKSync, false) => FSync, (SKAsync, false) => FAsync, } } fn process_attribute_constructor_call<'a>( node: S<'a>, constructor_call_argument_list: S<'a>, constructor_call_type: S<'a>, env: &mut Env<'a>, 
) -> Result<ast::UserAttribute> { let name = pos_name(constructor_call_type, env)?; if name.1.eq_ignore_ascii_case("__reified") || name.1.eq_ignore_ascii_case("__hasreifiedparent") { raise_parsing_error(node, env, &syntax_error::reified_attribute); } else if name.1.eq_ignore_ascii_case(special_attrs::SOFT) && constructor_call_argument_list .syntax_node_to_list_skip_separator() .count() > 0 { raise_parsing_error(node, env, &syntax_error::soft_no_arguments); } else if sn::user_attributes::is_memoized(&name.1) { let list: Vec<_> = constructor_call_argument_list .syntax_node_to_list_skip_separator() .collect(); if let Some(first_arg) = list.first() { if !matches!(first_arg.children, EnumClassLabelExpression(_)) { raise_parsing_error( first_arg, env, &syntax_error::memoize_requires_label(&name.1), ); } } if list.len() > 1 { let ast::Id(_, first) = pos_name(list[0], env)?; let ast::Id(_, second) = pos_name(list[1], env)?; if first == "#SoftMakeICInaccessible" { if list.len() > 2 { raise_parsing_error( list[2], env, &syntax_error::memoize_invalid_arity(&name.1, 2, &first), ); } if second.parse::<u32>().is_err() { raise_parsing_error(list[1], env, &syntax_error::memoize_invalid_sample_rate); } } else { raise_parsing_error( list[1], env, &syntax_error::memoize_invalid_arity(&name.1, 1, &first), ); } } } let params = could_map(constructor_call_argument_list, env, |n, e| { is_valid_attribute_arg(n, e, &name.1); p_expr(n, e) })?; Ok(ast::UserAttribute { name, params }) } // Arguments to attributes must be literals (int, string, etc), collections // (eg vec, dict, keyset, etc), Foo::class strings, shapes, string // concatenations, or tuples. fn is_valid_attribute_arg<'a>(node: S<'a>, env: &mut Env<'a>, attr_name: &str) { let is_valid_list = |nodes: S<'a>, env: &mut Env<'a>| { let _ = could_map(nodes, env, |n, e| { is_valid_attribute_arg(n, e, attr_name); Ok(()) }); }; match &node.children { ParenthesizedExpression(c) => is_valid_attribute_arg(&c.expression, env, attr_name), // Normal literals (string, int, etc) LiteralExpression(_) => {} // Only allow enum class label syntax on __Memoize and __MemoizeLSB.
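// Illustrative example: `<<__Memoize(#KeyedByIC)>>` passes an enum class
// label as its argument; the label text is validated against the known
// memoize options. See sn::memoize_option and check_effect_memoized below.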
EnumClassLabelExpression(ecl) if sn::user_attributes::is_memoized(attr_name) => { if let Ok(ast::Id(_, label_name)) = pos_name(&ecl.expression, env) { if !sn::memoize_option::is_valid(&label_name) { raise_parsing_error(node, env, &syntax_error::memoize_invalid_label(attr_name)); } } } // ::class strings ScopeResolutionExpression(c) => { if let Some(TK::Class) = token_kind(&c.name) { } else { raise_parsing_error(node, env, &syntax_error::expression_as_attribute_arguments); } } // Negations PrefixUnaryExpression(c) => { is_valid_attribute_arg(&c.operand, env, attr_name); match token_kind(&c.operator) { Some(TK::Minus) => {} Some(TK::Plus) => {} _ => { raise_parsing_error(node, env, &syntax_error::expression_as_attribute_arguments) } } } // String concatenation BinaryExpression(c) => { if let Some(TK::Dot) = token_kind(&c.operator) { is_valid_attribute_arg(&c.left_operand, env, attr_name); is_valid_attribute_arg(&c.right_operand, env, attr_name); } else { raise_parsing_error(node, env, &syntax_error::expression_as_attribute_arguments); } } // Top-level Collections DarrayIntrinsicExpression(c) => is_valid_list(&c.members, env), DictionaryIntrinsicExpression(c) => is_valid_list(&c.members, env), KeysetIntrinsicExpression(c) => is_valid_list(&c.members, env), VarrayIntrinsicExpression(c) => is_valid_list(&c.members, env), VectorIntrinsicExpression(c) => is_valid_list(&c.members, env), ShapeExpression(c) => is_valid_list(&c.fields, env), TupleExpression(c) => is_valid_list(&c.items, env), // Collection Internals FieldInitializer(c) => { is_valid_attribute_arg(&c.name, env, attr_name); is_valid_attribute_arg(&c.value, env, attr_name); } ElementInitializer(c) => { is_valid_attribute_arg(&c.key, env, attr_name); is_valid_attribute_arg(&c.value, env, attr_name); } // Everything else is not allowed _ => raise_parsing_error(node, env, &syntax_error::expression_as_attribute_arguments), } } fn p_user_attribute<'a>(node: S<'a>, env: &mut Env<'a>) -> Result<ast::UserAttributes> { let p_attr = |n: S<'a>, e: &mut Env<'a>| -> Result<ast::UserAttribute> { match &n.children { ConstructorCall(c) => { process_attribute_constructor_call(node, &c.argument_list, &c.type_, e) } _ => missing_syntax("attribute", node, e), } }; match &node.children { FileAttributeSpecification(c) => could_map(&c.attributes, env, p_attr), OldAttributeSpecification(c) => could_map(&c.attributes, env, p_attr), AttributeSpecification(c) => could_map(&c.attributes, env, |n, e| match &n.children { Attribute(c) => p_attr(&c.attribute_name, e), _ => missing_syntax("attribute", node, e), }), _ => missing_syntax("attribute specification", node, env), } .map(ast::UserAttributes) } fn p_user_attributes<'a>(node: S<'a>, env: &mut Env<'a>) -> ast::UserAttributes { let attributes = could_map_emit_error(node, env, p_user_attribute); attributes.into_iter().flatten().collect() } /// Extract the URL in `<<__Docs("http://example.com")>>` if the __Docs attribute /// is present. fn p_docs_url<'a>(attrs: &ast::UserAttributes, env: &mut Env<'a>) -> Option<String> { let mut url = None; for attr in attrs { if attr.name.1 == sn::user_attributes::DOCS { match attr.params.as_slice() { [param] => match &param.2 { ast::Expr_::String(s) => match String::from_utf8(s.to_vec()) { Ok(s) => { url = Some(s); } Err(_) => raise_parsing_error_pos( &attr.name.0, env, "`__Docs` URLs must be valid UTF-8", ), }, _ => raise_parsing_error_pos( &attr.name.0, env, "`__Docs` URLs must be a string literal", ), }, _ => { // Wrong number of arguments to __Docs, // ignore. 
The attribute arity checks will tell // the user their code is wrong. } } } } url } fn map_yielding<'a, F, R>(node: S<'a>, env: &mut Env<'a>, p: F) -> Result<(R, bool)> where F: FnOnce(S<'a>, &mut Env<'a>) -> Result<R>, { let outer_saw_yield = env.saw_yield; env.saw_yield = false; let r = p(node, env); let saw_yield = env.saw_yield; env.saw_yield = outer_saw_yield; Ok((r?, saw_yield)) } fn mk_empty_ns_env(env: &Env<'_>) -> Arc<NamespaceEnv> { Arc::clone(&env.empty_ns_env) } fn extract_docblock<'a>(node: S<'a>, env: &Env<'_>) -> Option<DocComment> { #[derive(Copy, Clone, Eq, PartialEq)] enum ScanState { DocComment, EmbeddedCmt, EndDoc, EndEmbedded, Free, LineCmt, MaybeDoc, MaybeDoc2, SawSlash, } use ScanState::*; // `parse` mixes loop and recursion to use less stack space. fn parse( str: &str, start: usize, state: ScanState, idx: usize, ) -> Option<(usize, usize, String)> { let is_whitespace = |c| c == ' ' || c == '\t' || c == '\n' || c == '\r'; let mut s = (start, state, idx); let chars = str.as_bytes(); loop { if s.2 == str.len() { break None; } let next = s.2 + 1; match (s.1, chars[s.2] as char) { (LineCmt, '\n') => s = (next, Free, next), (EndEmbedded, '/') => s = (next, Free, next), (EndDoc, '/') => { let r = parse(str, next, Free, next); match r { d @ Some(_) => break d, None => break Some((s.0, s.2 + 1, String::from(&str[s.0..s.2 + 1]))), } } /* PHP has line comments delimited by a # */ (Free, '#') => s = (next, LineCmt, next), /* All other comment delimiters start with a / */ (Free, '/') => s = (s.2, SawSlash, next), /* After a / in trivia, we must see either another / or a * */ (SawSlash, '/') => s = (next, LineCmt, next), (SawSlash, '*') => s = (s.0, MaybeDoc, next), (MaybeDoc, '*') => s = (s.0, MaybeDoc2, next), (MaybeDoc, _) => s = (s.0, EmbeddedCmt, next), (MaybeDoc2, '/') => s = (next, Free, next), /* Doc comments have a space after the second star */ (MaybeDoc2, c) if is_whitespace(c) => s = (s.0, DocComment, s.2), (MaybeDoc2, _) => s = (s.0, EmbeddedCmt, next), (DocComment, '*') => s = (s.0, EndDoc, next), (DocComment, _) => s = (s.0, DocComment, next), (EndDoc, _) => s = (s.0, DocComment, next), /* A * without a / does not end an embedded comment */ (EmbeddedCmt, '*') => s = (s.0, EndEmbedded, next), (EndEmbedded, '*') => s = (s.0, EndEmbedded, next), (EndEmbedded, _) => s = (s.0, EmbeddedCmt, next), /* Whitespace skips everywhere else */ (_, c) if is_whitespace(c) => s = (s.0, s.1, next), /* When scanning comments, anything else is accepted */ (LineCmt, _) => s = (s.0, s.1, next), (EmbeddedCmt, _) => s = (s.0, s.1, next), _ => break None, } } } let str = node.leading_text(env.indexed_source_text.source_text()); parse(str, 0, Free, 0).map(|(start, end, txt)| { let anchor = node.leading_start_offset(); let pos = env .indexed_source_text .relative_pos(anchor + start, anchor + end) .into(); (pos, txt) }) } fn p_xhp_child<'a>(node: S<'a>, env: &mut Env<'a>) -> Result<ast::XhpChild> { use ast::XhpChild::*; use ast::XhpChildOp::*; match &node.children { Token(_) => pos_name(node, env).map(ChildName), PostfixUnaryExpression(c) => { let operand = p_xhp_child(&c.operand, env)?; let operator = match token_kind(&c.operator) { Some(TK::Question) => ChildQuestion, Some(TK::Plus) => ChildPlus, Some(TK::Star) => ChildStar, _ => missing_syntax("xhp children operator", node, env)?, }; Ok(ChildUnary(Box::new(operand), operator)) } BinaryExpression(c) => { let left = p_xhp_child(&c.left_operand, env)?; let right = p_xhp_child(&c.right_operand, env)?; Ok(ChildBinary(Box::new(left), 
Box::new(right))) } XHPChildrenParenthesizedList(c) => { let children: Result<Vec<_>, _> = c .xhp_children .syntax_node_to_list_skip_separator() .map(|c| p_xhp_child(c, env)) .collect(); Ok(ChildList(children?)) } _ => missing_syntax("xhp children", node, env), } } fn p_tconstraints_into_lower_and_upper<'a>( node: S<'a>, env: &mut Env<'a>, ) -> (Vec<ast::Hint>, Vec<ast::Hint>) { let mut lower = vec![]; let mut upper = vec![]; for constraint in node.syntax_node_to_list_skip_separator() { let (kind, ty) = match p_tconstraint(constraint, env) { Ok(v) => v, Err(e) => { emit_error(e, env); continue; } }; match kind { ast::ConstraintKind::ConstraintAs => upper.push(ty), ast::ConstraintKind::ConstraintSuper => lower.push(ty), _ => (), }; } (lower, upper) } fn merge_constraints( mut constraints: Vec<ast::Hint>, f: fn(Vec<ast::Hint>) -> ast::Hint_, ) -> Option<ast::Hint> { if constraints.len() == 1 { constraints.pop() } else { #[allow(clippy::manual_map)] // map doesn't allow moving out of borrowed constraints match constraints.first() { None => None, // no bounds Some(fst) => Some(ast::Hint::new(fst.0.clone(), f(constraints))), } } } fn p_method_vis<'a>(node: S<'a>, name_pos: &Pos, env: &mut Env<'a>) -> ast::Visibility { match p_visibility_last_win(node, env) { None => { let first_token_pos = match node.syntax_node_to_list_skip_separator().next() { Some(token_node) => p_pos(token_node, env), None => name_pos.clone(), }; raise_hh_error( env, Naming::method_needs_visibility(first_token_pos, name_pos.clone()), ); ast::Visibility::Public } Some(v) => v, } } fn has_fun_header( m: &MethodishDeclarationChildren<'_, PositionedToken<'_>, PositionedValue<'_>>, ) -> bool { matches!( m.function_decl_header.children, FunctionDeclarationHeader(_) ) } fn p_xhp_class_attr<'a>(node: S<'a>, env: &mut Env<'a>) -> Result<Either<ast::XhpAttr, ast::Hint>> { let mk_attr_use = |n: S<'a>, env: &mut Env<'a>| { Ok(Either::Right(ast::Hint( p_pos(n, env), Box::new(ast::Hint_::Happly(pos_name(n, env)?, vec![])), ))) }; match &node.children { XHPClassAttribute(c) => { let ast::Id(p, name) = pos_name(&c.name, env)?; if let TypeConstant(_) = &c.type_.children { if env.is_typechecker() { raise_parsing_error( &c.type_, env, &syntax_error::xhp_class_attribute_type_constant, ) } } let req = match &c.required.children { XHPRequired(_) => Some(ast::XhpAttrTag::Required), XHPLateinit(_) => Some(ast::XhpAttrTag::LateInit), _ => None, }; let pos = if c.initializer.is_missing() { p.clone() } else { Pos::btw(&p, &p_pos(&c.initializer, env)).map_err(|message| { Error::ParsingError { message, pos: p.clone(), } })? }; let (hint, like, enum_values, enum_) = match &c.type_.children { XHPEnumType(c1) => { let p = p_pos(&c.type_, env); let like = match &c1.like.children { Missing => None, _ => Some(p_pos(&c1.like, env)), }; let vals = could_map(&c1.values, env, p_expr)?; let mut enum_vals = vec![]; for val in vals.clone() { match val { ast::Expr(_, _, Expr_::String(xev)) => { enum_vals.push(ast::XhpEnumValue::XEVString(xev.to_string())) } ast::Expr(_, _, Expr_::Int(xev)) => match xev.parse() { Ok(n) => enum_vals.push(ast::XhpEnumValue::XEVInt(n)), Err(_) => // Since we have parse checks for // malformed integer literals already, // we assume this won't happen and ignore // the case. 
{} }, _ => {} } } (None, like, enum_vals, Some((p, vals))) } _ => (Some(p_hint(&c.type_, env)?), None, vec![], None), }; let init_expr = map_optional(&c.initializer, env, p_simple_initializer)?; let xhp_attr = ast::XhpAttr( ast::TypeHint((), hint.clone()), ast::ClassVar { final_: false, xhp_attr: Some(ast::XhpAttrInfo { like, tag: req, enum_values, }), abstract_: false, readonly: false, visibility: ast::Visibility::Public, type_: ast::TypeHint((), hint), id: ast::Id(p, String::from(":") + &name), expr: init_expr, user_attributes: Default::default(), doc_comment: None, is_promoted_variadic: false, is_static: false, span: pos, }, req, enum_, ); Ok(Either::Left(xhp_attr)) } XHPSimpleClassAttribute(c) => mk_attr_use(&c.type_, env), Token(_) => mk_attr_use(node, env), _ => missing_syntax("XHP attribute", node, env), } } fn p_type_constant<'a>( node: S<'a>, doc_comment_opt: Option<DocComment>, env: &mut Env<'a>, cls: &mut ast::Class_, ) { match &node.children { TypeConstDeclaration(c) => { use ast::ClassTypeconst::TCAbstract; use ast::ClassTypeconst::TCConcrete; if !c.type_parameters.is_missing() { raise_parsing_error(node, env, &syntax_error::tparams_in_tconst); } let user_attributes = p_user_attributes(&c.attribute_spec, env); let type__ = map_optional_emit_error(&c.type_specifier, env, p_hint) .map(|hint| soften_hint(&user_attributes, hint)); let kinds = p_kinds(&c.modifiers, env); let name = match pos_name(&c.name, env) { Ok(name) => name, Err(e) => { emit_error(e, env); return; } }; // desugar multiple same-kinded constraints as follows: let (lower, upper) = p_tconstraints_into_lower_and_upper(&c.type_constraints, env); // `as num as T1` -> `as (num & T1)` let as_constraint = merge_constraints(upper, ast::Hint_::Hintersection); // `super int super T2` -> `super (int | T2)` let super_constraint = merge_constraints(lower, ast::Hint_::Hunion); let span = p_pos(node, env); let has_abstract = kinds.has(modifier::ABSTRACT); let kind = if has_abstract { TCAbstract(ast::ClassAbstractTypeconst { as_constraint, super_constraint, default: type__, }) } else if let Some(type_) = type__ { if env.is_typechecker() && (as_constraint.is_some() || super_constraint.is_some()) { raise_hh_error( env, NastCheck::partially_abstract_typeconst_definition( name.0.clone(), if as_constraint.is_some() { "as" } else { "super" }, ), ); } TCConcrete(ast::ClassConcreteTypeconst { c_tc_type: type_ }) } else { raise_hh_error(env, NastCheck::not_abstract_without_typeconst(name.0)); raise_missing_syntax("value for the type constant", node, env); return; }; cls.typeconsts.push(ast::ClassTypeconstDef { name, kind, user_attributes, span, doc_comment: doc_comment_opt, is_ctx: false, }) } _ => {} } } /// Given an FFP `node` that represents a class element (e.g. a /// property, a method or a class constant), lower it to the /// equivalent AAST representation and store in `class`. /// /// If we encounter an error, write the error to `env` and don't add /// anything to `class`.
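/// A rough sketch of where each element kind lands (illustrative, not
/// exhaustive):
///
/// ```text
/// const int X = 1;             -> class.consts
/// const type T = int;          -> class.typeconsts
/// private int $x = 0;          -> class.vars
/// public function f(): void {} -> class.methods
/// use SomeTrait;               -> class.uses
/// ```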
fn p_class_elt<'a>(class: &mut ast::Class_, node: S<'a>, env: &mut Env<'a>) { let doc_comment_opt = extract_docblock(node, env); match &node.children { ConstDeclaration(c) => { let user_attributes = p_user_attributes(&c.attribute_spec, env); let kinds = p_kinds(&c.modifiers, env); let has_abstract = kinds.has(modifier::ABSTRACT); // TODO: wrap `type_` and `doc_comment` in `Rc` in ClassConst to avoid clones let type_ = map_optional_emit_error(&c.type_specifier, env, p_hint); let span = p_pos(node, env); let mut class_consts = could_map_emit_error(&c.declarators, env, |n, e| match &n.children { ConstantDeclarator(c) => { let id = pos_name(&c.name, e)?; let pos = &id.0; use aast::ClassConstKind::*; let kind = if has_abstract { CCAbstract(map_optional(&c.initializer, e, p_simple_initializer)?) } else { CCConcrete(p_const_value(&c.initializer, e, pos.clone())?) }; Ok(ast::ClassConst { user_attributes: user_attributes.clone(), type_: type_.clone(), id, kind, span: span.clone(), doc_comment: doc_comment_opt.clone(), }) } _ => missing_syntax("constant declarator", n, e), }); class.consts.append(&mut class_consts) } TypeConstDeclaration(_) => p_type_constant(node, doc_comment_opt, env, class), ContextConstDeclaration(c) => { use ast::ClassTypeconst::TCAbstract; use ast::ClassTypeconst::TCConcrete; if !c.type_parameters.is_missing() { raise_parsing_error(node, env, &syntax_error::tparams_in_tconst); } let name = match pos_name(&c.name, env) { Ok(name) => name, Err(e) => { emit_error(e, env); return; } }; let context = p_context_list_to_intersection( &c.ctx_list, env, "Context constants cannot alias polymorphic contexts", ); if let Some(ref hint) = context { use ast::Hint_::Happly; use ast::Hint_::Hintersection; let ast::Hint(_, ref h) = hint; if let Hintersection(hl) = &**h { for h in hl { let ast::Hint(_, ref h) = h; if let Happly(oxidized::ast::Id(_, id), _) = &**h { if id.as_str().ends_with("_local") { raise_parsing_error( &c.ctx_list, env, "Local contexts on ctx constants are not allowed", ); } } } } } let span = p_pos(node, env); let kinds = p_kinds(&c.modifiers, env); let has_abstract = kinds.has(modifier::ABSTRACT); let (super_constraint, as_constraint) = p_ctx_constraints(&c.constraint, env).unwrap_or((None, None)); let kind = if has_abstract { TCAbstract(ast::ClassAbstractTypeconst { as_constraint, super_constraint, default: context, }) } else if let Some(c_tc_type) = context { if env.is_typechecker() && (super_constraint.is_some() || as_constraint.is_some()) { raise_parsing_error( node, env, "Constraints on a context constant requires it to be abstract", ) }; TCConcrete(ast::ClassConcreteTypeconst { c_tc_type }) } else { raise_hh_error(env, NastCheck::not_abstract_without_typeconst(name.0)); raise_missing_syntax("value for the context constant", node, env); return; }; class.typeconsts.push(ast::ClassTypeconstDef { name, kind, user_attributes: Default::default(), span, doc_comment: doc_comment_opt, is_ctx: true, }); } PropertyDeclaration(c) => { let user_attributes = p_user_attributes(&c.attribute_spec, env); let type_ = map_optional_emit_error(&c.type_, env, p_hint) .map(|t| soften_hint(&user_attributes, t)); let kinds = p_kinds(&c.modifiers, env); let vis = p_visibility_last_win_or(&c.modifiers, env, ast::Visibility::Public); let doc_comment = if env.quick_mode { None } else { doc_comment_opt }; let name_exprs = could_map_emit_error(&c.declarators, env, |n, e| match &n.children { PropertyDeclarator(c) => { let name = pos_name_(&c.name, e, Some('$'))?; let pos = p_pos(n, e); let expr =
map_optional(&c.initializer, e, p_simple_initializer)?; Ok((pos, name, expr)) } _ => missing_syntax("property declarator", n, e), }); for (i, name_expr) in name_exprs.into_iter().enumerate() { class.vars.push(ast::ClassVar { final_: kinds.has(modifier::FINAL), xhp_attr: None, abstract_: kinds.has(modifier::ABSTRACT), readonly: kinds.has(modifier::READONLY), visibility: vis, type_: ast::TypeHint((), type_.clone()), id: name_expr.1, expr: name_expr.2, user_attributes: user_attributes.clone(), doc_comment: if i == 0 { doc_comment.clone() } else { None }, is_promoted_variadic: false, is_static: kinds.has(modifier::STATIC), span: name_expr.0, }); } } MethodishDeclaration(c) if has_fun_header(c) => { // keep cls_generics *env.fn_generics_mut() = HashMap::default(); let classvar_init = |param: &ast::FunParam| -> (ast::Stmt, ast::ClassVar) { let cvname = drop_prefix(&param.name, '$'); let p = &param.pos; let span = match &param.expr { Some(ast::Expr(_, pos_end, _)) => { Pos::btw(p, pos_end).unwrap_or_else(|_| p.clone()) } _ => p.clone(), }; let e = |expr_: Expr_| -> ast::Expr { ast::Expr::new((), p.clone(), expr_) }; let lid = |s: &str| -> ast::Lid { ast::Lid(p.clone(), (0, s.to_string())) }; ( ast::Stmt::new( p.clone(), ast::Stmt_::mk_expr(e(Expr_::mk_binop(Binop { bop: ast::Bop::Eq(None), lhs: e(Expr_::mk_obj_get( e(Expr_::mk_lvar(lid(special_idents::THIS))), e(Expr_::mk_id(ast::Id(p.clone(), cvname.to_string()))), ast::OgNullFlavor::OGNullthrows, ast::PropOrMethod::IsProp, )), rhs: e(Expr_::mk_lvar(lid(&param.name))), }))), ), ast::ClassVar { final_: false, xhp_attr: None, abstract_: false, // We use the param readonlyness here to represent the // ClassVar's readonlyness once lowered // TODO(jjwu): Convert this to an enum when we support // multiple types of readonlyness readonly: param.readonly.is_some(), visibility: param.visibility.unwrap(), type_: param.type_hint.clone(), id: ast::Id(p.clone(), cvname.to_string()), expr: None, user_attributes: param.user_attributes.clone(), doc_comment: None, is_promoted_variadic: param.is_variadic, is_static: false, span, }, ) }; let header = &c.function_decl_header; let h = match &header.children { FunctionDeclarationHeader(h) => h, _ => panic!(), }; let hdr = match p_fun_hdr(header, env) { Ok(hdr) => hdr, Err(e) => { emit_error(e, env); return; } }; let (mut member_init, mut member_def): (Vec<ast::Stmt>, Vec<ast::ClassVar>) = hdr .parameters .iter() .filter_map(|p| p.visibility.map(|_| classvar_init(p))) .unzip(); let kinds = p_kinds(&h.modifiers, env); let visibility = p_method_vis(&h.modifiers, &hdr.name.0, env); let is_static = kinds.has(modifier::STATIC); let readonly_this = kinds.has(modifier::READONLY); *env.in_static_method() = is_static; check_effect_polymorphic_reification(hdr.contexts.as_ref(), env, node); let (mut body, body_has_yield) = match map_yielding(&c.function_body, env, p_function_body) { Ok(value) => value, Err(e) => { emit_error(e, env); return; } }; if env.codegen() { member_init.reverse(); } member_init.append(&mut body.0); let body = ast::Block(member_init); *env.in_static_method() = false; let is_abstract = kinds.has(modifier::ABSTRACT); let is_external = !is_abstract && c.function_body.is_external(); let user_attributes = p_user_attributes(&c.attribute, env); check_effect_memoized(hdr.contexts.as_ref(), &user_attributes, "method", env); let method = ast::Method_ { span: p_fun_pos(node, env), annotation: (), final_: kinds.has(modifier::FINAL), readonly_this, abstract_: is_abstract, static_: is_static, name: hdr.name, visibility, 
tparams: hdr.type_parameters, where_constraints: hdr.constrs, params: hdr.parameters, ctxs: hdr.contexts, unsafe_ctxs: hdr.unsafe_contexts, body: ast::FuncBody { fb_ast: body }, fun_kind: mk_fun_kind(hdr.suspension_kind, body_has_yield), user_attributes, readonly_ret: hdr.readonly_return, ret: ast::TypeHint((), hdr.return_type), external: is_external, doc_comment: doc_comment_opt, }; class.vars.append(&mut member_def); class.methods.push(method) } TraitUse(c) => { let mut uses = could_map_emit_error(&c.names, env, p_hint); class.uses.append(&mut uses) } RequireClause(c) => { use aast::RequireKind::*; use ast::Hint_; let hint = match p_hint(&c.name, env) { Ok(hint) => hint, Err(e) => { emit_error(e, env); return; } }; let require_kind = match token_kind(&c.kind) { Some(TK::Implements) => Some(RequireImplements), Some(TK::Extends) => Some(RequireExtends), Some(TK::Class) => { let ast::Hint(_pos, hint_) = &hint; match hint_.as_ref() { Hint_::Happly(_, v) => { if !(v.is_empty()) { /* in a `require class t;` trait constraint, t must be a non-generic class name */ raise_parsing_error( &c.name, env, &syntax_error::require_class_applied_to_generic, ) }; Some(RequireClass) } _ => { raise_missing_syntax("class name", &c.name, env); None } } } _ => { raise_missing_syntax("trait require kind", &c.kind, env); None } }; if let Some(require_kind) = require_kind { class.reqs.push(ClassReq(hint, require_kind)); } } XHPClassAttributeDeclaration(c) => { let attrs = could_map_emit_error(&c.attributes, env, p_xhp_class_attr); for attr in attrs.into_iter() { match attr { Either::Left(attr) => class.xhp_attrs.push(attr), Either::Right(xhp_attr_use) => class.xhp_attr_uses.push(xhp_attr_use), } } } XHPChildrenDeclaration(c) => { let p = p_pos(node, env); match p_xhp_child(&c.expression, env) { Ok(child) => { class.xhp_children.push((p, child)); } Err(e) => { emit_error(e, env); } } } XHPCategoryDeclaration(c) => { let p = p_pos(node, env); let categories = could_map_emit_error(&c.categories, env, |n, e| p_pstring_(n, e, Some('%'))); if let Some((_, cs)) = &class.xhp_category { if let Some(category) = cs.first() { raise_hh_error(env, NastCheck::multiple_xhp_category(category.0.clone())) } } class.xhp_category = Some((p, categories)) } _ => raise_missing_syntax("class element", node, env), } } fn contains_class_body<'a>( c: &ClassishDeclarationChildren<'_, PositionedToken<'a>, PositionedValue<'a>>, ) -> bool { matches!(&c.body.children, ClassishBody(_)) } fn p_where_constraint<'a>( is_class: bool, parent: S<'a>, node: S<'a>, env: &mut Env<'a>, ) -> Result<Vec<ast::WhereConstraintHint>> { match &node.children { Missing => Ok(vec![]), WhereClause(c) => { let f = |n: S<'a>, e: &mut Env<'a>| -> Result<ast::WhereConstraintHint> { match &n.children { WhereConstraint(c) => { use ast::ConstraintKind::*; let l = p_hint(&c.left_type, e)?; let o = &c.operator; let o = match token_kind(o) { Some(TK::Equal) => ConstraintEq, Some(TK::As) => ConstraintAs, Some(TK::Super) => ConstraintSuper, _ => missing_syntax("constraint operator", o, e)?, }; Ok(ast::WhereConstraintHint(l, o, p_hint(&c.right_type, e)?)) } _ => missing_syntax("where constraint", n, e), } }; c.constraints .syntax_node_to_list_skip_separator() .map(|n| f(n, env)) .collect() } _ => { if is_class { missing_syntax("classish declaration constraints", parent, env) } else { missing_syntax("function header constraints", parent, env) } } } } fn p_namespace_use_kind<'a>(kind: S<'a>, env: &mut Env<'a>) -> Result<ast::NsKind> { use ast::NsKind::*; match &kind.children { 
Missing => Ok(NSClassAndNamespace), _ => match token_kind(kind) { Some(TK::Namespace) => Ok(NSNamespace), Some(TK::Type) => Ok(NSClass), Some(TK::Function) => Ok(NSFun), Some(TK::Const) => Ok(NSConst), _ => missing_syntax("namespace use kind", kind, env), }, } } fn p_namespace_use_clause<'a>( prefix: Option<S<'a>>, kind: Result<ast::NsKind>, node: S<'a>, env: &mut Env<'a>, ) -> Result<(ast::NsKind, ast::Sid, ast::Sid)> { lazy_static! { static ref NAMESPACE_USE: regex::Regex = regex::Regex::new("[^\\\\]*$").unwrap(); } match &node.children { NamespaceUseClause(NamespaceUseClauseChildren { clause_kind, alias, name, .. }) => { let ast::Id(p, n) = match (prefix, pos_name(name, env)?) { (None, id) => id, (Some(prefix), ast::Id(p, n)) => ast::Id(p, pos_name(prefix, env)?.1 + &n), }; let alias = if alias.is_missing() { let x = NAMESPACE_USE.find(&n).unwrap().as_str(); ast::Id(p.clone(), x.to_string()) } else { pos_name(alias, env)? }; let kind = if clause_kind.is_missing() { kind } else { p_namespace_use_kind(clause_kind, env) }?; Ok(( kind, ast::Id( p, if !n.is_empty() && n.starts_with('\\') { n } else { String::from("\\") + &n }, ), alias, )) } _ => missing_syntax("namespace use clause", node, env), } } fn is_memoize_attribute_with_flavor(u: &aast::UserAttribute<(), ()>, flavor: Option<&str>) -> bool { sn::user_attributes::is_memoized(&u.name.1) && (match flavor { Some(flavor) => u.params.iter().any( |p| matches!(p, Expr(_, _, ast::Expr_::EnumClassLabel(ecl)) if ecl.1 == flavor), ), None => u.params.is_empty(), }) } fn pos_qualified_referenced_module_name<'a>( name: &ast::Sid, node: S<'a>, env: &mut Env<'a>, ) -> Result<ast::MdNameKind> { if let ModuleName(c) = &node.children { if let SyntaxList(l) = &c.parts.children { let p = p_pos(node, env); let mut s = String::with_capacity(node.width()); for i in l.iter() { match &i.children { ListItem(li) => match &li.item.children { Token(t) => match t.kind() { TK::SelfToken => { s += &name.1; } TK::Global => { return Ok(ast::MdNameKind::MDNameGlobal(p)); } TK::Star => { return Ok(ast::MdNameKind::MDNamePrefix(ast::Id(p, s))); } TK::Name => { if !s.is_empty() { s += "."; } s += text_str(&li.item, env); } _ => { return missing_syntax("module name", node, env); } }, _ => { return missing_syntax("module name", node, env); } }, _ => { return missing_syntax("module name", node, env); } } } return Ok(ast::MdNameKind::MDNameExact(ast::Id(p, s))); } } missing_syntax("module name", node, env) } fn p_module_exports<'a>( name: &ast::Sid, node: S<'a>, env: &mut Env<'a>, ) -> Result<Option<Vec<ast::MdNameKind>>> { match &node.children { Missing => Ok(None), ModuleExports(e) => Ok(Some( e.exports .syntax_node_to_list_skip_separator() .map(|n| pos_qualified_referenced_module_name(name, n, env)) .collect::<Result<Vec<_>, _>>()?, )), _ => missing_syntax("module exports", node, env), } } fn p_module_imports<'a>( name: &ast::Sid, node: S<'a>, env: &mut Env<'a>, ) -> Result<Option<Vec<ast::MdNameKind>>> { match &node.children { Missing => Ok(None), ModuleImports(e) => Ok(Some( e.imports .syntax_node_to_list_skip_separator() .map(|n| pos_qualified_referenced_module_name(name, n, env)) .collect::<Result<Vec<_>, _>>()?, )), _ => missing_syntax("module imports", node, env), } } fn check_effect_memoized<'a>( contexts: Option<&ast::Contexts>, user_attributes: &[aast::UserAttribute<(), ()>], kind: &str, env: &mut Env<'a>, ) { // functions with dependent contexts cannot be memoized if has_polymorphic_context(env, contexts) { if let Some(u) = user_attributes .iter() .find(|u| 
sn::user_attributes::is_memoized(&u.name.1)) { raise_parsing_error_pos( &u.name.0, env, &syntax_error::effect_polymorphic_memoized(kind), ) } } // memoized functions with zoned or zoned_with must be #KeyedByIC if has_any_policied_context(contexts) { if let Some(u) = user_attributes .iter() .find(|u| is_memoize_attribute_with_flavor(u, None)) { raise_parsing_error_pos( &u.name.0, env, &syntax_error::effect_policied_memoized(kind), ) } } // #KeyedByIC can only be used on functions with defaults or zoned* if let Some(u) = user_attributes .iter() .find(|u| is_memoize_attribute_with_flavor(u, Some(sn::memoize_option::KEYED_BY_IC))) { if !has_any_policied_or_defaults_context(contexts) { raise_parsing_error_pos( &u.name.0, env, &syntax_error::policy_sharded_memoized_without_policied(kind), ) } } // #(Soft)?MakeICInaccessible can only be used on functions with defaults if let Some(u) = user_attributes.iter().find(|u| { is_memoize_attribute_with_flavor(u, Some(sn::memoize_option::MAKE_IC_INACCESSSIBLE)) || is_memoize_attribute_with_flavor( u, Some(sn::memoize_option::SOFT_MAKE_IC_INACCESSSIBLE), ) }) { if !has_any_context( contexts, vec![ sn::coeffects::DEFAULTS, sn::coeffects::LEAK_SAFE_LOCAL, sn::coeffects::LEAK_SAFE_SHALLOW, ], ) { raise_parsing_error_pos( &u.name.0, env, &syntax_error::memoize_make_ic_inaccessible_without_defaults(kind), ) } } // functions whose contexts prevent getting the IC (effectively <= [leak_safe, globals]) // cannot pass a memoize argument if contexts_cannot_access_ic(contexts) { if let Some(u) = user_attributes .iter() .find(|u| sn::user_attributes::is_memoized(&u.name.1) && !u.params.is_empty()) { raise_parsing_error_pos( &u.name.0, env, &syntax_error::memoize_category_without_implicit_policy_capability(kind), ) } } } fn check_context_has_this<'a>(contexts: Option<&ast::Contexts>, env: &mut Env<'a>) { use ast::Hint_::Haccess; use ast::Hint_::Happly; if let Some(ast::Contexts(pos, ref context_hints)) = contexts { context_hints.iter().for_each(|c| match *c.1 { Haccess(ref root, _) => match &*root.1 { Happly(oxidized::ast::Id(_, id), _) if strip_ns(id.as_str()) == sn::typehints::THIS => { raise_parsing_error_pos( pos, env, "this:: context is not allowed on top level functions", ) } _ => {} }, _ => {} }); } } fn check_effect_polymorphic_reification<'a>( contexts: Option<&ast::Contexts>, env: &mut Env<'a>, node: S<'a>, ) { use ast::Hint_::Haccess; use ast::Hint_::Happly; if let Some(ast::Contexts(_, ref context_hints)) = contexts { context_hints.iter().for_each(|c| match *c.1 { Haccess(ref root, _) => match &*root.1 { Happly(oxidized::ast::Id(_, id), _) => { fail_if_invalid_reified_generic(node, env, strip_ns(id.as_str())) } _ => {} }, _ => {} }); } } fn p_const_value<'a>(node: S<'a>, env: &mut Env<'a>, default_pos: Pos) -> Result<ast::Expr> { match &node.children { SimpleInitializer(c) => p_expr(&c.value, env), _ if env.file_mode() == file_info::Mode::Mhhi && !env.codegen() => { // We use Omitted as a placeholder here because we don't care about // the constant's value when in HHI mode Ok(Expr::new((), default_pos, Expr_::Omitted)) } _ => missing_syntax("simple initializer", node, env), } } fn p_def<'a>(node: S<'a>, env: &mut Env<'a>) -> Result<Vec<ast::Def>> { let doc_comment_opt = extract_docblock(node, env); match &node.children { FunctionDeclaration(FunctionDeclarationChildren { attribute_spec, declaration_header, body, }) => { let mut env = Env::clone_and_unset_toplevel_if_toplevel(env); let env = env.as_mut(); env.clear_generics(); let hdr = 
p_fun_hdr(declaration_header, env)?; let is_external = body.is_external(); let (block, yield_) = if is_external { (Default::default(), false) } else { map_yielding(body, env, p_function_body)? }; let user_attributes = p_user_attributes(attribute_spec, env); check_effect_memoized(hdr.contexts.as_ref(), &user_attributes, "function", env); check_context_has_this(hdr.contexts.as_ref(), env); let ret = ast::TypeHint((), hdr.return_type); let fun = ast::Fun_ { span: p_fun_pos(node, env), readonly_this: hdr.readonly_this, annotation: (), ret, readonly_ret: hdr.readonly_return, params: hdr.parameters, ctxs: hdr.contexts, unsafe_ctxs: hdr.unsafe_contexts, body: ast::FuncBody { fb_ast: block }, fun_kind: mk_fun_kind(hdr.suspension_kind, yield_), user_attributes, external: is_external, doc_comment: doc_comment_opt, }; Ok(vec![ast::Def::mk_fun(ast::FunDef { namespace: mk_empty_ns_env(env), file_attributes: vec![], mode: env.file_mode(), name: hdr.name, fun, internal: hdr.internal, module: None, tparams: hdr.type_parameters, where_constraints: hdr.constrs, })]) } ClassishDeclaration(c) if contains_class_body(c) => { let mut env = Env::clone_and_unset_toplevel_if_toplevel(env); let env = env.as_mut(); let mode = env.file_mode(); let user_attributes = p_user_attributes(&c.attribute, env); let docs_url = p_docs_url(&user_attributes, env); let kinds = p_kinds(&c.modifiers, env); let final_ = kinds.has(modifier::FINAL); let is_xhp = matches!( token_kind(&c.name), Some(TK::XHPElementName) | Some(TK::XHPClassName) ); let has_xhp_keyword = matches!(token_kind(&c.xhp), Some(TK::XHP)); let name = pos_name(&c.name, env)?; env.clear_generics(); let tparams = p_tparam_l(true, &c.type_parameters, env)?; let class_kind = match token_kind(&c.keyword) { Some(TK::Class) if kinds.has(modifier::ABSTRACT) => { ast::ClassishKind::Cclass(ast::Abstraction::Abstract) } Some(TK::Class) => ast::ClassishKind::Cclass(ast::Abstraction::Concrete), Some(TK::Interface) => ast::ClassishKind::Cinterface, Some(TK::Trait) => ast::ClassishKind::Ctrait, Some(TK::Enum) => ast::ClassishKind::Cenum, _ => missing_syntax("class kind", &c.keyword, env)?, }; let extends = could_map(&c.extends_list, env, p_hint)?; *env.parent_maybe_reified() = match extends.first().map(|h| h.1.as_ref()) { Some(ast::Hint_::Happly(_, hl)) => !hl.is_empty(), _ => false, }; let implements = could_map(&c.implements_list, env, p_hint)?; let where_constraints = p_where_constraint(true, node, &c.where_clause, env)?; let namespace = mk_empty_ns_env(env); let span = p_pos(node, env); let mut class_ = ast::Class_ { span, annotation: (), mode, final_, is_xhp, has_xhp_keyword, kind: class_kind, name, tparams, extends, uses: vec![], xhp_attr_uses: vec![], xhp_category: None, reqs: vec![], implements, where_constraints, consts: vec![], typeconsts: vec![], vars: vec![], methods: vec![], xhp_children: vec![], xhp_attrs: vec![], namespace, user_attributes, file_attributes: vec![], enum_: None, doc_comment: doc_comment_opt, emit_id: None, internal: kinds.has(modifier::INTERNAL), module: None, docs_url, }; match &c.body.children { ClassishBody(c1) => { for elt in c1.elements.syntax_node_to_list_skip_separator() { p_class_elt(&mut class_, elt, env); } } _ => missing_syntax("classish body", &c.body, env)?, } Ok(vec![ast::Def::mk_class(class_)]) } ConstDeclaration(c) => { let ty = &c.type_specifier; let decls = c.declarators.syntax_node_to_list_skip_separator(); let mut defs = vec![]; for decl in decls { let def = match &decl.children { ConstantDeclarator(c) => { let name = &c.name; let 
init = &c.initializer; let gconst = ast::Gconst { annotation: (), mode: env.file_mode(), name: pos_name(name, env)?, type_: map_optional(ty, env, p_hint)?, value: p_const_value(init, env, p_pos(name, env))?, namespace: mk_empty_ns_env(env), span: p_pos(node, env), emit_id: None, }; ast::Def::mk_constant(gconst) } _ => missing_syntax("constant declaration", decl, env)?, }; defs.push(def); } Ok(defs) } AliasDeclaration(c) => { let tparams = p_tparam_l(false, &c.generic_parameter, env)?; let kinds = p_kinds(&c.modifiers, env); let is_module_newtype = !c.module_kw_opt.is_missing(); for tparam in tparams.iter() { if tparam.reified != ast::ReifyKind::Erased { raise_parsing_error(node, env, &syntax_error::invalid_reified) } } let user_attributes = itertools::concat( c.attribute_spec .syntax_node_to_list_skip_separator() .map(|attr| p_user_attribute(attr, env)) .collect::<Result<Vec<ast::UserAttributes>, _>>()?, ); let docs_url = p_docs_url(&user_attributes, env); let (super_constraints, as_constraints) = p_tconstraints_into_lower_and_upper(&c.constraint, env); let require_one = &mut |kind: &str, cs: Vec<_>| { if cs.len() > 1 { let msg = format!( "Multiple `{}` constraints on an alias are not allowed", kind ); raise_parsing_error(node, env, &msg); } cs.into_iter().next() }; let as_constraint = require_one("as", as_constraints); let super_constraint = require_one("super", super_constraints); Ok(vec![ast::Def::mk_typedef(ast::Typedef { annotation: (), name: pos_name(&c.name, env)?, tparams, as_constraint, super_constraint, user_attributes, file_attributes: vec![], namespace: mk_empty_ns_env(env), mode: env.file_mode(), vis: match token_kind(&c.keyword) { Some(TK::Type) => ast::TypedefVisibility::Transparent, Some(TK::Newtype) if is_module_newtype => ast::TypedefVisibility::OpaqueModule, Some(TK::Newtype) => ast::TypedefVisibility::Opaque, _ => missing_syntax("kind", &c.keyword, env)?, }, kind: p_hint(&c.type_, env)?, span: p_pos(node, env), emit_id: None, is_ctx: false, internal: kinds.has(modifier::INTERNAL), module: None, docs_url, doc_comment: doc_comment_opt, })]) } CaseTypeDeclaration(c) => { let kinds = p_kinds(&c.modifiers, env); let tparams = p_tparam_l(false, &c.generic_parameter, env)?; for tparam in tparams.iter() { if tparam.reified != ast::ReifyKind::Erased { raise_parsing_error(node, env, &syntax_error::invalid_reified) } } let user_attributes = itertools::concat( c.attribute_spec .syntax_node_to_list_skip_separator() .map(|attr| p_user_attribute(attr, env)) .collect::<Result<Vec<ast::UserAttributes>, _>>()?, ); let docs_url = p_docs_url(&user_attributes, env); let expect_hint = |node, env: &mut _| match p_hint(node, env) { Ok(hint) => Some(hint), Err(e) => { emit_error(e, env); None } }; let as_constraints = c .bounds .syntax_node_to_list_skip_separator() .filter_map(|bound| expect_hint(bound, env)) .collect::<Vec<_>>(); let variants = c .variants .syntax_node_to_list() .filter_map(|variant| { if let CaseTypeVariant(ctv) = &variant.children { expect_hint(&ctv.type_, env) } else { None } }) .collect::<Vec<_>>(); // If there is more than one constraint, create an intersection let as_constraint = if as_constraints.len() > 1 { let hint_ = ast::Hint_::Hintersection(as_constraints); let pos = p_pos(&c.bounds, env); Some(ast::Hint::new(pos, hint_)) } else { as_constraints.into_iter().next() }; // If there is more than one variant, create a union let kind = if variants.len() > 1 { let hint_ = ast::Hint_::Hunion(variants); let pos = p_pos(&c.variants, env); ast::Hint::new(pos, hint_) } else
{ match variants.into_iter().next() { Some(hint) => hint, // If there is less than one variant, it is an ill-defined case type None => return missing_syntax("case type variant", node, env), } }; Ok(vec![ast::Def::mk_typedef(ast::Typedef { annotation: (), name: pos_name(&c.name, env)?, tparams, as_constraint, super_constraint: None, user_attributes, file_attributes: vec![], namespace: mk_empty_ns_env(env), mode: env.file_mode(), vis: ast::TypedefVisibility::CaseType, kind, span: p_pos(node, env), emit_id: None, is_ctx: false, internal: kinds.has(modifier::INTERNAL), module: None, docs_url, doc_comment: doc_comment_opt, })]) } ContextAliasDeclaration(c) => { let (super_constraint, as_constraint) = p_ctx_constraints(&c.as_constraint, env)?; let pos_name = pos_name(&c.name, env)?; if let Some(first_char) = pos_name.1.chars().next() { if first_char.is_lowercase() { raise_parsing_error( &c.name, env, &syntax_error::user_ctx_should_be_caps(&pos_name.1), ) } } if as_constraint.is_none() { raise_parsing_error( &c.name, env, &syntax_error::user_ctx_require_as(&pos_name.1), ) } let kind = match p_context_list_to_intersection( &c.context, env, "Context aliases cannot alias polymorphic contexts", ) { Some(h) => h, None => { let pos = pos_name.0.clone(); let hint_ = ast::Hint_::Happly(ast::Id(pos.clone(), String::from("defaults")), vec![]); ast::Hint::new(pos, hint_) } }; Ok(vec![ast::Def::mk_typedef(ast::Typedef { annotation: (), name: pos_name, tparams: vec![], as_constraint, super_constraint, user_attributes: itertools::concat( c.attribute_spec .syntax_node_to_list_skip_separator() .map(|attr| p_user_attribute(attr, env)) .collect::<Result<Vec<ast::UserAttributes>, _>>()?, ), namespace: mk_empty_ns_env(env), mode: env.file_mode(), file_attributes: vec![], vis: ast::TypedefVisibility::Opaque, kind, span: p_pos(node, env), emit_id: None, is_ctx: true, // TODO(T116039119): Populate value with presence of internal attribute internal: false, module: None, docs_url: None, doc_comment: doc_comment_opt, })]) } EnumDeclaration(c) => { let span = p_pos(node, env); let p_enumerator = |n: S<'a>, e: &mut Env<'a>| -> Result<ast::ClassConst> { match &n.children { Enumerator(c) => Ok(ast::ClassConst { user_attributes: Default::default(), type_: None, id: pos_name(&c.name, e)?, kind: ast::ClassConstKind::CCConcrete(p_expr(&c.value, e)?), span: span.clone(), doc_comment: None, }), _ => missing_syntax("enumerator", n, e), } }; let kinds = p_kinds(&c.modifiers, env); let mut includes = vec![]; let mut p_enum_use = |n: S<'a>, e: &mut Env<'a>| -> Result<()> { match &n.children { EnumUse(c) => { let mut uses = could_map(&c.names, e, p_hint)?; Ok(includes.append(&mut uses)) } _ => missing_syntax("enum_use", node, e), } }; for elt in c.use_clauses.syntax_node_to_list_skip_separator() { p_enum_use(elt, env)?; } let user_attributes = p_user_attributes(&c.attribute_spec, env); let docs_url = p_docs_url(&user_attributes, env); Ok(vec![ast::Def::mk_class(ast::Class_ { annotation: (), mode: env.file_mode(), user_attributes, file_attributes: vec![], final_: false, kind: ast::ClassishKind::Cenum, is_xhp: false, has_xhp_keyword: false, name: pos_name(&c.name, env)?, tparams: vec![], extends: vec![], implements: vec![], where_constraints: vec![], consts: could_map(&c.enumerators, env, p_enumerator)?, namespace: mk_empty_ns_env(env), span: p_pos(node, env), enum_: Some(ast::Enum_ { base: p_hint(&c.base, env)?, constraint: map_optional(&c.type_, env, p_tconstraint_ty)?, includes, }), doc_comment: doc_comment_opt, uses: vec![],
xhp_attr_uses: vec![], xhp_category: None, reqs: vec![], vars: vec![], typeconsts: vec![], methods: vec![], xhp_children: vec![], xhp_attrs: vec![], emit_id: None, internal: kinds.has(modifier::INTERNAL), module: None, docs_url, })]) } EnumClassDeclaration(c) => { let name = pos_name(&c.name, env)?; // Adding __EnumClass let mut user_attributes = p_user_attributes(&c.attribute_spec, env); let enum_class_attribute = ast::UserAttribute { name: ast::Id(name.0.clone(), special_attrs::ENUM_CLASS.to_string()), params: vec![], }; user_attributes.push(enum_class_attribute); let docs_url = p_docs_url(&user_attributes, env); // During lowering we store the base type as is. It will be updated during // the naming phase let base_type = p_hint(&c.base, env)?; let name_s = name.1.clone(); // TODO: can I avoid this clone ? let kinds = p_kinds(&c.modifiers, env); let class_kind = if kinds.has(modifier::ABSTRACT) { ast::ClassishKind::CenumClass(ast::Abstraction::Abstract) } else { ast::ClassishKind::CenumClass(ast::Abstraction::Concrete) }; // Helper to build X -> HH\MemberOf<enum_name, X> let build_elt = |p: Pos, ty: ast::Hint| -> ast::Hint { let enum_name = ast::Id(p.clone(), name_s.clone()); let enum_class = ast::Hint_::mk_happly(enum_name, vec![]); let enum_class = ast::Hint::new(p.clone(), enum_class); let elt_id = ast::Id(p.clone(), special_classes::MEMBER_OF.to_string()); let full_type = ast::Hint_::mk_happly(elt_id, vec![enum_class, ty]); ast::Hint::new(p, full_type) }; let extends = could_map(&c.extends_list, env, p_hint)?; let mut enum_class = ast::Class_ { annotation: (), mode: env.file_mode(), user_attributes, file_attributes: vec![], final_: false, // TODO(T77095784): support final EDTs kind: class_kind, is_xhp: false, has_xhp_keyword: false, name, tparams: vec![], extends: extends.clone(), implements: vec![], where_constraints: vec![], consts: vec![], namespace: mk_empty_ns_env(env), span: p_pos(node, env), enum_: Some(ast::Enum_ { base: base_type, constraint: None, includes: extends, }), doc_comment: doc_comment_opt, uses: vec![], xhp_attr_uses: vec![], xhp_category: None, reqs: vec![], vars: vec![], typeconsts: vec![], methods: vec![], xhp_children: vec![], xhp_attrs: vec![], emit_id: None, internal: kinds.has(modifier::INTERNAL), module: None, docs_url, }; for n in c.elements.syntax_node_to_list_skip_separator() { match &n.children { // TODO(T77095784): check pos and span usage EnumClassEnumerator(c) => { // we turn: // - type name = expression; // into // - const MemberOf<enum_name, type> name = expression let name = pos_name(&c.name, env)?; let pos = &name.0; let kinds = p_kinds(&c.modifiers, env); let has_abstract = kinds.has(modifier::ABSTRACT); let elt_type = p_hint(&c.type_, env)?; let full_type = build_elt(pos.clone(), elt_type); let kind = if has_abstract { ast::ClassConstKind::CCAbstract(None) } else { ast::ClassConstKind::CCConcrete(p_simple_initializer( &c.initializer, env, )?) 
}; let class_const = ast::ClassConst { user_attributes: Default::default(), type_: Some(full_type), id: name, kind, span: p_pos(node, env), doc_comment: None, }; enum_class.consts.push(class_const) } TypeConstDeclaration(_) => { let doc_comment_opt = extract_docblock(n, env); p_type_constant(n, doc_comment_opt, env, &mut enum_class) } _ => { let pos = p_pos(n, env); raise_parsing_error_pos( &pos, env, &syntax_error::invalid_enum_class_enumerator, ) } } } Ok(vec![ast::Def::mk_class(enum_class)]) } InclusionDirective(c) if env.file_mode() != file_info::Mode::Mhhi || env.codegen() => { let expr = p_expr(&c.expression, env)?; Ok(vec![ast::Def::mk_stmt(ast::Stmt::new( p_pos(node, env), ast::Stmt_::mk_expr(expr), ))]) } NamespaceDeclaration(c) => { let name = if let NamespaceDeclarationHeader(h) = &c.header.children { &h.name } else { return missing_syntax("namespace_declaration_header", node, env); }; let defs = match &c.body.children { NamespaceBody(c) => { let mut env1 = Env::clone_and_unset_toplevel_if_toplevel(env); let env1 = env1.as_mut(); itertools::concat( c.declarations .syntax_node_to_list_skip_separator() .map(|n| p_def(n, env1)) .collect::<Result<Vec<Vec<_>>, _>>()?, ) } _ => vec![], }; Ok(vec![ast::Def::mk_namespace(pos_name(name, env)?, defs)]) } NamespaceGroupUseDeclaration(c) => { let uses: Result<Vec<_>, _> = c .clauses .syntax_node_to_list_skip_separator() .map(|n| { p_namespace_use_clause( Some(&c.prefix), p_namespace_use_kind(&c.kind, env), n, env, ) }) .collect(); Ok(vec![ast::Def::mk_namespace_use(uses?)]) } NamespaceUseDeclaration(c) => { let uses: Result<Vec<_>, _> = c .clauses .syntax_node_to_list_skip_separator() .map(|n| p_namespace_use_clause(None, p_namespace_use_kind(&c.kind, env), n, env)) .collect(); Ok(vec![ast::Def::mk_namespace_use(uses?)]) } FileAttributeSpecification(_) => { Ok(vec![ast::Def::mk_file_attributes(ast::FileAttribute { user_attributes: p_user_attribute(node, env)?, namespace: mk_empty_ns_env(env), })]) } ModuleDeclaration(md) => { let name = pos_module_name(&md.name, env)?; let exports = p_module_exports(&name, &md.exports, env)?; let imports = p_module_imports(&name, &md.imports, env)?; Ok(vec![ast::Def::mk_module(ast::ModuleDef { annotation: (), name, user_attributes: p_user_attributes(&md.attribute_spec, env), file_attributes: vec![], span: p_pos(node, env), mode: env.file_mode(), doc_comment: doc_comment_opt, exports, imports, })]) } ModuleMembershipDeclaration(mm) => { Ok(vec![ast::Def::mk_set_module(pos_name(&mm.name, env)?)]) } _ if env.file_mode() == file_info::Mode::Mhhi => Ok(vec![]), _ => Ok(vec![ast::Def::mk_stmt(p_stmt(node, env)?)]), } } fn post_process<'a>(env: &mut Env<'a>, program: Vec<ast::Def>, acc: &mut Vec<ast::Def>) { use aast::Def; use aast::Def::*; use aast::Stmt_::*; let mut saw_ns: Option<(ast::Sid, Vec<ast::Def>)> = None; for def in program.into_iter() { if let Namespace(_) = &def { if let Some((n, ns_acc)) = saw_ns { acc.push(Def::mk_namespace(n, ns_acc)); saw_ns = None; } } if let Namespace(ns) = def { let (n, defs) = *ns; if defs.is_empty() { saw_ns = Some((n, vec![])); } else { let mut acc_ = vec![]; post_process(env, defs, &mut acc_); acc.push(Def::mk_namespace(n, acc_)); } continue; } if let Stmt(s) = &def { if s.1.is_noop() { continue; } let raise_error = match &s.1 { Markup(_) => false, Expr(expr) if expr.as_ref().is_import() && !env.parser_options.po_disallow_toplevel_requires => { false } _ => { use file_info::Mode::*; let mode = env.file_mode(); env.is_typechecker() && (mode == Mstrict) } }; if raise_error { 
raise_parsing_error_pos(&s.0, env, &syntax_error::toplevel_statements); } } if let Some((_, ns_acc)) = &mut saw_ns { ns_acc.push(def); } else { acc.push(def); }; } if let Some((n, defs)) = saw_ns { acc.push(Def::mk_namespace(n, defs)); } } fn p_program<'a>(node: S<'a>, env: &mut Env<'a>) -> ast::Program { let nodes = node.syntax_node_to_list_skip_separator(); let mut acc = vec![]; for n in nodes { match &n.children { EndOfFile(_) => break, _ => match p_def(n, env) { Err(e) => { emit_error(e, env); } Ok(mut def) => acc.append(&mut def), }, } } let mut program = vec![]; post_process(env, acc, &mut program); ast::Program(program) } fn p_script<'a>(node: S<'a>, env: &mut Env<'a>) -> ast::Program { match &node.children { Script(c) => p_program(&c.declarations, env), _ => { raise_missing_syntax("script", node, env); ast::Program(vec![]) } } } /// Convert the FFP syntax `script` to an AAST. /// /// If we encounter parse errors, write them to `env`, and return as /// much of an AAST as we can. pub fn lower<'a>(env: &mut Env<'a>, script: S<'a>) -> ast::Program { p_script(script, env) }
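// A minimal calling sketch, assuming a positioned `script` root obtained from
// the full-fidelity parser and an already-constructed lowering `Env` (the
// `Env` type is defined outside this excerpt, so its setup is elided):
//
//     let program: ast::Program = lower(&mut env, script);
//
// Recoverable parse errors are accumulated in `env` rather than returned, so
// callers inspect `env` for errors after lowering completes.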
Rust
hhvm/hphp/hack/src/parser/lowerer/modifier.rs
// Copyright (c) 2019, Facebook, Inc. // All rights reserved. // // This source code is licensed under the MIT license found in the // LICENSE file in the "hack" directory of this source tree. use oxidized::aast::Visibility; use parser_core_types::token_kind::TokenKind as TK; #[derive(Copy, Clone, Eq, PartialEq)] pub struct Kind(u32); pub const XHP: Kind = Kind(1u32); pub const FINAL: Kind = Kind(1u32 << 1); pub const STATIC: Kind = Kind(1u32 << 2); pub const ABSTRACT: Kind = Kind(1u32 << 3); pub const PRIVATE: Kind = Kind(1u32 << 4); pub const PUBLIC: Kind = Kind(1u32 << 5); pub const PROTECTED: Kind = Kind(1u32 << 6); pub const VAR: Kind = Kind(1u32 << 7); pub const ASYNC: Kind = Kind(1u32 << 8); pub const READONLY: Kind = Kind(1u32 << 9); pub const INTERNAL: Kind = Kind(1u32 << 10); pub fn from_token_kind(t: TK) -> Option<Kind> { match t { TK::Final => Some(FINAL), TK::Static => Some(STATIC), TK::Abstract => Some(ABSTRACT), TK::Private => Some(PRIVATE), TK::Public => Some(PUBLIC), TK::Protected => Some(PROTECTED), TK::Internal => Some(INTERNAL), TK::Var => Some(VAR), TK::Async => Some(ASYNC), TK::XHP => Some(XHP), TK::Readonly => Some(READONLY), _ => None, } } pub fn to_visibility(kind: Kind) -> Option<Visibility> { match kind { PUBLIC => Some(Visibility::Public), PRIVATE => Some(Visibility::Private), PROTECTED => Some(Visibility::Protected), INTERNAL => Some(Visibility::Internal), _ => None, } } #[derive(Copy, Clone)] pub struct KindSet(u32); impl KindSet { pub fn new() -> Self { KindSet(0) } pub fn add(&mut self, kind: Kind) { self.0 |= kind.0 } pub fn has(&self, kind: Kind) -> bool { self.0 & kind.0 > 0 } }
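// A minimal sketch of how the lowerer consumes this bit-set: modifier tokens
// are folded into a `KindSet` with `add` and queried with `has` (compare the
// `p_kinds(...)` / `kinds.has(modifier::ABSTRACT)` pattern in lowerer.rs).
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn kind_set_accumulates_and_queries_modifiers() {
        let mut kinds = KindSet::new();
        kinds.add(ABSTRACT);
        kinds.add(STATIC);
        assert!(kinds.has(ABSTRACT));
        assert!(kinds.has(STATIC));
        assert!(!kinds.has(FINAL));
        // Each modifier owns a distinct bit, so a single `Kind` can still be
        // mapped to a visibility independently of the set.
        assert!(matches!(to_visibility(PUBLIC), Some(Visibility::Public)));
    }
}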
Rust
hhvm/hphp/hack/src/parser/lowerer/scour_comment.rs
// Copyright (c) Facebook, Inc. and its affiliates. // // This source code is licensed under the MIT license found in the // LICENSE file in the "hack" directory of this source tree. use oxidized::i_set::ISet; use oxidized::pos::Pos; use oxidized::prim_defs::Comment; use oxidized::scoured_comments::ScouredComments; use parser_core_types::indexed_source_text::IndexedSourceText; use parser_core_types::lexable_token::LexablePositionedToken; use parser_core_types::lexable_trivia::LexableTrivium; use parser_core_types::positioned_trivia::PositionedTrivium; use parser_core_types::source_text::SourceText; use parser_core_types::syntax_by_ref::syntax::Syntax; use parser_core_types::syntax_by_ref::syntax_variant_generated::SyntaxVariant::*; use parser_core_types::syntax_trait::SyntaxTrait; use parser_core_types::trivia_kind::TriviaKind; use regex::bytes::Regex; use rescan_trivia::RescanTrivia; /** The full fidelity parser considers all comments "simply" trivia. Some * comments have meaning, though. This meaning can be relevant for the * type checker (like HH_FIXME, etc.), but also for other uses, like * Codex, where comments are used for documentation generation. * * Inlining the scrape for comments in the lowering code would be prohibitively * complicated, but a separate pass is fine. */ pub struct ScourComment<'a, T, V> { pub indexed_source_text: &'a IndexedSourceText<'a>, pub include_line_comments: bool, pub allowed_decl_fixme_codes: &'a ISet, pub phantom: std::marker::PhantomData<(*const T, *const V)>, pub disable_hh_ignore_error: isize, } impl<'src, 'arena, T, V> ScourComment<'src, T, V> where T: LexablePositionedToken + RescanTrivia<PositionedTrivium> + 'arena, V: 'arena, Syntax<'arena, T, V>: SyntaxTrait, { pub fn scour_comments<'r>(&self, top_node: &'r Syntax<'arena, T, V>) -> ScouredComments where 'r: 'arena, { let mut acc = ScouredComments::new(); let mut stack: Vec<(&'r Syntax<'arena, T, V>, bool)> = vec![(top_node, false)]; while let Some((node, mut in_block)) = stack.pop() { match &node.children { CompoundStatement(_) => in_block = true, Token(t) => { if t.has_trivia_kind(TriviaKind::DelimitedComment) || (self.include_line_comments && t.has_trivia_kind(TriviaKind::SingleLineComment)) || (t.has_trivia_kind(TriviaKind::FixMe) || (t.has_trivia_kind(TriviaKind::IgnoreError) && self.disable_hh_ignore_error <= 1)) { let leading = t.scan_leading(self.source_text()); let trailing = t.scan_trailing(self.source_text()); for tr in leading.iter().chain(trailing.iter()) { self.on_trivia(in_block, node, tr, &mut acc); } } continue; } _ => {} } stack.extend(node.iter_children().rev().map(|c| (c, in_block))); } acc.comments.reverse(); acc } fn on_trivia( &self, in_block: bool, node: &Syntax<'arena, T, V>, t: &PositionedTrivium, acc: &mut ScouredComments, ) { use relative_path::Prefix; use TriviaKind::*; match t.kind() { WhiteSpace | EndOfLine | FallThrough | ExtraTokenError => {} DelimitedComment => { let start = t.start_offset() + 2; let end = t.end_offset(); let len = end - start - 1; let p = self.pos_of_offset(end - 1, end); let text = self.source_text().sub_as_str(start, len).to_string(); acc.comments.push((p, Comment::CmtBlock(text))); } SingleLineComment => { if self.include_line_comments { let text = self.source_text().text(); let start = t.start_offset(); let start = start + if text[start] == b'#' { 1 } else { 2 }; let end = t.end_offset(); let len = end + 1 - start; let p = self.pos_of_offset(start, end); let mut text = self.source_text().sub_as_str(start, len).to_string();
text.push('\n'); acc.comments.push((p, Comment::CmtLine(text))); } } FixMe | IgnoreError => { lazy_static! { static ref IGNORE_ERROR: Regex = Regex::new(r#"HH_(?:FIXME|IGNORE_ERROR)[ \t\n]*\[([0-9]+)\]"#).unwrap(); } let text = t.text_raw(self.source_text()); let pos = self.p_pos(node); let line = pos.line() as isize; let p = self.pos_of_offset(t.start_offset(), t.end_offset() + 1); match IGNORE_ERROR .captures(text) .and_then(|c| c.get(1)) .map(|m| m.as_bytes()) { Some(code) => { let code = std::str::from_utf8(code).unwrap(); let code: isize = std::str::FromStr::from_str(code).unwrap(); let in_hhi = pos.filename().prefix() == Prefix::Hhi; if !(in_block || in_hhi || self.allowed_decl_fixme_codes.contains(&code)) { acc.add_to_misuses(line, code, p); } else if self.disable_hh_ignore_error == 1 && t.kind() == IgnoreError { acc.add_disallowed_ignore(p); } else { acc.add_to_fixmes(line, code, p); } } None => { // Errors.fixme_format pos; acc.add_format_error(pos); } } } } } fn source_text(&self) -> &'src SourceText<'src> { self.indexed_source_text.source_text() } fn p_pos(&self, node: &Syntax<'arena, T, V>) -> Pos { node.position_exclusive(self.indexed_source_text) .map_or(Pos::NONE, Into::into) } fn pos_of_offset(&self, start: usize, end: usize) -> Pos { self.indexed_source_text.relative_pos(start, end).into() } }
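// A minimal standalone sketch (not part of the file above) of how the
// IGNORE_ERROR regex used by ScourComment::on_trivia extracts the error code
// from an HH_FIXME / HH_IGNORE_ERROR comment. Only the pattern comes from the
// source above; the `main` driver and the sample comment are illustrative
// assumptions.
use regex::bytes::Regex;

fn main() {
    let ignore_error =
        Regex::new(r#"HH_(?:FIXME|IGNORE_ERROR)[ \t\n]*\[([0-9]+)\]"#).unwrap();
    let comment = b"/* HH_FIXME[4110] exposed by the new inference rules */";
    // Capture group 1 is the numeric error code; a comment that fails to
    // match is what the real pass reports via add_format_error.
    let code = ignore_error
        .captures(comment)
        .and_then(|c| c.get(1))
        .map(|m| {
            std::str::from_utf8(m.as_bytes())
                .unwrap()
                .parse::<isize>()
                .unwrap()
        });
    println!("{:?}", code); // prints Some(4110)
}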
Rust
hhvm/hphp/hack/src/parser/rust_parser_errors_ffi/rust_parser_errors_ffi.rs
// Copyright (c) Facebook, Inc. and its affiliates.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the "hack" directory of this source tree.

use std::collections::HashSet;

use bumpalo::Bump;
use ocamlrep::ptr::UnsafeOcamlPtr;
use ocamlrep::FromOcamlRep;
use ocamlrep_ocamlpool::ocaml_ffi;
use oxidized::parser_options::ParserOptions;
use parser_core_types::source_text::SourceText;
use parser_core_types::syntax_by_ref::positioned_syntax::PositionedSyntax;
use parser_core_types::syntax_error::SyntaxError;
use parser_core_types::syntax_tree::SyntaxTree;

// "only_for_parser_errors" because it sets only a subset of options relevant
// to parser errors, leaving the rest at their defaults
unsafe fn parser_options_from_ocaml_only_for_parser_errors(
    ocaml_opts: UnsafeOcamlPtr,
) -> (ParserOptions, (bool, bool, bool, bool)) {
    let ocaml_opts = ocaml_opts.as_usize() as *const usize;

    // Keep in sync with src/options/parserOptions.ml
    let hhvm_compat_mode = bool::from_ocaml(*ocaml_opts.add(0)).unwrap();
    let hhi_mode = bool::from_ocaml(*ocaml_opts.add(1)).unwrap();
    let codegen = bool::from_ocaml(*ocaml_opts.add(2)).unwrap();

    let mut parser_options = ParserOptions::default();

    let po_disable_lval_as_an_expression = bool::from_ocaml(*ocaml_opts.add(3)).unwrap();
    let po_disable_legacy_soft_typehints = bool::from_ocaml(*ocaml_opts.add(4)).unwrap();
    let tco_const_static_props = bool::from_ocaml(*ocaml_opts.add(5)).unwrap();
    let po_disable_legacy_attribute_syntax = bool::from_ocaml(*ocaml_opts.add(6)).unwrap();
    let po_const_default_func_args = bool::from_ocaml(*ocaml_opts.add(7)).unwrap();
    let po_abstract_static_props = bool::from_ocaml(*ocaml_opts.add(8)).unwrap();
    let po_disallow_func_ptrs_in_constants = bool::from_ocaml(*ocaml_opts.add(9)).unwrap();
    let po_enable_xhp_class_modifier = bool::from_ocaml(*ocaml_opts.add(10)).unwrap();
    let po_disable_xhp_element_mangling = bool::from_ocaml(*ocaml_opts.add(11)).unwrap();
    let po_disable_xhp_children_declarations = bool::from_ocaml(*ocaml_opts.add(12)).unwrap();
    let po_const_default_lambda_args = bool::from_ocaml(*ocaml_opts.add(13)).unwrap();
    let po_allow_unstable_features = bool::from_ocaml(*ocaml_opts.add(14)).unwrap();
    let po_interpret_soft_types_as_like_types = bool::from_ocaml(*ocaml_opts.add(15)).unwrap();
    let tco_is_systemlib = bool::from_ocaml(*ocaml_opts.add(16)).unwrap();
    let po_disallow_static_constants_in_default_func_args =
        bool::from_ocaml(*ocaml_opts.add(17)).unwrap();

    parser_options.po_disable_lval_as_an_expression = po_disable_lval_as_an_expression;
    parser_options.po_disable_legacy_soft_typehints = po_disable_legacy_soft_typehints;
    parser_options.tco_const_static_props = tco_const_static_props;
    parser_options.po_disable_legacy_attribute_syntax = po_disable_legacy_attribute_syntax;
    parser_options.po_const_default_func_args = po_const_default_func_args;
    parser_options.po_abstract_static_props = po_abstract_static_props;
    parser_options.po_disallow_func_ptrs_in_constants = po_disallow_func_ptrs_in_constants;
    parser_options.po_enable_xhp_class_modifier = po_enable_xhp_class_modifier;
    parser_options.po_disable_xhp_element_mangling = po_disable_xhp_element_mangling;
    parser_options.po_disable_xhp_children_declarations = po_disable_xhp_children_declarations;
    parser_options.po_const_default_lambda_args = po_const_default_lambda_args;
    parser_options.po_allow_unstable_features = po_allow_unstable_features;
    parser_options.po_interpret_soft_types_as_like_types = po_interpret_soft_types_as_like_types;
    parser_options.tco_is_systemlib = tco_is_systemlib;
    parser_options.po_disallow_static_constants_in_default_func_args =
        po_disallow_static_constants_in_default_func_args;

    (
        parser_options,
        (hhvm_compat_mode, hhi_mode, codegen, tco_is_systemlib),
    )
}

ocaml_ffi! {
    fn drop_tree_positioned(ocaml_tree: usize) {
        unsafe {
            let pair = Box::from_raw(ocaml_tree as *mut (usize, usize));
            let _ = Box::from_raw(pair.0 as *mut SyntaxTree<'_, PositionedSyntax<'_>, ()>);
            let _ = Box::from_raw(pair.1 as *mut Bump);
        }
    }

    fn rust_parser_errors_positioned(
        source_text: SourceText<'_>,
        ocaml_tree: usize,
        ocaml_parser_options: UnsafeOcamlPtr,
    ) -> Vec<SyntaxError> {
        let (parser_options, (hhvm_compat_mode, hhi_mode, codegen, is_systemlib)) =
            unsafe { parser_options_from_ocaml_only_for_parser_errors(ocaml_parser_options) };
        let (tree, _arena) = unsafe {
            // A rust pointer of (&SyntaxTree, &Arena) is passed to Ocaml in
            // rust_parser_ffi::parse, Ocaml passes it back here
            // PLEASE ENSURE TYPE SAFETY MANUALLY!!!
            let pair = Box::from_raw(ocaml_tree as *mut (usize, usize));
            let tree = <SyntaxTree<'_, PositionedSyntax<'_>, ()>>::ffi_pointer_into_boxed(
                pair.0,
                &source_text,
            );
            let arena = Box::from_raw(pair.1 as *mut Bump);
            (tree, arena)
        };
        let (errors, _) = rust_parser_errors::parse_errors(
            &tree,
            parser_options,
            hhvm_compat_mode,
            hhi_mode,
            codegen,
            is_systemlib,
            HashSet::default(),
        );
        errors
    }
}
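// A minimal sketch (hypothetical; not part of the FFI file above) of the
// positional-decoding convention that function relies on: each OCaml record
// field is read by word offset, so the Rust side must list fields in exactly
// the order of src/options/parserOptions.ml. Here the OCaml block is
// simulated with a plain slice of flags; `DemoOptions` and its fields are
// invented for illustration.
#[derive(Default, Debug)]
struct DemoOptions {
    hhvm_compat_mode: bool,
    hhi_mode: bool,
    codegen: bool,
}

fn decode_by_position(words: &[bool]) -> DemoOptions {
    // The index order *is* the contract: reordering fields on one side but
    // not the other silently swaps options, which is why the real code
    // carries a "keep in sync" comment instead of deriving the layout.
    DemoOptions {
        hhvm_compat_mode: words[0],
        hhi_mode: words[1],
        codegen: words[2],
    }
}

fn main() {
    println!("{:?}", decode_by_position(&[true, false, true]));
}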
TOML
hhvm/hphp/hack/src/parser/schema/Cargo.toml
# @generated by autocargo

[package]
name = "full_fidelity_schema_version_number"
version = "0.0.0"
edition = "2021"

[lib]
path = "full_fidelity_schema_version_number.rs"
hhvm/hphp/hack/src/parser/schema/dune
(library
 (name parser_schema_def)
 (wrapped false)
 (modules schema_definition token_schema_definition operator_schema_definition)
 (libraries collections))

(library
 (name parser_schema)
 (wrapped false)
 (modules full_fidelity_schema)
 (libraries parser_schema_def core))
OCaml
hhvm/hphp/hack/src/parser/schema/full_fidelity_schema.ml
(* * Copyright (c) 2016, Facebook, Inc. * All rights reserved. * * This source code is licensed under the MIT license found in the * LICENSE file in the "hack" directory of this source tree. * *) (* If you make changes to the schema that cause it to serialize / deserialize differently, please update this version number *) let full_fidelity_schema_version_number = "2023-07-25-0000" (* TODO: Consider basing the version number on an auto-generated hash of a file rather than relying on people remembering to update it. *) (* TODO: It may be worthwhile to investigate how Thrift describes data types and use that standard. *) include Operator_schema_definition include Token_schema_definition include Schema_definition let schema_map = let add map ({ kind_name; _ } as schema_node) = SMap.add kind_name schema_node map in List.fold_left add SMap.empty @@ { kind_name = "Token"; type_name = "Token.t"; func_name = "token"; description = "token"; prefix = "token"; aggregates = [Expression; Name]; fields = []; } :: { kind_name = "error"; type_name = "error"; func_name = "error"; description = "error"; prefix = "error"; aggregates = []; fields = []; } :: schema type trivia_node = { trivia_kind: string; trivia_text: string; } type transformation = { pattern: string; func: schema_node -> string; } type token_transformation = { token_pattern: string; token_func: token_node list -> string; } type trivia_transformation = { trivia_pattern: string; trivia_func: trivia_node list -> string; } type aggregate_transformation = { aggregate_pattern: string; aggregate_func: aggregate_type -> string; } type operator_transformation = { operator_pattern: string; operator_func: operator_node list -> string; } type template_file = { filename: string; template: string; transformations: transformation list; token_transformations: token_transformation list; token_no_text_transformations: token_transformation list; token_variable_text_transformations: token_transformation list; token_given_text_transformations: token_transformation list; trivia_transformations: trivia_transformation list; aggregate_transformations: aggregate_transformation list; operator_transformations: operator_transformation list; } let make_template_file ?(transformations = []) ?(token_transformations = []) ?(token_no_text_transformations = []) ?(token_given_text_transformations = []) ?(token_variable_text_transformations = []) ?(trivia_transformations = []) ?(aggregate_transformations = []) ?(operator_transformations = []) ~filename ~template () = { filename; template; transformations; token_transformations; token_no_text_transformations; token_given_text_transformations; token_variable_text_transformations; trivia_transformations; aggregate_transformations; operator_transformations; } module Language_flags = struct let php_and_hack = "php_and_hack" let hack_only = "hack_only" let is_hack_only : string -> bool = function | "php_and_hack" -> false | "hack_only" -> true | f -> failwith ("Unknown language flag " ^ f ^ " for token.") end module LF = Language_flags module Optional_flags = struct let xhp = "xhp" (* See documentation of token_node.allowed_as_identifier. 
*) let allowed_as_identifier = "allowed_as_identifier" let is_recognized : string -> bool = function | "xhp" | "allowed_as_identifier" -> true | _ -> false let is_xhp : string list -> bool = (fun flags -> List.mem xhp flags) let is_allowed_as_identifier : string list -> bool = function | flags -> List.mem allowed_as_identifier flags end module OF = Optional_flags let trivia_node_from_list l = match l with | [trivia_kind; trivia_text] -> { trivia_kind; trivia_text } | _ -> failwith "bad trivia schema" let trivia_kinds = List.map trivia_node_from_list [ ["WhiteSpace"; "whitespace"]; ["EndOfLine"; "end_of_line"]; ["DelimitedComment"; "delimited_comment"]; ["SingleLineComment"; "single_line_comment"]; ["FixMe"; "fix_me"]; ["IgnoreError"; "ignore_error"]; ["FallThrough"; "fall_through"]; ["ExtraTokenError"; "extra_token_error"]; ] let escape_token_text t = (* add one extra backslash because it is removed by Str.replace_first downstream *) if t = "\\" then "\\\\\\" else t let map_and_concat_separated separator f items = String.concat separator (List.map f items) let map_and_concat f items = map_and_concat_separated "" f items let filter_map_concat p f items = map_and_concat f (List.filter p items) let transform_schema f = map_and_concat f schema let transform_aggregate f = map_and_concat f generated_aggregate_types let replace pattern new_text source = Str.replace_first (Str.regexp pattern) new_text source let generate_string template = let syntax_folder s x = replace x.pattern (transform_schema x.func) s in let tokens_folder token_list s x = replace x.token_pattern (x.token_func token_list) s in let trivia_folder trivia_list s x = replace x.trivia_pattern (x.trivia_func trivia_list) s in let aggregate_folder s x = replace x.aggregate_pattern (transform_aggregate x.aggregate_func) s in let operator_folder operators s x = replace x.operator_pattern (x.operator_func operators) s in let result = List.fold_left syntax_folder template.template template.transformations in let result = List.fold_left (tokens_folder tokens) result template.token_transformations in let result = List.fold_left (tokens_folder no_text_tokens) result template.token_no_text_transformations in let result = List.fold_left (tokens_folder given_text_tokens) result template.token_given_text_transformations in let result = List.fold_left (tokens_folder variable_text_tokens) result template.token_variable_text_transformations in let result = List.fold_left (trivia_folder trivia_kinds) result template.trivia_transformations in let result = List.fold_left (operator_folder operators) result template.operator_transformations in let result = List.fold_left aggregate_folder result template.aggregate_transformations in result let format_ocaml src path : string = (* Write the string to a temporary file. *) let tmp_filename = Filename.temp_file "" (Filename.basename path) in let file = Core.Out_channel.create tmp_filename in Printf.fprintf file "%s" src; Core.Out_channel.close file; let ocamlformat_path = Option.value (Stdlib.Sys.getenv_opt "OCAMLFORMAT_PATH") ~default:"../tools/third-party/ocamlformat/ocamlformat" in (* Run ocamlformat on the file. *) let cmd = Printf.sprintf "%s -i --name=%s %s" ocamlformat_path path tmp_filename in ignore (Sys.command cmd); (* Read the formatted file, then delete it. 
*) let res = Core.In_channel.with_file tmp_filename ~f:(fun channel -> Core.In_channel.input_all channel) in Sys.remove tmp_filename; res let generate_formatted_string (template : template_file) : string = let open Core in let s = generate_string template in let has_suffix s = String.is_suffix template.filename ~suffix:s in if has_suffix ".ml" || has_suffix ".mli" then format_ocaml s template.filename else s let generate_file (template : template_file) : unit = let open Core in let filename = template.filename in let file = Out_channel.create filename in let s = generate_formatted_string template in Printf.fprintf file "%s" s; Out_channel.close file module GenerateFFJSONSchema = struct let to_json_trivia { trivia_kind; trivia_text } = Printf.sprintf " { \"trivia_kind_name\" : \"%s\", \"trivia_type_name\" : \"%s\" }" trivia_kind trivia_text let to_json_given_text x = Printf.sprintf " { \"token_kind\" : \"%s\", \"token_text\" : \"%s\" }, " x.token_kind (escape_token_text x.token_text) let to_json_variable_text x = Printf.sprintf " { \"token_kind\" : \"%s\", \"token_text\" : null }, " x.token_kind let to_json_ast_nodes x = let mapper (f, _) = Printf.sprintf "{ \"field_name\" : \"%s\" }" f in let fields = String.concat ",\n " (List.map mapper x.fields) in Printf.sprintf " { \"kind_name\" : \"%s\", \"type_name\" : \"%s\", \"description\" : \"%s\", \"prefix\" : \"%s\", \"fields\" : [ %s ] }, " x.kind_name x.type_name x.description x.prefix fields let full_fidelity_json_schema_template = "{ \"description\" : \"@" ^ "generated JSON schema of the Hack Full Fidelity Parser AST\", \"version\" : \"" ^ full_fidelity_schema_version_number ^ "\", \"trivia\" : [ TRIVIA_KINDS ], \"tokens\" : [ GIVEN_TEXT_TOKENS VARIABLE_TEXT_TOKENS { \"token_kind\" : \"EndOfFile\", \"token_text\" : null } ], \"AST\" : [ AST_NODES { \"kind_name\" : \"Token\", \"type_name\" : \"token\", \"description\" : \"token\", \"prefix\" : \"\", \"fields\" : [ { \"field_name\" : \"leading\" }, { \"field_name\" : \"trailing\" } ] }, { \"kind_name\" : \"Missing\", \"type_name\" : \"missing\", \"description\" : \"missing\", \"prefix\" : \"\", \"fields\" : [ ] }, { \"kind_name\" : \"SyntaxList\", \"type_name\" : \"syntax_list\", \"description\" : \"syntax_list\", \"prefix\" : \"\", \"fields\" : [ ] } ] }" let full_fidelity_json_schema = make_template_file ~transformations:[{ pattern = "AST_NODES"; func = to_json_ast_nodes }] ~token_given_text_transformations: [ { token_pattern = "GIVEN_TEXT_TOKENS"; token_func = map_and_concat to_json_given_text; }; ] ~token_variable_text_transformations: [ { token_pattern = "VARIABLE_TEXT_TOKENS"; token_func = map_and_concat to_json_variable_text; }; ] ~trivia_transformations: [ { trivia_pattern = "TRIVIA_KINDS"; trivia_func = map_and_concat_separated ",\n" to_json_trivia; }; ] ~template:full_fidelity_json_schema_template ~filename:"hphp/hack/src/parser/js/full_fidelity_schema.json" () end let schema_as_json () = generate_string GenerateFFJSONSchema.full_fidelity_json_schema
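// A minimal Rust sketch (illustrative only; the real generator above is
// OCaml) of the codegen scheme in full_fidelity_schema.ml: a template
// contains placeholder patterns, and each transformation folds over the
// template, replacing the first occurrence of its pattern with text derived
// from the schema, as `generate_string` does. Plain substring replacement
// stands in for Str.replace_first's regex matching; the template and the
// generated text below are made-up examples.
fn generate_string(template: &str, transformations: &[(&str, String)]) -> String {
    transformations
        .iter()
        .fold(template.to_string(), |acc, (pattern, generated)| {
            // Replace only the first occurrence, mirroring Str.replace_first.
            acc.replacen(pattern, generated, 1)
        })
}

fn main() {
    let template = "pub enum TriviaKind {\nTRIVIA_KINDS\n}\n";
    let transformations = vec![(
        "TRIVIA_KINDS",
        "    WhiteSpace,\n    EndOfLine,".to_string(),
    )];
    print!("{}", generate_string(template, &transformations));
}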
Rust
hhvm/hphp/hack/src/parser/schema/full_fidelity_schema_version_number.rs
/**
 * Copyright (c) 2016, Facebook, Inc.
 * All rights reserved.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the "hack" directory of this source tree. An additional
 * grant of patent rights can be found in the PATENTS file in the same
 * directory.
 *
 **
 *
 * THIS FILE IS @generated; DO NOT EDIT IT
 * To regenerate this file, run
 *
 *   buck run //hphp/hack/src:generate_full_fidelity
 *
 **
 *
 */
pub const VERSION: &str = "2023-07-25-0000";
OCaml
hhvm/hphp/hack/src/parser/schema/operator_schema_definition.ml
(*
 * Copyright (c) 2016, Facebook, Inc.
 * All rights reserved.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the "hack" directory of this source tree.
 *
 *)

type operator_node = {
  name: string;
  is_assignment: bool;
  is_comparison: bool;
}

let make_operator ?(is_assignment = false) ?(is_comparison = false) name =
  { name; is_assignment; is_comparison }

let operators =
  [
    make_operator "Dollar";
    make_operator "ScopeResolution";
    make_operator "Indexing";
    make_operator "NullSafeMemberSelection";
    make_operator "MemberSelection";
    make_operator "New";
    make_operator "FunctionCall";
    make_operator "Clone";
    make_operator "PostfixIncrement";
    make_operator "PostfixDecrement";
    make_operator "PrefixIncrement";
    make_operator "PrefixDecrement";
    make_operator "Exponent";
    make_operator "ErrorControl";
    make_operator "Cast";
    make_operator "NullableAs";
    make_operator "Is";
    make_operator "Instanceof";
    make_operator "As";
    make_operator "Upcast";
    make_operator "UnaryPlus";
    make_operator "UnaryMinus";
    make_operator "Not";
    make_operator "LogicalNot";
    make_operator "Remainder";
    make_operator "Multiplication";
    make_operator "Division";
    make_operator "Subtraction";
    make_operator "Concatenation";
    make_operator "Addition";
    make_operator "RightShift";
    make_operator "LeftShift";
    make_operator "Spaceship" ~is_comparison:true;
    make_operator "LessThanOrEqual" ~is_comparison:true;
    make_operator "LessThan" ~is_comparison:true;
    make_operator "GreaterThanOrEqual" ~is_comparison:true;
    make_operator "GreaterThan" ~is_comparison:true;
    make_operator "StrictNotEqual" ~is_comparison:true;
    make_operator "StrictEqual" ~is_comparison:true;
    make_operator "NotEqual" ~is_comparison:true;
    make_operator "Equal" ~is_comparison:true;
    make_operator "And";
    make_operator "ExclusiveOr";
    make_operator "Or";
    make_operator "LogicalAnd";
    make_operator "LogicalOr";
    make_operator "Coalesce";
    make_operator "DegenerateConditional";
    make_operator "ConditionalQuestion";
    make_operator "ConditionalColon";
    make_operator "Pipe";
    make_operator "SubtractionAssignment" ~is_assignment:true;
    make_operator "RightShiftAssignment" ~is_assignment:true;
    make_operator "RemainderAssignment" ~is_assignment:true;
    make_operator "OrAssignment" ~is_assignment:true;
    make_operator "MultiplicationAssignment" ~is_assignment:true;
    make_operator "LeftShiftAssignment" ~is_assignment:true;
    make_operator "ExponentiationAssignment" ~is_assignment:true;
    make_operator "ExclusiveOrAssignment" ~is_assignment:true;
    make_operator "DivisionAssignment" ~is_assignment:true;
    make_operator "ConcatenationAssignment" ~is_assignment:true;
    make_operator "CoalesceAssignment" ~is_assignment:true;
    make_operator "Assignment" ~is_assignment:true;
    make_operator "AndAssignment" ~is_assignment:true;
    make_operator "AdditionAssignment" ~is_assignment:true;
    make_operator "Print";
    make_operator "Require";
    make_operator "RequireOnce";
    make_operator "Include";
    make_operator "IncludeOnce";
    make_operator "Await";
    make_operator "Readonly";
    make_operator "EnumClassLabel";
    make_operator "Package";
  ]
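// A hypothetical Rust rendering (not in the source) of the pattern above:
// make_operator takes optional labelled arguments defaulting to false, which
// maps naturally onto Default plus struct-update syntax in Rust. The
// `OperatorNode` struct and `make_operator` helper here are illustrative
// counterparts, not part of the schema crates.
#[derive(Default, Debug)]
struct OperatorNode {
    name: &'static str,
    is_assignment: bool,
    is_comparison: bool,
}

fn make_operator(name: &'static str) -> OperatorNode {
    // Both flags fall back to false, like the ?(... = false) defaults above.
    OperatorNode {
        name,
        ..OperatorNode::default()
    }
}

fn main() {
    let ops = vec![
        make_operator("Dollar"),
        OperatorNode {
            is_comparison: true,
            ..make_operator("LessThan")
        },
        OperatorNode {
            is_assignment: true,
            ..make_operator("Assignment")
        },
    ];
    println!("{:?}", ops);
}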
OCaml
hhvm/hphp/hack/src/parser/schema/schema_definition.ml
(* * Copyright (c) 2016, Facebook, Inc. * All rights reserved. * * This source code is licensed under the MIT license found in the * LICENSE file in the "hack" directory of this source tree. * *) type aggregate_type = | TopLevelDeclaration | Expression | Specifier | AttributeSpecification | Parameter | ClassBodyDeclaration | EnumClassBodyDeclaration | RefinementMember | Statement | SwitchLabel | LambdaBody | ConstructorExpression | NamespaceInternals | XHPAttribute | ObjectCreationWhat | Pattern | TODO | Name type child_spec = | Token (* Special case, since it's so common, synonym of `Just "Token"` *) | Just of string | Aggregate of aggregate_type | ZeroOrMore of child_spec | ZeroOrOne of child_spec type schema_node = { kind_name: string; type_name: string; func_name: string; description: string; prefix: string; aggregates: aggregate_type list; fields: (string * child_spec) list; } let schema : schema_node list = [ { kind_name = "EndOfFile"; type_name = "end_of_file"; func_name = "end_of_file"; description = "end_of_file"; prefix = "end_of_file"; aggregates = [TopLevelDeclaration; TODO]; fields = [("token", Token)]; }; { kind_name = "Script"; type_name = "script"; func_name = "script"; description = "script"; prefix = "script"; aggregates = []; fields = [("declarations", ZeroOrMore (Aggregate TopLevelDeclaration))]; }; { kind_name = "QualifiedName"; type_name = "qualified_name"; func_name = "qualified_name"; description = "qualified_name"; prefix = "qualified_name"; aggregates = [Name]; fields = [("parts", ZeroOrMore Token)]; }; { kind_name = "ModuleName"; type_name = "module_name"; func_name = "module_name"; description = "module_name"; prefix = "module_name"; aggregates = [Name]; fields = [("parts", ZeroOrMore Token)]; }; { kind_name = "SimpleTypeSpecifier"; type_name = "simple_type_specifier"; func_name = "simple_type_specifier"; description = "simple_type_specifier"; prefix = "simple_type"; aggregates = [Specifier]; fields = [("specifier", Aggregate Name)]; }; { kind_name = "LiteralExpression"; type_name = "literal_expression"; func_name = "literal_expression"; description = "literal"; prefix = "literal"; aggregates = [Expression; ConstructorExpression; LambdaBody]; fields = [("expression", ZeroOrMore (Aggregate Expression))]; }; { kind_name = "PrefixedStringExpression"; type_name = "prefixed_string_expression"; func_name = "prefixed_string_expression"; description = "prefixed_string"; prefix = "prefixed_string"; aggregates = [Expression; ConstructorExpression; LambdaBody]; fields = [("name", Token); ("str", Token)]; }; { kind_name = "PrefixedCodeExpression"; type_name = "prefixed_code_expression"; func_name = "prefixed_code_expression"; description = "prefixed_code"; prefix = "prefixed_code"; aggregates = [Expression; ConstructorExpression; LambdaBody]; fields = [ ("prefix", Token); ("left_backtick", Token); ("body", Aggregate LambdaBody); ("right_backtick", Token); ]; }; { kind_name = "VariableExpression"; type_name = "variable_expression"; func_name = "variable_expression"; description = "variable"; prefix = "variable"; aggregates = [Expression; ConstructorExpression; LambdaBody]; fields = [("expression", Token)]; }; { kind_name = "PipeVariableExpression"; type_name = "pipe_variable_expression"; func_name = "pipe_variable_expression"; description = "pipe_variable"; prefix = "pipe_variable"; aggregates = [Expression; ConstructorExpression; LambdaBody]; fields = [("expression", Token)]; }; { kind_name = "FileAttributeSpecification"; type_name = "file_attribute_specification"; 
func_name = "file_attribute_specification"; description = "file_attribute_specification"; prefix = "file_attribute_specification"; aggregates = [TopLevelDeclaration]; fields = [ ("left_double_angle", Token); ("keyword", Token); ("colon", Token); ("attributes", ZeroOrMore (Just "ConstructorCall")); ("right_double_angle", Token); ]; } (* TODO: Make all uses of attribute_spec consistent in the API. *); { kind_name = "EnumDeclaration"; type_name = "enum_declaration"; func_name = "enum_declaration"; description = "enum_declaration"; prefix = "enum"; aggregates = [TopLevelDeclaration]; fields = [ ("attribute_spec", ZeroOrOne (Aggregate AttributeSpecification)); ("modifiers", ZeroOrOne Token); ("keyword", Token); ("name", Token); ("colon", Token); ("base", Aggregate Specifier); ("type", ZeroOrOne (Just "TypeConstraint")); ("left_brace", Token); ("use_clauses", ZeroOrMore (Just "EnumUse")); ("enumerators", ZeroOrMore (Just "Enumerator")); ("right_brace", Token); ]; }; { kind_name = "EnumUse"; type_name = "enum_use"; func_name = "enum_use"; description = "enum_use"; prefix = "enum_use"; aggregates = []; fields = [ ("keyword", Token); ("names", ZeroOrMore (Aggregate Specifier)); ("semicolon", Token); ]; }; { kind_name = "Enumerator"; type_name = "enumerator"; func_name = "enumerator"; description = "enumerator"; prefix = "enumerator"; aggregates = []; fields = [ ("name", Token); ("equal", Token); ("value", Aggregate Expression); ("semicolon", Token); ]; }; { kind_name = "EnumClassDeclaration"; type_name = "enum_class_declaration"; func_name = "enum_class_declaration"; description = "enum_class_declaration"; prefix = "enum_class"; aggregates = [TopLevelDeclaration]; fields = [ ("attribute_spec", ZeroOrOne (Aggregate AttributeSpecification)); ("modifiers", ZeroOrOne Token); ("enum_keyword", Token); ("class_keyword", Token); ("name", Token); ("colon", Token); ("base", Aggregate Specifier); ("extends", ZeroOrOne Token); ("extends_list", ZeroOrMore (Aggregate Specifier)); ("left_brace", Token); ("elements", ZeroOrMore (Aggregate EnumClassBodyDeclaration)); ("right_brace", Token); ]; }; { kind_name = "EnumClassEnumerator"; type_name = "enum_class_enumerator"; func_name = "enum_class_enumerator"; description = "enum_class_enumerator"; prefix = "enum_class_enumerator"; aggregates = [EnumClassBodyDeclaration]; fields = [ ("modifiers", ZeroOrOne Token); ("type", Aggregate Specifier); ("name", Token); ("initializer", ZeroOrOne (Just "SimpleInitializer")); ("semicolon", Token); ]; }; { kind_name = "AliasDeclaration"; type_name = "alias_declaration"; func_name = "alias_declaration"; description = "alias_declaration"; prefix = "alias"; aggregates = [TopLevelDeclaration]; fields = [ ("attribute_spec", ZeroOrOne (Aggregate AttributeSpecification)); ("modifiers", ZeroOrOne Token); ("module_kw_opt", ZeroOrOne Token); ("keyword", Token); ("name", ZeroOrOne Token); ("generic_parameter", ZeroOrOne (Just "TypeParameters")); ("constraint", ZeroOrOne (Just "TypeConstraint")); ("equal", ZeroOrOne Token); ("type", Aggregate Specifier); ("semicolon", Token); ]; }; { kind_name = "ContextAliasDeclaration"; type_name = "context_alias_declaration"; func_name = "context_alias_declaration"; description = "context_alias_declaration"; prefix = "ctx_alias"; aggregates = [TopLevelDeclaration]; fields = [ ("attribute_spec", ZeroOrOne (Aggregate AttributeSpecification)); ("keyword", Token); ("name", ZeroOrOne Token); ("generic_parameter", ZeroOrOne (Just "TypeParameters")); ("as_constraint", ZeroOrOne (Just "ContextConstraint")); 
("equal", ZeroOrOne Token); ("context", Aggregate Specifier); ("semicolon", Token); ]; }; { kind_name = "CaseTypeDeclaration"; type_name = "case_type_declaration"; func_name = "case_type_declaration"; description = "case_type_declaration"; prefix = "case_type"; aggregates = [TopLevelDeclaration]; fields = [ ("attribute_spec", ZeroOrOne (Aggregate AttributeSpecification)); ("modifiers", ZeroOrOne Token); ("case_keyword", Token); ("type_keyword", Token); ("name", Token); ("generic_parameter", ZeroOrOne (Just "TypeParameters")); ("as", ZeroOrOne Token); ("bounds", ZeroOrMore (Aggregate Specifier)); ("equal", Token); ("variants", ZeroOrMore (Aggregate Specifier)); ("semicolon", Token); ]; }; { kind_name = "CaseTypeVariant"; type_name = "case_type_variant"; func_name = "case_type_variant"; description = "case_type_variant"; prefix = "case_type_variant"; aggregates = []; fields = [("bar", ZeroOrOne Token); ("type", Aggregate Specifier)]; }; { kind_name = "PropertyDeclaration"; type_name = "property_declaration"; func_name = "property_declaration"; description = "property_declaration"; prefix = "property"; aggregates = [ClassBodyDeclaration]; fields = [ ("attribute_spec", ZeroOrOne (Aggregate AttributeSpecification)); ("modifiers", ZeroOrMore Token); ("type", ZeroOrOne (Aggregate Specifier)); ("declarators", ZeroOrMore (Just "PropertyDeclarator")); ("semicolon", Token); ]; }; { kind_name = "PropertyDeclarator"; type_name = "property_declarator"; func_name = "property_declarator"; description = "property_declarator"; prefix = "property"; aggregates = []; fields = [("name", Token); ("initializer", ZeroOrOne (Just "SimpleInitializer"))]; }; { kind_name = "NamespaceDeclaration"; type_name = "namespace_declaration"; func_name = "namespace_declaration"; description = "namespace_declaration"; prefix = "namespace"; aggregates = [TopLevelDeclaration]; fields = [ ("header", Just "NamespaceDeclarationHeader"); ("body", Aggregate NamespaceInternals); ]; }; { kind_name = "NamespaceDeclarationHeader"; type_name = "namespace_declaration_header"; func_name = "namespace_declaration_header"; description = "namespace_declaration_header"; prefix = "namespace"; aggregates = [TopLevelDeclaration]; fields = [("keyword", Token); ("name", ZeroOrOne (Aggregate Name))]; }; { kind_name = "NamespaceBody"; type_name = "namespace_body"; func_name = "namespace_body"; description = "namespace_body"; prefix = "namespace"; aggregates = [NamespaceInternals]; fields = [ ("left_brace", Token); ("declarations", ZeroOrMore (Aggregate TopLevelDeclaration)); ("right_brace", Token); ]; }; { kind_name = "NamespaceEmptyBody"; type_name = "namespace_empty_body"; func_name = "namespace_empty_body"; description = "namespace_empty_body"; prefix = "namespace"; aggregates = [NamespaceInternals]; fields = [("semicolon", Token)]; }; { kind_name = "NamespaceUseDeclaration"; type_name = "namespace_use_declaration"; func_name = "namespace_use_declaration"; description = "namespace_use_declaration"; prefix = "namespace_use"; aggregates = [TopLevelDeclaration]; fields = [ ("keyword", Token); ("kind", ZeroOrOne Token); ("clauses", ZeroOrMore (Just "NamespaceUseClause")); ("semicolon", ZeroOrOne Token); ]; }; { kind_name = "NamespaceGroupUseDeclaration"; type_name = "namespace_group_use_declaration"; func_name = "namespace_group_use_declaration"; description = "namespace_group_use_declaration"; prefix = "namespace_group_use"; aggregates = [TopLevelDeclaration]; fields = [ ("keyword", Token); ("kind", ZeroOrOne Token); ("prefix", Aggregate Name); 
("left_brace", Token); ("clauses", ZeroOrMore (Just "NamespaceUseClause")); ("right_brace", Token); ("semicolon", Token); ]; }; { kind_name = "NamespaceUseClause"; type_name = "namespace_use_clause"; func_name = "namespace_use_clause"; description = "namespace_use_clause"; prefix = "namespace_use"; aggregates = []; fields = [ ("clause_kind", ZeroOrOne Token); ("name", Aggregate Name); ("as", ZeroOrOne Token); ("alias", ZeroOrOne Token); ]; }; { kind_name = "FunctionDeclaration"; type_name = "function_declaration"; func_name = "function_declaration"; description = "function_declaration"; prefix = "function"; aggregates = [TopLevelDeclaration]; fields = [ ("attribute_spec", ZeroOrOne (Aggregate AttributeSpecification)); ("declaration_header", Just "FunctionDeclarationHeader"); ("body", Just "CompoundStatement"); ]; }; { kind_name = "FunctionDeclarationHeader"; type_name = "function_declaration_header"; func_name = "function_declaration_header"; description = "function_declaration_header"; prefix = "function"; aggregates = []; fields = [ ("modifiers", ZeroOrMore Token); ("keyword", Token); ("name", Token); ("type_parameter_list", ZeroOrOne (Just "TypeParameters")); ("left_paren", Token); ("parameter_list", ZeroOrMore (Aggregate Parameter)); ("right_paren", Token); ("contexts", ZeroOrOne (Just "Contexts")); ("colon", ZeroOrOne Token); ("readonly_return", ZeroOrOne Token); ("type", ZeroOrOne (Just "AttributizedSpecifier")); ("where_clause", ZeroOrOne (Just "WhereClause")); ]; }; { kind_name = "Contexts"; type_name = "contexts"; func_name = "contexts"; description = "contexts"; prefix = "contexts"; aggregates = [Specifier]; fields = [ ("left_bracket", Token); ("types", ZeroOrMore (Aggregate Specifier)); ("right_bracket", Token); ]; }; { kind_name = "WhereClause"; type_name = "where_clause"; func_name = "where_clause"; description = "where_clause"; prefix = "where_clause"; aggregates = []; fields = [ ("keyword", Token); ("constraints", ZeroOrMore (Just "WhereConstraint")); ]; }; { kind_name = "WhereConstraint"; type_name = "where_constraint"; func_name = "where_constraint"; description = "where_constraint"; prefix = "where_constraint"; aggregates = []; fields = [ ("left_type", Aggregate Specifier); ("operator", Token); ("right_type", Aggregate Specifier); ]; }; { kind_name = "MethodishDeclaration"; type_name = "methodish_declaration"; func_name = "methodish_declaration"; description = "methodish_declaration"; prefix = "methodish"; aggregates = [ClassBodyDeclaration]; fields = [ ("attribute", ZeroOrOne (Aggregate AttributeSpecification)); ("function_decl_header", Just "FunctionDeclarationHeader"); ("function_body", ZeroOrOne (Just "CompoundStatement")); ("semicolon", ZeroOrOne Token); ]; }; { kind_name = "MethodishTraitResolution"; type_name = "methodish_trait_resolution"; func_name = "methodish_trait_resolution"; description = "methodish_trait_resolution"; prefix = "methodish_trait"; aggregates = [ClassBodyDeclaration]; fields = [ ("attribute", ZeroOrOne (Aggregate AttributeSpecification)); ("function_decl_header", Just "FunctionDeclarationHeader"); ("equal", Token); ("name", Aggregate Specifier); ("semicolon", Token); ]; }; { kind_name = "ClassishDeclaration"; type_name = "classish_declaration"; func_name = "classish_declaration"; description = "classish_declaration"; prefix = "classish"; aggregates = [TopLevelDeclaration]; fields = [ ("attribute", ZeroOrOne (Aggregate AttributeSpecification)); ("modifiers", ZeroOrMore Token); ("xhp", ZeroOrOne Token); ("keyword", Token); ("name", Token); 
("type_parameters", ZeroOrOne (Just "TypeParameters")); ("extends_keyword", ZeroOrOne Token); ("extends_list", ZeroOrMore (Aggregate Specifier)); ("implements_keyword", ZeroOrOne Token); ("implements_list", ZeroOrMore (Aggregate Specifier)); ("where_clause", ZeroOrOne (Just "WhereClause")); ("body", Just "ClassishBody"); ]; }; { kind_name = "ClassishBody"; type_name = "classish_body"; func_name = "classish_body"; description = "classish_body"; prefix = "classish_body"; aggregates = []; fields = [ ("left_brace", Token); ("elements", ZeroOrMore (Aggregate ClassBodyDeclaration)); ("right_brace", Token); ]; }; { kind_name = "TraitUse"; type_name = "trait_use"; func_name = "trait_use"; description = "trait_use"; prefix = "trait_use"; aggregates = []; fields = [ ("keyword", Token); ("names", ZeroOrMore (Aggregate Specifier)); ("semicolon", ZeroOrOne Token); ]; }; { kind_name = "RequireClause"; type_name = "require_clause"; func_name = "require_clause"; description = "require_clause"; prefix = "require"; aggregates = [ClassBodyDeclaration]; fields = [ ("keyword", Token); ("kind", Token); ("name", Aggregate Specifier); ("semicolon", Token); ]; }; { kind_name = "ConstDeclaration"; type_name = "const_declaration"; func_name = "const_declaration"; description = "const_declaration"; prefix = "const"; aggregates = [ClassBodyDeclaration; TopLevelDeclaration]; fields = [ ("attribute_spec", ZeroOrOne (Aggregate AttributeSpecification)); ("modifiers", ZeroOrMore Token); ("keyword", Token); ("type_specifier", ZeroOrOne (Aggregate Specifier)); ("declarators", ZeroOrMore (Just "ConstantDeclarator")); ("semicolon", Token); ]; }; { kind_name = "ConstantDeclarator"; type_name = "constant_declarator"; func_name = "constant_declarator"; description = "constant_declarator"; prefix = "constant_declarator"; aggregates = []; fields = [("name", Token); ("initializer", ZeroOrOne (Just "SimpleInitializer"))]; }; { kind_name = "TypeConstDeclaration"; type_name = "type_const_declaration"; func_name = "type_const_declaration"; description = "type_const_declaration"; prefix = "type_const"; aggregates = [ClassBodyDeclaration; EnumClassBodyDeclaration]; fields = [ ("attribute_spec", ZeroOrOne (Aggregate AttributeSpecification)); ("modifiers", ZeroOrOne Token); ("keyword", Token); ("type_keyword", Token); ("name", Token); ("type_parameters", ZeroOrOne (Just "TypeParameters")); ("type_constraints", ZeroOrMore (Just "TypeConstraint")); ("equal", ZeroOrOne Token); ("type_specifier", ZeroOrOne (Aggregate Specifier)); ("semicolon", Token); ]; }; { kind_name = "ContextConstDeclaration"; type_name = "context_const_declaration"; func_name = "context_const_declaration"; description = "context_const_declaration"; prefix = "context_const"; aggregates = [ClassBodyDeclaration]; fields = [ ("modifiers", ZeroOrOne Token); ("const_keyword", Token); ("ctx_keyword", Token); ("name", Token); ("type_parameters", ZeroOrOne (Just "TypeParameters")); ("constraint", ZeroOrMore (Just "ContextConstraint")); ("equal", ZeroOrOne Token); ("ctx_list", ZeroOrOne (Just "Contexts")); ("semicolon", Token); ]; }; { kind_name = "DecoratedExpression"; type_name = "decorated_expression"; func_name = "decorated_expression"; description = "decorated_expression"; prefix = "decorated_expression"; aggregates = [Expression; ConstructorExpression; LambdaBody]; fields = [("decorator", Token); ("expression", Aggregate Expression)]; }; { kind_name = "ParameterDeclaration"; type_name = "parameter_declaration"; func_name = "parameter_declaration"; description = 
"parameter_declaration"; prefix = "parameter"; aggregates = [Parameter]; fields = [ ("attribute", ZeroOrOne (Aggregate AttributeSpecification)); ("visibility", ZeroOrOne Token); ("call_convention", ZeroOrOne Token); ("readonly", ZeroOrOne Token); ("type", ZeroOrOne (Aggregate Specifier)); ("name", Aggregate Expression); ("default_value", ZeroOrOne (Just "SimpleInitializer")); ]; }; { kind_name = "VariadicParameter"; type_name = "variadic_parameter"; func_name = "variadic_parameter"; description = "variadic_parameter"; prefix = "variadic_parameter"; aggregates = [Specifier; Parameter]; fields = [ ("call_convention", ZeroOrOne Token); ("type", ZeroOrOne (Just "SimpleTypeSpecifier")); ("ellipsis", Token); ]; }; { kind_name = "OldAttributeSpecification"; type_name = "old_attribute_specification"; func_name = "old_attribute_specification"; description = "old_attribute_specification"; prefix = "old_attribute_specification"; aggregates = [AttributeSpecification]; fields = [ ("left_double_angle", Token); ("attributes", ZeroOrMore (Just "ConstructorCall")); ("right_double_angle", Token); ]; }; { kind_name = "AttributeSpecification"; type_name = "attribute_specification"; func_name = "attribute_specification"; description = "attribute_specification"; prefix = "attribute_specification"; aggregates = [AttributeSpecification]; fields = [("attributes", ZeroOrMore (Just "Attribute"))]; }; { kind_name = "Attribute"; type_name = "attribute"; func_name = "attribute"; description = "attribute"; prefix = "attribute"; aggregates = []; fields = [("at", Token); ("attribute_name", Just "ConstructorCall")]; }; { kind_name = "InclusionExpression"; type_name = "inclusion_expression"; func_name = "inclusion_expression"; description = "inclusion_expression"; prefix = "inclusion"; aggregates = [Expression; ConstructorExpression; LambdaBody]; fields = [("require", Token); ("filename", Aggregate Expression)]; }; { kind_name = "InclusionDirective"; type_name = "inclusion_directive"; func_name = "inclusion_directive"; description = "inclusion_directive"; prefix = "inclusion"; aggregates = [TopLevelDeclaration; Statement]; fields = [("expression", Just "InclusionExpression"); ("semicolon", Token)]; }; { kind_name = "CompoundStatement"; type_name = "compound_statement"; func_name = "compound_statement"; description = "compound_statement"; prefix = "compound"; aggregates = [TopLevelDeclaration; Statement; LambdaBody]; fields = [ ("left_brace", Token); ("statements", ZeroOrMore (Aggregate Statement)); ("right_brace", Token); ]; }; { kind_name = "ExpressionStatement"; type_name = "expression_statement"; func_name = "expression_statement"; description = "expression_statement"; prefix = "expression_statement"; aggregates = [TopLevelDeclaration; Statement]; fields = [("expression", ZeroOrOne (Aggregate Expression)); ("semicolon", Token)]; }; { kind_name = "MarkupSection"; type_name = "markup_section"; func_name = "markup_section"; description = "markup_section"; prefix = "markup"; aggregates = [TopLevelDeclaration; Statement]; fields = [("hashbang", Token); ("suffix", ZeroOrOne (Just "MarkupSuffix"))]; }; { kind_name = "MarkupSuffix"; type_name = "markup_suffix"; func_name = "markup_suffix"; description = "markup_suffix"; prefix = "markup_suffix"; aggregates = [TopLevelDeclaration; Statement]; fields = [("less_than_question", Token); ("name", ZeroOrOne Token)]; }; { kind_name = "UnsetStatement"; type_name = "unset_statement"; func_name = "unset_statement"; description = "unset_statement"; prefix = "unset"; aggregates = 
[TopLevelDeclaration; Statement]; fields = [ ("keyword", Token); ("left_paren", Token); ("variables", ZeroOrMore (Aggregate Expression)); ("right_paren", Token); ("semicolon", Token); ]; }; { kind_name = "DeclareLocalStatement"; type_name = "declare_local_statement"; func_name = "declare_local_statement"; description = "declare_local_statement"; prefix = "declare_local"; aggregates = [TopLevelDeclaration; Statement]; fields = [ ("keyword", Token); ("variable", Aggregate Expression); ("colon", Token); ("type", Aggregate Specifier); ("initializer", ZeroOrOne (Just "SimpleInitializer")); ("semicolon", Token); ]; }; { kind_name = "UsingStatementBlockScoped"; type_name = "using_statement_block_scoped"; func_name = "using_statement_block_scoped"; description = "using_statement_block_scoped"; prefix = "using_block"; aggregates = [TopLevelDeclaration; Statement]; fields = [ ("await_keyword", ZeroOrOne Token); ("using_keyword", Token); ("left_paren", Token); ("expressions", ZeroOrMore (Aggregate Expression)); ("right_paren", Token); ("body", Aggregate Statement); ]; }; { kind_name = "UsingStatementFunctionScoped"; type_name = "using_statement_function_scoped"; func_name = "using_statement_function_scoped"; description = "using_statement_function_scoped"; prefix = "using_function"; aggregates = [TopLevelDeclaration; Statement]; fields = [ ("await_keyword", ZeroOrOne Token); ("using_keyword", Token); ("expression", Aggregate Expression); ("semicolon", Token); ]; }; { kind_name = "WhileStatement"; type_name = "while_statement"; func_name = "while_statement"; description = "while_statement"; prefix = "while"; aggregates = [TopLevelDeclaration; Statement]; fields = [ ("keyword", Token); ("left_paren", Token); ("condition", Aggregate Expression); ("right_paren", Token); ("body", Aggregate Statement); ]; }; { kind_name = "IfStatement"; type_name = "if_statement"; func_name = "if_statement"; description = "if_statement"; prefix = "if"; aggregates = [TopLevelDeclaration; Statement]; fields = [ ("keyword", Token); ("left_paren", Token); ("condition", Aggregate Expression); ("right_paren", Token); ("statement", Aggregate Statement); ("else_clause", ZeroOrOne (Just "ElseClause")); ]; }; { kind_name = "ElseClause"; type_name = "else_clause"; func_name = "else_clause"; description = "else_clause"; prefix = "else"; aggregates = []; fields = [("keyword", Token); ("statement", Aggregate Statement)]; }; { kind_name = "TryStatement"; type_name = "try_statement"; func_name = "try_statement"; description = "try_statement"; prefix = "try"; aggregates = [TopLevelDeclaration; Statement]; fields = [ ("keyword", Token); ("compound_statement", Just "CompoundStatement"); ("catch_clauses", ZeroOrMore (Just "CatchClause")); ("finally_clause", ZeroOrOne (Just "FinallyClause")); ]; }; { kind_name = "CatchClause"; type_name = "catch_clause"; func_name = "catch_clause"; description = "catch_clause"; prefix = "catch"; aggregates = []; fields = [ ("keyword", Token); ("left_paren", Token); ("type", Just "SimpleTypeSpecifier"); ("variable", Token); ("right_paren", Token); ("body", Just "CompoundStatement"); ]; }; { kind_name = "FinallyClause"; type_name = "finally_clause"; func_name = "finally_clause"; description = "finally_clause"; prefix = "finally"; aggregates = []; fields = [("keyword", Token); ("body", Just "CompoundStatement")]; }; { kind_name = "DoStatement"; type_name = "do_statement"; func_name = "do_statement"; description = "do_statement"; prefix = "do"; aggregates = [TopLevelDeclaration; Statement]; fields = [ ("keyword", 
Token); ("body", Aggregate Statement); ("while_keyword", Token); ("left_paren", Token); ("condition", Aggregate Expression); ("right_paren", Token); ("semicolon", Token); ]; }; { kind_name = "ForStatement"; type_name = "for_statement"; func_name = "for_statement"; description = "for_statement"; prefix = "for"; aggregates = [TopLevelDeclaration; Statement]; fields = [ ("keyword", Token); ("left_paren", Token); ("initializer", ZeroOrMore (Aggregate Expression)); ("first_semicolon", Token); ("control", ZeroOrMore (Aggregate Expression)); ("second_semicolon", Token); ("end_of_loop", ZeroOrMore (Aggregate Expression)); ("right_paren", Token); ("body", Aggregate Statement); ]; }; { kind_name = "ForeachStatement"; type_name = "foreach_statement"; func_name = "foreach_statement"; description = "foreach_statement"; prefix = "foreach"; aggregates = [TopLevelDeclaration; Statement]; fields = [ ("keyword", Token); ("left_paren", Token); ("collection", Aggregate Expression); ("await_keyword", ZeroOrOne Token); ("as", Token); ("key", ZeroOrOne (Aggregate Expression)); ("arrow", ZeroOrOne Token); ("value", Aggregate Expression); ("right_paren", Token); ("body", Aggregate Statement); ]; }; { kind_name = "SwitchStatement"; type_name = "switch_statement"; func_name = "switch_statement"; description = "switch_statement"; prefix = "switch"; aggregates = [Statement]; fields = [ ("keyword", Token); ("left_paren", Token); ("expression", Aggregate Expression); ("right_paren", Token); ("left_brace", Token); ("sections", ZeroOrMore (Just "SwitchSection")); ("right_brace", Token); ]; }; { kind_name = "SwitchSection"; type_name = "switch_section"; func_name = "switch_section"; description = "switch_section"; prefix = "switch_section"; aggregates = []; fields = [ ("labels", ZeroOrMore (Aggregate SwitchLabel)); ("statements", ZeroOrMore (Aggregate TopLevelDeclaration)); ("fallthrough", ZeroOrOne (Just "SwitchFallthrough")); ]; }; { kind_name = "SwitchFallthrough"; type_name = "switch_fallthrough"; func_name = "switch_fallthrough"; description = "switch_fallthrough"; prefix = "fallthrough"; aggregates = [TopLevelDeclaration; Statement]; fields = [("keyword", Token); ("semicolon", Token)]; }; { kind_name = "CaseLabel"; type_name = "case_label"; func_name = "case_label"; description = "case_label"; prefix = "case"; aggregates = [SwitchLabel]; fields = [ ("keyword", Token); ("expression", Aggregate Expression); ("colon", Token); ]; }; { kind_name = "DefaultLabel"; type_name = "default_label"; func_name = "default_label"; description = "default_label"; prefix = "default"; aggregates = [SwitchLabel]; fields = [("keyword", Token); ("colon", Token)]; }; { kind_name = "MatchStatement"; type_name = "match_statement"; func_name = "match_statement"; description = "match_statement"; prefix = "match_statement"; aggregates = [Statement]; fields = [ ("keyword", Token); ("left_paren", Token); ("expression", Aggregate Expression); ("right_paren", Token); ("left_brace", Token); ("arms", ZeroOrMore (Just "MatchStatementArm")); ("right_brace", Token); ]; }; { kind_name = "MatchStatementArm"; type_name = "match_statement_arm"; func_name = "match_statement_arm"; description = "match_statement_arm"; prefix = "match_statement_arm"; aggregates = []; fields = [ ("pattern", Aggregate Pattern); ("arrow", Token); ("body", Aggregate Statement); ]; }; { kind_name = "ReturnStatement"; type_name = "return_statement"; func_name = "return_statement"; description = "return_statement"; prefix = "return"; aggregates = [TopLevelDeclaration; Statement]; 
fields = [ ("keyword", Token); ("expression", ZeroOrOne (Aggregate Expression)); ("semicolon", ZeroOrOne Token); ]; }; { kind_name = "YieldBreakStatement"; type_name = "yield_break_statement"; func_name = "yield_break_statement"; description = "yield_break_statement"; prefix = "yield_break"; aggregates = [TopLevelDeclaration; Statement]; fields = [("keyword", Token); ("break", Token); ("semicolon", Token)]; }; { kind_name = "ThrowStatement"; type_name = "throw_statement"; func_name = "throw_statement"; description = "throw_statement"; prefix = "throw"; aggregates = [TopLevelDeclaration; Statement]; fields = [ ("keyword", Token); ("expression", Aggregate Expression); ("semicolon", Token); ]; }; { kind_name = "BreakStatement"; type_name = "break_statement"; func_name = "break_statement"; description = "break_statement"; prefix = "break"; aggregates = [TopLevelDeclaration; Statement]; fields = [("keyword", Token); ("semicolon", Token)]; }; { kind_name = "ContinueStatement"; type_name = "continue_statement"; func_name = "continue_statement"; description = "continue_statement"; prefix = "continue"; aggregates = [TopLevelDeclaration; Statement]; fields = [("keyword", Token); ("semicolon", Token)]; }; { kind_name = "EchoStatement"; type_name = "echo_statement"; func_name = "echo_statement"; description = "echo_statement"; prefix = "echo"; aggregates = [TopLevelDeclaration; Statement]; fields = [ ("keyword", Token); ("expressions", ZeroOrMore (Aggregate Expression)); ("semicolon", Token); ]; }; { kind_name = "ConcurrentStatement"; type_name = "concurrent_statement"; func_name = "concurrent_statement"; description = "concurrent_statement"; prefix = "concurrent"; aggregates = [Statement]; fields = [("keyword", Token); ("statement", Aggregate Statement)]; }; { kind_name = "SimpleInitializer"; type_name = "simple_initializer"; func_name = "simple_initializer"; description = "simple_initializer"; prefix = "simple_initializer"; aggregates = []; fields = [("equal", Token); ("value", Aggregate Expression)]; }; { kind_name = "AnonymousClass"; type_name = "anonymous_class"; func_name = "anonymous_class"; description = "anonymous_class"; prefix = "anonymous_class"; aggregates = [ObjectCreationWhat]; fields = [ ("class_keyword", Token); ("left_paren", ZeroOrOne Token); ("argument_list", ZeroOrMore (Aggregate Expression)); ("right_paren", ZeroOrOne Token); ("extends_keyword", ZeroOrOne Token); ("extends_list", ZeroOrMore (Aggregate Specifier)); ("implements_keyword", ZeroOrOne Token); ("implements_list", ZeroOrMore (Aggregate Specifier)); ("body", Just "ClassishBody"); ]; }; { kind_name = "AnonymousFunction"; type_name = "anonymous_function"; func_name = "anonymous_function"; description = "anonymous_function"; prefix = "anonymous"; aggregates = [Expression; ConstructorExpression; LambdaBody]; fields = [ ("attribute_spec", ZeroOrOne (Aggregate AttributeSpecification)); ("async_keyword", ZeroOrOne Token); ("function_keyword", Token); ("left_paren", Token); ("parameters", ZeroOrMore (Aggregate Parameter)); ("right_paren", Token); ("ctx_list", ZeroOrOne (Just "Contexts")); ("colon", ZeroOrOne Token); ("readonly_return", ZeroOrOne Token); ("type", ZeroOrOne (Aggregate Specifier)); ("use", ZeroOrOne (Just "AnonymousFunctionUseClause")); ("body", Just "CompoundStatement"); ]; }; { kind_name = "AnonymousFunctionUseClause"; type_name = "anonymous_function_use_clause"; func_name = "anonymous_function_use_clause"; description = "anonymous_function_use_clause"; prefix = "anonymous_use"; aggregates = []; fields = [ 
("keyword", Token); ("left_paren", Token); ("variables", ZeroOrMore (Aggregate Expression)); ("right_paren", Token); ]; }; { kind_name = "VariablePattern"; type_name = "variable_pattern"; func_name = "variable_pattern"; description = "variable_pattern"; prefix = "variable_pattern"; aggregates = [Pattern]; fields = [("variable", Token)]; }; { kind_name = "ConstructorPattern"; type_name = "constructor_pattern"; func_name = "constructor_pattern"; description = "constructor_pattern"; prefix = "constructor_pattern"; aggregates = [Pattern]; fields = [ ("constructor", Aggregate Name); ("left_paren", Token); ("members", ZeroOrMore (Aggregate Pattern)); ("right_paren", Token); ]; }; { kind_name = "RefinementPattern"; type_name = "refinement_pattern"; func_name = "refinement_pattern"; description = "refinement_pattern"; prefix = "refinement_pattern"; aggregates = [Pattern]; fields = [ ("variable", Token); ("colon", Token); ("specifier", Aggregate Specifier); ]; }; { kind_name = "LambdaExpression"; type_name = "lambda_expression"; func_name = "lambda_expression"; description = "lambda_expression"; prefix = "lambda"; aggregates = [Expression; ConstructorExpression; LambdaBody]; fields = [ ("attribute_spec", ZeroOrOne (Aggregate AttributeSpecification)); ("async", ZeroOrOne Token); ("signature", Aggregate Specifier); ("arrow", Token); ("body", Aggregate LambdaBody); ]; }; { kind_name = "LambdaSignature"; type_name = "lambda_signature"; func_name = "lambda_signature"; description = "lambda_signature"; prefix = "lambda"; aggregates = [Specifier]; fields = [ ("left_paren", Token); ("parameters", ZeroOrMore (Aggregate Parameter)); ("right_paren", Token); ("contexts", ZeroOrOne (Just "Contexts")); ("colon", ZeroOrOne Token); ("readonly_return", ZeroOrOne Token); ("type", ZeroOrOne (Aggregate Specifier)); ]; }; { kind_name = "CastExpression"; type_name = "cast_expression"; func_name = "cast_expression"; description = "cast_expression"; prefix = "cast"; aggregates = [Expression; ConstructorExpression; LambdaBody]; fields = [ ("left_paren", Token); ("type", Token); ("right_paren", Token); ("operand", Aggregate Expression); ]; }; { kind_name = "ScopeResolutionExpression"; type_name = "scope_resolution_expression"; func_name = "scope_resolution_expression"; description = "scope_resolution_expression"; prefix = "scope_resolution"; aggregates = [Expression; ConstructorExpression; LambdaBody]; fields = [ ("qualifier", Aggregate Expression); ("operator", Token); ("name", Aggregate Expression); ]; }; { kind_name = "MemberSelectionExpression"; type_name = "member_selection_expression"; func_name = "member_selection_expression"; description = "member_selection_expression"; prefix = "member"; aggregates = [Expression; ConstructorExpression; LambdaBody]; fields = [("object", Aggregate Expression); ("operator", Token); ("name", Token)]; }; { kind_name = "SafeMemberSelectionExpression"; type_name = "safe_member_selection_expression"; func_name = "safe_member_selection_expression"; description = "safe_member_selection_expression"; prefix = "safe_member"; aggregates = [Expression; ConstructorExpression; LambdaBody]; fields = [("object", Aggregate Expression); ("operator", Token); ("name", Token)]; }; { kind_name = "EmbeddedMemberSelectionExpression"; type_name = "embedded_member_selection_expression"; func_name = "embedded_member_selection_expression"; description = "embedded_member_selection_expression"; prefix = "embedded_member"; aggregates = [Expression; ConstructorExpression; LambdaBody]; fields = [ ("object", Just 
"VariableExpression"); ("operator", Token); ("name", Token); ]; }; { kind_name = "YieldExpression"; type_name = "yield_expression"; func_name = "yield_expression"; description = "yield_expression"; prefix = "yield"; aggregates = [Expression; ConstructorExpression; LambdaBody]; fields = [("keyword", Token); ("operand", Aggregate ConstructorExpression)]; }; { kind_name = "PrefixUnaryExpression"; type_name = "prefix_unary_expression"; func_name = "prefix_unary_expression"; description = "prefix_unary_expression"; prefix = "prefix_unary"; aggregates = [Expression; ConstructorExpression; LambdaBody]; fields = [("operator", Token); ("operand", Aggregate Expression)]; }; { kind_name = "PostfixUnaryExpression"; type_name = "postfix_unary_expression"; func_name = "postfix_unary_expression"; description = "postfix_unary_expression"; prefix = "postfix_unary"; aggregates = [Expression; ConstructorExpression; LambdaBody]; fields = [("operand", Aggregate Expression); ("operator", Token)]; }; { kind_name = "BinaryExpression"; type_name = "binary_expression"; func_name = "binary_expression"; description = "binary_expression"; prefix = "binary"; aggregates = [Expression; ConstructorExpression; LambdaBody]; fields = [ ("left_operand", Aggregate Expression); ("operator", Token); ("right_operand", Aggregate Expression); ]; }; { kind_name = "IsExpression"; type_name = "is_expression"; func_name = "is_expression"; description = "is_expression"; prefix = "is"; aggregates = [Expression; ConstructorExpression; LambdaBody]; fields = [ ("left_operand", Aggregate Expression); ("operator", Token); ("right_operand", Aggregate Specifier); ]; }; { kind_name = "AsExpression"; type_name = "as_expression"; func_name = "as_expression"; description = "as_expression"; prefix = "as"; aggregates = [Expression; ConstructorExpression; LambdaBody]; fields = [ ("left_operand", Aggregate Expression); ("operator", Token); ("right_operand", Aggregate Specifier); ]; }; { kind_name = "NullableAsExpression"; type_name = "nullable_as_expression"; func_name = "nullable_as_expression"; description = "nullable_as_expression"; prefix = "nullable_as"; aggregates = [Expression; ConstructorExpression; LambdaBody]; fields = [ ("left_operand", Aggregate Expression); ("operator", Token); ("right_operand", Aggregate Specifier); ]; }; { kind_name = "UpcastExpression"; type_name = "upcast_expression"; func_name = "upcast_expression"; description = "upcast_expression"; prefix = "upcast"; aggregates = [Expression; ConstructorExpression; LambdaBody]; fields = [ ("left_operand", Aggregate Expression); ("operator", Token); ("right_operand", Aggregate Specifier); ]; }; { kind_name = "ConditionalExpression"; type_name = "conditional_expression"; func_name = "conditional_expression"; description = "conditional_expression"; prefix = "conditional"; aggregates = [Expression; ConstructorExpression; LambdaBody]; fields = [ ("test", Aggregate Expression); ("question", Token); ("consequence", ZeroOrOne (Aggregate Expression)); ("colon", Token); ("alternative", Aggregate Expression); ]; }; { kind_name = "EvalExpression"; type_name = "eval_expression"; func_name = "eval_expression"; description = "eval_expression"; prefix = "eval"; aggregates = [Expression; ConstructorExpression; LambdaBody]; fields = [ ("keyword", Token); ("left_paren", Token); ("argument", Aggregate Expression); ("right_paren", Token); ]; }; { kind_name = "IssetExpression"; type_name = "isset_expression"; func_name = "isset_expression"; description = "isset_expression"; prefix = "isset"; aggregates = 
[Expression; ConstructorExpression; LambdaBody]; fields = [ ("keyword", Token); ("left_paren", Token); ("argument_list", ZeroOrMore (Aggregate Expression)); ("right_paren", Token); ]; }; { kind_name = "FunctionCallExpression"; type_name = "function_call_expression"; func_name = "function_call_expression"; description = "function_call_expression"; prefix = "function_call"; aggregates = [Expression; ConstructorExpression; LambdaBody]; fields = [ ("receiver", Aggregate Expression); ("type_args", ZeroOrOne (Just "TypeArguments")); ("left_paren", Token); ("argument_list", ZeroOrMore (Aggregate Expression)); ("right_paren", Token); ]; }; { kind_name = "FunctionPointerExpression"; type_name = "function_pointer_expression"; func_name = "function_pointer_expression"; description = "function_pointer_expression"; prefix = "function_pointer"; aggregates = [Expression; ConstructorExpression; LambdaBody]; fields = [ ("receiver", Aggregate Expression); ("type_args", Just "TypeArguments"); ]; }; { kind_name = "ParenthesizedExpression"; type_name = "parenthesized_expression"; func_name = "parenthesized_expression"; description = "parenthesized_expression"; prefix = "parenthesized_expression"; aggregates = [Expression; ConstructorExpression; LambdaBody]; fields = [ ("left_paren", Token); ("expression", Aggregate Expression); ("right_paren", Token); ]; }; { kind_name = "BracedExpression"; type_name = "braced_expression"; func_name = "braced_expression"; description = "braced_expression"; prefix = "braced_expression"; aggregates = [Expression; ConstructorExpression; LambdaBody]; fields = [ ("left_brace", Token); ("expression", Aggregate Expression); ("right_brace", Token); ]; }; { kind_name = "ETSpliceExpression"; type_name = "et_splice_expression"; func_name = "et_splice_expression"; description = "et_splice_expression"; prefix = "et_splice_expression"; aggregates = [Expression; ConstructorExpression; LambdaBody]; fields = [ ("dollar", Token); ("left_brace", Token); ("expression", Aggregate Expression); ("right_brace", Token); ]; }; { kind_name = "EmbeddedBracedExpression"; type_name = "embedded_braced_expression"; func_name = "embedded_braced_expression"; description = "embedded_braced_expression"; prefix = "embedded_braced_expression"; aggregates = [Expression; ConstructorExpression; LambdaBody]; fields = [ ("left_brace", Token); ("expression", Aggregate Expression); ("right_brace", Token); ]; }; { kind_name = "ListExpression"; type_name = "list_expression"; func_name = "list_expression"; description = "list_expression"; prefix = "list"; aggregates = [Expression; ConstructorExpression; LambdaBody]; fields = [ ("keyword", Token); ("left_paren", Token); ("members", ZeroOrMore (Aggregate Expression)); ("right_paren", Token); ]; }; { kind_name = "CollectionLiteralExpression"; type_name = "collection_literal_expression"; func_name = "collection_literal_expression"; description = "collection_literal_expression"; prefix = "collection_literal"; aggregates = [Expression; ConstructorExpression; LambdaBody]; fields = [ ("name", Aggregate Specifier); ("left_brace", Token); ("initializers", ZeroOrMore (Aggregate ConstructorExpression)); ("right_brace", Token); ]; }; { kind_name = "ObjectCreationExpression"; type_name = "object_creation_expression"; func_name = "object_creation_expression"; description = "object_creation_expression"; prefix = "object_creation"; aggregates = [Expression; ConstructorExpression; LambdaBody]; fields = [("new_keyword", Token); ("object", Aggregate ObjectCreationWhat)]; }; { kind_name = 
"ConstructorCall"; type_name = "constructor_call"; func_name = "constructor_call"; description = "constructor_call"; prefix = "constructor_call"; aggregates = [ObjectCreationWhat]; fields = [ ("type", Aggregate TODO); ("left_paren", ZeroOrOne Token); ("argument_list", ZeroOrMore (Aggregate Expression)); ("right_paren", ZeroOrOne Token); ]; }; { kind_name = "DarrayIntrinsicExpression"; type_name = "darray_intrinsic_expression"; func_name = "darray_intrinsic_expression"; description = "darray_intrinsic_expression"; prefix = "darray_intrinsic"; aggregates = [Expression; ConstructorExpression; LambdaBody]; fields = [ ("keyword", Token); ("explicit_type", ZeroOrOne (Just "TypeArguments")); ("left_bracket", Token); ("members", ZeroOrMore (Just "ElementInitializer")); ("right_bracket", Token); ]; }; { kind_name = "DictionaryIntrinsicExpression"; type_name = "dictionary_intrinsic_expression"; func_name = "dictionary_intrinsic_expression"; description = "dictionary_intrinsic_expression"; prefix = "dictionary_intrinsic"; aggregates = [Expression; ConstructorExpression; LambdaBody]; fields = [ ("keyword", Token); ("explicit_type", ZeroOrOne (Just "TypeArguments")); ("left_bracket", Token); ("members", ZeroOrMore (Just "ElementInitializer")); ("right_bracket", Token); ]; }; { kind_name = "KeysetIntrinsicExpression"; type_name = "keyset_intrinsic_expression"; func_name = "keyset_intrinsic_expression"; description = "keyset_intrinsic_expression"; prefix = "keyset_intrinsic"; aggregates = [Expression; ConstructorExpression; LambdaBody]; fields = [ ("keyword", Token); ("explicit_type", ZeroOrOne (Just "TypeArguments")); ("left_bracket", Token); ("members", ZeroOrMore (Aggregate Expression)); ("right_bracket", Token); ]; }; { kind_name = "VarrayIntrinsicExpression"; type_name = "varray_intrinsic_expression"; func_name = "varray_intrinsic_expression"; description = "varray_intrinsic_expression"; prefix = "varray_intrinsic"; aggregates = [Expression; ConstructorExpression; LambdaBody]; fields = [ ("keyword", Token); ("explicit_type", ZeroOrOne (Just "TypeArguments")); ("left_bracket", Token); ("members", ZeroOrMore (Aggregate Expression)); ("right_bracket", Token); ]; }; { kind_name = "VectorIntrinsicExpression"; type_name = "vector_intrinsic_expression"; func_name = "vector_intrinsic_expression"; description = "vector_intrinsic_expression"; prefix = "vector_intrinsic"; aggregates = [Expression; ConstructorExpression; LambdaBody]; fields = [ ("keyword", Token); ("explicit_type", ZeroOrOne (Just "TypeArguments")); ("left_bracket", Token); ("members", ZeroOrMore (Aggregate Expression)); ("right_bracket", Token); ]; }; { kind_name = "ElementInitializer"; type_name = "element_initializer"; func_name = "element_initializer"; description = "element_initializer"; prefix = "element"; aggregates = [ConstructorExpression]; fields = [ ("key", Aggregate Expression); ("arrow", Token); ("value", Aggregate Expression); ]; }; { kind_name = "SubscriptExpression"; type_name = "subscript_expression"; func_name = "subscript_expression"; description = "subscript_expression"; prefix = "subscript"; aggregates = [Expression; ConstructorExpression; LambdaBody]; fields = [ ("receiver", Aggregate Expression); ("left_bracket", Token); ("index", ZeroOrOne (Aggregate Expression)); ("right_bracket", Token); ]; }; { kind_name = "EmbeddedSubscriptExpression"; type_name = "embedded_subscript_expression"; func_name = "embedded_subscript_expression"; description = "embedded_subscript_expression"; prefix = "embedded_subscript"; aggregates = 
[Expression; ConstructorExpression; LambdaBody]; fields = [ ("receiver", Just "VariableExpression"); ("left_bracket", Token); ("index", Aggregate Expression); ("right_bracket", Token); ]; }; { kind_name = "AwaitableCreationExpression"; type_name = "awaitable_creation_expression"; func_name = "awaitable_creation_expression"; description = "awaitable_creation_expression"; prefix = "awaitable"; aggregates = [Expression; ConstructorExpression; LambdaBody]; fields = [ ("attribute_spec", ZeroOrOne (Aggregate AttributeSpecification)); ("async", Token); ("compound_statement", Just "CompoundStatement"); ]; }; { kind_name = "XHPChildrenDeclaration"; type_name = "xhp_children_declaration"; func_name = "xhp_children_declaration"; description = "xhp_children_declaration"; prefix = "xhp_children"; aggregates = [ClassBodyDeclaration]; fields = [ ("keyword", Token); ("expression", Aggregate Expression); ("semicolon", Token); ]; }; { kind_name = "XHPChildrenParenthesizedList"; type_name = "xhp_children_parenthesized_list"; func_name = "xhp_children_parenthesized_list"; description = "xhp_children_parenthesized_list"; prefix = "xhp_children_list"; aggregates = [Expression; ConstructorExpression; LambdaBody]; fields = [ ("left_paren", Token); ("xhp_children", ZeroOrMore (Aggregate Expression)); ("right_paren", Token); ]; }; { kind_name = "XHPCategoryDeclaration"; type_name = "xhp_category_declaration"; func_name = "xhp_category_declaration"; description = "xhp_category_declaration"; prefix = "xhp_category"; aggregates = [ClassBodyDeclaration]; fields = [ ("keyword", Token); ("categories", ZeroOrMore Token); ("semicolon", Token); ]; }; { kind_name = "XHPEnumType"; type_name = "xhp_enum_type"; func_name = "xhp_enum_type"; description = "xhp_enum_type"; prefix = "xhp_enum"; aggregates = [Specifier]; fields = [ ("like", ZeroOrOne Token); ("keyword", Token); ("left_brace", Token); ("values", ZeroOrMore (Just "LiteralExpression")); ("right_brace", Token); ]; }; { kind_name = "XHPLateinit"; type_name = "xhp_lateinit"; func_name = "xhp_lateinit"; description = "xhp_lateinit"; prefix = "xhp_lateinit"; aggregates = []; fields = [("at", Token); ("keyword", Token)]; }; { kind_name = "XHPRequired"; type_name = "xhp_required"; func_name = "xhp_required"; description = "xhp_required"; prefix = "xhp_required"; aggregates = []; fields = [("at", Token); ("keyword", Token)]; }; { kind_name = "XHPClassAttributeDeclaration"; type_name = "xhp_class_attribute_declaration"; func_name = "xhp_class_attribute_declaration"; description = "xhp_class_attribute_declaration"; prefix = "xhp_attribute"; aggregates = [ClassBodyDeclaration]; fields = [ ("keyword", Token); ("attributes", ZeroOrMore (Aggregate TODO)); ("semicolon", Token); ]; }; { kind_name = "XHPClassAttribute"; type_name = "xhp_class_attribute"; func_name = "xhp_class_attribute"; description = "xhp_class_attribute"; prefix = "xhp_attribute_decl"; aggregates = []; fields = [ ("type", Aggregate Specifier); ("name", Token); ("initializer", ZeroOrOne (Just "SimpleInitializer")); ("required", ZeroOrOne (Just "XHPRequired")); ]; }; { kind_name = "XHPSimpleClassAttribute"; type_name = "xhp_simple_class_attribute"; func_name = "xhp_simple_class_attribute"; description = "xhp_simple_class_attribute"; prefix = "xhp_simple_class_attribute"; aggregates = []; fields = [("type", Just "SimpleTypeSpecifier")]; }; { kind_name = "XHPSimpleAttribute"; type_name = "xhp_simple_attribute"; func_name = "xhp_simple_attribute"; description = "xhp_simple_attribute"; prefix = "xhp_simple_attribute"; 
aggregates = [XHPAttribute]; fields = [ ("name", Token); ("equal", Token); ("expression", Aggregate Expression); ]; }; { kind_name = "XHPSpreadAttribute"; type_name = "xhp_spread_attribute"; func_name = "xhp_spread_attribute"; description = "xhp_spread_attribute"; prefix = "xhp_spread_attribute"; aggregates = [XHPAttribute]; fields = [ ("left_brace", Token); ("spread_operator", Token); ("expression", Aggregate Expression); ("right_brace", Token); ]; }; { kind_name = "XHPOpen"; type_name = "xhp_open"; func_name = "xhp_open"; description = "xhp_open"; prefix = "xhp_open"; aggregates = []; fields = [ ("left_angle", Token); ("name", Token); ("attributes", ZeroOrMore (Aggregate XHPAttribute)); ("right_angle", Token); ]; }; { kind_name = "XHPExpression"; type_name = "xhp_expression"; func_name = "xhp_expression"; description = "xhp_expression"; prefix = "xhp"; aggregates = [Expression; ConstructorExpression; LambdaBody]; fields = [ ("open", Just "XHPOpen"); ("body", ZeroOrMore (Aggregate Expression)); ("close", ZeroOrOne (Just "XHPClose")); ]; }; { kind_name = "XHPClose"; type_name = "xhp_close"; func_name = "xhp_close"; description = "xhp_close"; prefix = "xhp_close"; aggregates = []; fields = [("left_angle", Token); ("name", Token); ("right_angle", Token)]; }; { kind_name = "TypeConstant"; type_name = "type_constant"; func_name = "type_constant"; description = "type_constant"; prefix = "type_constant"; aggregates = [Statement]; fields = [ ("left_type", Aggregate Specifier); ("separator", Token); ("right_type", Token); ]; }; { kind_name = "VectorTypeSpecifier"; type_name = "vector_type_specifier"; func_name = "vector_type_specifier"; description = "vector_type_specifier"; prefix = "vector_type"; aggregates = [Specifier]; fields = [ ("keyword", Token); ("left_angle", Token); ("type", Aggregate Specifier); ("trailing_comma", ZeroOrOne Token); ("right_angle", Token); ]; }; { kind_name = "KeysetTypeSpecifier"; type_name = "keyset_type_specifier"; func_name = "keyset_type_specifier"; description = "keyset_type_specifier"; prefix = "keyset_type"; aggregates = [Specifier]; fields = [ ("keyword", Token); ("left_angle", Token); ("type", Aggregate Specifier); ("trailing_comma", ZeroOrOne Token); ("right_angle", Token); ]; }; { kind_name = "TupleTypeExplicitSpecifier"; type_name = "tuple_type_explicit_specifier"; func_name = "tuple_type_explicit_specifier"; description = "tuple_type_explicit_specifier"; prefix = "tuple_type"; aggregates = [Specifier]; fields = [ ("keyword", Token); ("left_angle", Token); ("types", Just "SimpleTypeSpecifier"); ("right_angle", Token); ]; }; { kind_name = "VarrayTypeSpecifier"; type_name = "varray_type_specifier"; func_name = "varray_type_specifier"; description = "varray_type_specifier"; prefix = "varray"; aggregates = [Specifier]; fields = [ ("keyword", Token); ("left_angle", Token); ("type", Just "SimpleTypeSpecifier"); ("trailing_comma", ZeroOrOne Token); ("right_angle", Token); ]; }; { kind_name = "FunctionCtxTypeSpecifier"; type_name = "function_ctx_type_specifier"; func_name = "function_ctx_type_specifier"; description = "function_ctx_type_specifier"; prefix = "function_ctx_type"; aggregates = [Specifier]; fields = [("keyword", Token); ("variable", Just "VariableExpression")]; }; { kind_name = "TypeParameter"; type_name = "type_parameter"; func_name = "type_parameter"; description = "type_parameter"; prefix = "type"; aggregates = []; fields = [ ("attribute_spec", ZeroOrOne (Aggregate AttributeSpecification)); ("reified", ZeroOrOne Token); ("variance", ZeroOrOne 
Token); ("name", Token); ("param_params", ZeroOrOne (Just "TypeParameters")); ("constraints", ZeroOrMore (Just "TypeConstraint")); ]; }; { kind_name = "TypeConstraint"; type_name = "type_constraint"; func_name = "type_constraint"; description = "type_constraint"; prefix = "constraint"; aggregates = []; fields = [("keyword", Token); ("type", Aggregate Specifier)]; }; { kind_name = "ContextConstraint"; type_name = "context_constraint"; func_name = "context_constraint"; description = "context_constraint"; prefix = "ctx_constraint"; aggregates = []; fields = [("keyword", Token); ("ctx_list", ZeroOrOne (Just "Contexts"))]; }; { kind_name = "DarrayTypeSpecifier"; type_name = "darray_type_specifier"; func_name = "darray_type_specifier"; description = "darray_type_specifier"; prefix = "darray"; aggregates = [Specifier]; fields = [ ("keyword", Token); ("left_angle", Token); ("key", Just "SimpleTypeSpecifier"); ("comma", Token); ("value", Just "SimpleTypeSpecifier"); ("trailing_comma", ZeroOrOne Token); ("right_angle", Token); ]; }; { kind_name = "DictionaryTypeSpecifier"; type_name = "dictionary_type_specifier"; func_name = "dictionary_type_specifier"; description = "dictionary_type_specifier"; prefix = "dictionary_type"; aggregates = [Specifier]; fields = [ ("keyword", Token); ("left_angle", Token); ("members", ZeroOrMore (Aggregate Specifier)); ("right_angle", Token); ]; }; { kind_name = "ClosureTypeSpecifier"; type_name = "closure_type_specifier"; func_name = "closure_type_specifier"; description = "closure_type_specifier"; prefix = "closure"; aggregates = [Specifier]; fields = [ ("outer_left_paren", Token); ("readonly_keyword", ZeroOrOne Token); ("function_keyword", Token); ("inner_left_paren", Token); ("parameter_list", ZeroOrMore (Just "ClosureParameterTypeSpecifier")); ("inner_right_paren", Token); ("contexts", ZeroOrOne (Just "Contexts")); ("colon", Token); ("readonly_return", ZeroOrOne Token); ("return_type", Aggregate Specifier); ("outer_right_paren", Token); ]; }; { kind_name = "ClosureParameterTypeSpecifier"; type_name = "closure_parameter_type_specifier"; func_name = "closure_parameter_type_specifier"; description = "closure_parameter_type_specifier"; prefix = "closure_parameter"; aggregates = [Specifier]; fields = [ ("call_convention", ZeroOrOne Token); ("readonly", ZeroOrOne Token); ("type", Aggregate Specifier); ]; }; { kind_name = "TypeRefinement"; type_name = "type_refinement"; func_name = "type_refinement"; description = "type_refinement"; prefix = "type_refinement"; aggregates = [Specifier]; fields = [ ("type", Aggregate Specifier); ("keyword", Token); ("left_brace", Token); ("members", ZeroOrMore (Aggregate RefinementMember)); ("right_brace", Token); ]; }; { kind_name = "TypeInRefinement"; type_name = "type_in_refinement"; func_name = "type_in_refinement"; description = "type_in_refinement"; prefix = "type_in_refinement"; aggregates = [RefinementMember]; fields = [ ("keyword", Token); ("name", Token); ("type_parameters", ZeroOrOne (Just "TypeParameters")); ("constraints", ZeroOrMore (Just "TypeConstraint")); ("equal", ZeroOrOne Token); ("type", ZeroOrOne (Aggregate Specifier)); ]; }; { kind_name = "CtxInRefinement"; type_name = "ctx_in_refinement"; func_name = "ctx_in_refinement"; description = "ctx_in_refinement"; prefix = "ctx_in_refinement"; aggregates = [RefinementMember]; fields = [ ("keyword", Token); ("name", Token); ("type_parameters", ZeroOrOne (Just "TypeParameters")); ("constraints", ZeroOrMore (Just "ContextConstraint")); ("equal", ZeroOrOne Token); ("ctx_list", 
ZeroOrOne (Aggregate Specifier)); ]; }; { kind_name = "ClassnameTypeSpecifier"; type_name = "classname_type_specifier"; func_name = "classname_type_specifier"; description = "classname_type_specifier"; prefix = "classname"; aggregates = [Specifier]; fields = [ ("keyword", Token); ("left_angle", Token); ("type", Aggregate Specifier); ("trailing_comma", ZeroOrOne Token); ("right_angle", Token); ]; }; { kind_name = "FieldSpecifier"; type_name = "field_specifier"; func_name = "field_specifier"; description = "field_specifier"; prefix = "field"; aggregates = [Specifier]; fields = [ ("question", ZeroOrOne Token); ("name", Aggregate Expression); ("arrow", Token); ("type", Aggregate Specifier); ]; }; { kind_name = "FieldInitializer"; type_name = "field_initializer"; func_name = "field_initializer"; description = "field_initializer"; prefix = "field_initializer"; aggregates = []; fields = [ ("name", Aggregate Expression); ("arrow", Token); ("value", Aggregate Expression); ]; }; { kind_name = "ShapeTypeSpecifier"; type_name = "shape_type_specifier"; func_name = "shape_type_specifier"; description = "shape_type_specifier"; prefix = "shape_type"; aggregates = [Specifier]; fields = [ ("keyword", Token); ("left_paren", Token); ("fields", ZeroOrMore (Just "FieldSpecifier")); ("ellipsis", ZeroOrOne Token); ("right_paren", Token); ]; }; { kind_name = "ShapeExpression"; type_name = "shape_expression"; func_name = "shape_expression"; description = "shape_expression"; prefix = "shape_expression"; aggregates = [Expression; ConstructorExpression; LambdaBody]; fields = [ ("keyword", Token); ("left_paren", Token); ("fields", ZeroOrMore (Just "FieldInitializer")); ("right_paren", Token); ]; }; { kind_name = "TupleExpression"; type_name = "tuple_expression"; func_name = "tuple_expression"; description = "tuple_expression"; prefix = "tuple_expression"; aggregates = [Expression; ConstructorExpression; LambdaBody]; fields = [ ("keyword", Token); ("left_paren", Token); ("items", ZeroOrMore (Aggregate Expression)); ("right_paren", Token); ]; } (* TODO: Rename this; generic type specifiers are also used for * type-annotated method calls and for object creations with type annotations * This naming is now very misleading (e.g. class_type being any name). 
*); { kind_name = "GenericTypeSpecifier"; type_name = "generic_type_specifier"; func_name = "generic_type_specifier"; description = "generic_type_specifier"; prefix = "generic"; aggregates = [Specifier]; fields = [("class_type", Token); ("argument_list", Just "TypeArguments")]; }; { kind_name = "NullableTypeSpecifier"; type_name = "nullable_type_specifier"; func_name = "nullable_type_specifier"; description = "nullable_type_specifier"; prefix = "nullable"; aggregates = [Specifier]; fields = [("question", Token); ("type", Aggregate Specifier)]; }; { kind_name = "LikeTypeSpecifier"; type_name = "like_type_specifier"; func_name = "like_type_specifier"; description = "like_type_specifier"; prefix = "like"; aggregates = [Specifier]; fields = [("tilde", Token); ("type", Aggregate Specifier)]; }; { kind_name = "SoftTypeSpecifier"; type_name = "soft_type_specifier"; func_name = "soft_type_specifier"; description = "soft_type_specifier"; prefix = "soft"; aggregates = [Specifier]; fields = [("at", Token); ("type", Aggregate Specifier)]; }; { kind_name = "AttributizedSpecifier"; type_name = "attributized_specifier"; func_name = "attributized_specifier"; description = "attributized_specifier"; prefix = "attributized_specifier"; aggregates = []; fields = [ ("attribute_spec", ZeroOrOne (Aggregate AttributeSpecification)); ("type", Aggregate Specifier); ]; }; { kind_name = "ReifiedTypeArgument"; type_name = "reified_type_argument"; func_name = "reified_type_argument"; description = "reified_type_argument"; prefix = "reified_type_argument"; aggregates = []; fields = [("reified", Token); ("type", Aggregate Specifier)]; }; { kind_name = "TypeArguments"; type_name = "type_arguments"; func_name = "type_arguments"; description = "type_arguments"; prefix = "type_arguments"; aggregates = []; fields = [ ("left_angle", Token); ("types", ZeroOrMore (Just "AttributizedSpecifier")); ("right_angle", Token); ]; }; { kind_name = "TypeParameters"; type_name = "type_parameters"; func_name = "type_parameters"; description = "type_parameters"; prefix = "type_parameters"; aggregates = []; fields = [ ("left_angle", Token); ("parameters", ZeroOrMore (Just "TypeParameter")); ("right_angle", Token); ]; }; { kind_name = "TupleTypeSpecifier"; type_name = "tuple_type_specifier"; func_name = "tuple_type_specifier"; description = "tuple_type_specifier"; prefix = "tuple"; aggregates = [Specifier]; fields = [ ("left_paren", Token); ("types", ZeroOrMore (Aggregate Specifier)); ("right_paren", Token); ]; }; { kind_name = "UnionTypeSpecifier"; type_name = "union_type_specifier"; func_name = "union_type_specifier"; description = "union_type_specifier"; prefix = "union"; aggregates = [Specifier]; fields = [ ("left_paren", Token); ("types", ZeroOrMore (Aggregate Specifier)); ("right_paren", Token); ]; }; { kind_name = "IntersectionTypeSpecifier"; type_name = "intersection_type_specifier"; func_name = "intersection_type_specifier"; description = "intersection_type_specifier"; prefix = "intersection"; aggregates = [Specifier]; fields = [ ("left_paren", Token); ("types", ZeroOrMore (Aggregate Specifier)); ("right_paren", Token); ]; }; { kind_name = "ErrorSyntax"; type_name = "error"; func_name = "error"; description = "error"; prefix = "error"; aggregates = []; fields = [("error", Just "error")]; }; { kind_name = "ListItem"; type_name = "list_item"; func_name = "list_item"; description = "list_item"; prefix = "list"; aggregates = []; fields = [("item", Just "error"); ("separator", Token)]; }; { kind_name = "EnumClassLabelExpression"; type_name 
= "enum_class_label_expression"; func_name = "enum_class_label_expression"; description = "enum_class_label"; prefix = "enum_class_label"; aggregates = [Expression]; fields = [ ("qualifier", ZeroOrOne (Aggregate Expression)); ("hash", Token); ("expression", Token); ]; }; { kind_name = "ModuleDeclaration"; type_name = "module_declaration"; func_name = "module_declaration"; description = "module_declaration"; prefix = "module_declaration"; aggregates = [TopLevelDeclaration]; fields = [ ("attribute_spec", ZeroOrOne (Aggregate AttributeSpecification)); ("new_keyword", Token); ("module_keyword", Token); ("name", Aggregate Name); ("left_brace", Token); ("exports", Just "ModuleExports"); ("imports", Just "ModuleImports"); ("right_brace", Token); ]; }; { kind_name = "ModuleExports"; type_name = "module_exports"; func_name = "module_exports"; description = "module_exports"; prefix = "module_exports"; aggregates = []; fields = [ ("exports_keyword", Token); ("left_brace", Token); ("exports", ZeroOrMore (Aggregate Name)); ("right_brace", Token); ]; }; { kind_name = "ModuleImports"; type_name = "module_imports"; func_name = "module_imports"; description = "module_imports"; prefix = "module_imports"; aggregates = []; fields = [ ("imports_keyword", Token); ("left_brace", Token); ("imports", ZeroOrMore (Aggregate Name)); ("right_brace", Token); ]; }; { kind_name = "ModuleMembershipDeclaration"; type_name = "module_membership_declaration"; func_name = "module_membership_declaration"; description = "module_membership_declaration"; prefix = "module_membership_declaration"; aggregates = [TopLevelDeclaration]; fields = [ ("module_keyword", Token); ("name", Aggregate Name); ("semicolon", Token); ]; }; { kind_name = "PackageExpression"; type_name = "package_expression"; func_name = "package_expression"; description = "package_expression"; prefix = "package_expression"; aggregates = []; fields = [("keyword", Token); ("name", Aggregate Name)]; }; ] (******************************************************************************( * Utilities for aggregate types )******************************************************************************) let generated_aggregate_types = [ TopLevelDeclaration; Expression; Specifier; Parameter; ClassBodyDeclaration; EnumClassBodyDeclaration; RefinementMember; Statement; SwitchLabel; LambdaBody; ConstructorExpression; NamespaceInternals; XHPAttribute; ObjectCreationWhat; TODO; Name; ] let string_of_aggregate_type = function | TopLevelDeclaration -> "TopLevelDeclaration" | Expression -> "Expression" | Specifier -> "Specifier" | Parameter -> "Parameter" | AttributeSpecification -> "AttributeSpecification" | ClassBodyDeclaration -> "ClassBodyDeclaration" | EnumClassBodyDeclaration -> "EnumClassBodyDeclaration" | RefinementMember -> "RefinementMember" | Statement -> "Statement" | SwitchLabel -> "SwitchLabel" | LambdaBody -> "LambdaBody" | ConstructorExpression -> "ConstructorExpression" | NamespaceInternals -> "NamespaceInternals" | XHPAttribute -> "XHPAttribute" | ObjectCreationWhat -> "ObjectCreationWhat" | Pattern -> "Pattern" | TODO -> "TODO" | Name -> "Name" module AggregateKey = struct type t = aggregate_type let compare (x : t) (y : t) = compare x y end module AggMap = WrappedMap.Make (AggregateKey) let aggregation_of_top_level_declaration = List.filter (fun x -> List.mem TopLevelDeclaration x.aggregates) schema let aggregation_of_expression = List.filter (fun x -> List.mem Expression x.aggregates) schema let aggregation_of_specifier = List.filter (fun x -> List.mem Specifier 
x.aggregates) schema let aggregation_of_parameter = List.filter (fun x -> List.mem Parameter x.aggregates) schema let aggregation_of_attribute_specification = List.filter (fun x -> List.mem AttributeSpecification x.aggregates) schema let aggregation_of_class_body_declaration = List.filter (fun x -> List.mem ClassBodyDeclaration x.aggregates) schema let aggregation_of_enum_class_body_declaration = List.filter (fun x -> List.mem EnumClassBodyDeclaration x.aggregates) schema let aggregation_of_refinement_member = List.filter (fun x -> List.mem RefinementMember x.aggregates) schema let aggregation_of_statement = List.filter (fun x -> List.mem Statement x.aggregates) schema let aggregation_of_switch_label = List.filter (fun x -> List.mem SwitchLabel x.aggregates) schema let aggregation_of_lambda_body = List.filter (fun x -> List.mem LambdaBody x.aggregates) schema let aggregation_of_constructor_expression = List.filter (fun x -> List.mem ConstructorExpression x.aggregates) schema let aggregation_of_namespace_internals = List.filter (fun x -> List.mem NamespaceInternals x.aggregates) schema let aggregation_of_xhp_attribute = List.filter (fun x -> List.mem XHPAttribute x.aggregates) schema let aggregation_of_object_creation_what = List.filter (fun x -> List.mem ObjectCreationWhat x.aggregates) schema let aggregation_of_pattern = List.filter (fun x -> List.mem Pattern x.aggregates) schema let aggregation_of_todo_aggregate = List.filter (fun x -> List.mem TODO x.aggregates) schema let aggregation_of_name_aggregate = List.filter (fun x -> List.mem Name x.aggregates) schema let aggregation_of = function | TopLevelDeclaration -> aggregation_of_top_level_declaration | Expression -> aggregation_of_expression | Specifier -> aggregation_of_specifier | Parameter -> aggregation_of_parameter | AttributeSpecification -> aggregation_of_attribute_specification | ClassBodyDeclaration -> aggregation_of_class_body_declaration | EnumClassBodyDeclaration -> aggregation_of_enum_class_body_declaration | RefinementMember -> aggregation_of_refinement_member | Statement -> aggregation_of_statement | SwitchLabel -> aggregation_of_switch_label | LambdaBody -> aggregation_of_lambda_body | ConstructorExpression -> aggregation_of_constructor_expression | NamespaceInternals -> aggregation_of_namespace_internals | XHPAttribute -> aggregation_of_xhp_attribute | ObjectCreationWhat -> aggregation_of_object_creation_what | Pattern -> aggregation_of_pattern | TODO -> aggregation_of_todo_aggregate | Name -> aggregation_of_name_aggregate let aggregate_type_name = function | TopLevelDeclaration -> "top_level_declaration" | Expression -> "expression" | Specifier -> "specifier" | Parameter -> "parameter" | AttributeSpecification -> "attribute_specification" | ClassBodyDeclaration -> "class_body_declaration" | EnumClassBodyDeclaration -> "enum_class_body_declaration" | RefinementMember -> "refinement_member" | Statement -> "statement" | SwitchLabel -> "switch_label" | LambdaBody -> "lambda_body" | ConstructorExpression -> "constructor_expression" | NamespaceInternals -> "namespace_internals" | XHPAttribute -> "xhp_attribute" | ObjectCreationWhat -> "object_creation_what" | Pattern -> "pattern" | TODO -> "todo_aggregate" | Name -> "name_aggregate" let aggregate_type_pfx_trim = function | TopLevelDeclaration -> ("TLD", "\\(Declaration\\|Statement\\)$") | Expression -> ("Expr", "Expression$") | Specifier -> ("Spec", "\\(Type\\)?Specifier$") | Parameter -> ("Param", "") | AttributeSpecification -> ("AttrSpec", "") | ClassBodyDeclaration -> 
("Body", "Declaration") | EnumClassBodyDeclaration -> ("ECBody", "Declaration") | RefinementMember -> ("TypeRefinementMember", "InRefinement$") | Statement -> ("Stmt", "Statement$") | SwitchLabel -> ("Switch", "Label$") | LambdaBody -> ("Lambda", "Expression$") | ConstructorExpression -> ("CExpr", "Expression$") | NamespaceInternals -> ("NSI", "") | XHPAttribute -> ("XHPAttr", "") | ObjectCreationWhat -> ("New", "") | Pattern -> ("Patt", "Pattern$") | TODO -> ("TODO", "") | Name -> ("Name", "") (******************************************************************************( * Useful for debugging / schema alterations )******************************************************************************) let string_of_child_spec = let p = Printf.sprintf in let rec aux = function | Token -> "Token" | Just x -> p "Just \"%s\"" x | Aggregate x -> p "Aggregate %s" (string_of_aggregate_type x) | ZeroOrMore c -> p "ZeroOrMore (%s)" (aux c) | ZeroOrOne c -> p "ZeroOrOne (%s)" (aux c) in aux
OCaml
hhvm/hphp/hack/src/parser/schema/token_schema_definition.ml
(* * Copyright (c) 2016, Facebook, Inc. * All rights reserved. * * This source code is licensed under the MIT license found in the * LICENSE file in the "hack" directory of this source tree. * *) type token_node = { token_kind: string; token_text: string; (* Whether the token is allowed as identifier, i.e., in practice, whether * it is allowed as function name or class name. * For example, darray is allowed as identifier. The following is legit: * * function darray() {} * * NB 1: This does not apply to names of constants, enum members, class * members including methods: for those, absolutely all keywords are * allowed. * * NB 2: for class names, in addition, a few other keywords are "reserved", * i.e. not allowed as class names. These include type names like int or * bool. Those "reserved" keywords are not defined here. See syntax error * `reserved_keyword_as_type_name`. *) allowed_as_identifier: bool; } let make_token_node token_kind token_text ?(allowed_as_identifier = false) () = { token_kind; token_text; allowed_as_identifier } let variable_text_tokens = [ make_token_node "ErrorToken" "error_token" (); make_token_node "Name" "name" (); make_token_node "Variable" "variable" (); make_token_node "DecimalLiteral" "decimal_literal" (); make_token_node "OctalLiteral" "octal_literal" (); make_token_node "HexadecimalLiteral" "hexadecimal_literal" (); make_token_node "BinaryLiteral" "binary_literal" (); make_token_node "FloatingLiteral" "floating_literal" (); make_token_node "SingleQuotedStringLiteral" "single_quoted_string_literal" (); make_token_node "DoubleQuotedStringLiteral" "double_quoted_string_literal" (); make_token_node "DoubleQuotedStringLiteralHead" "double_quoted_string_literal_head" (); make_token_node "StringLiteralBody" "string_literal_body" (); make_token_node "DoubleQuotedStringLiteralTail" "double_quoted_string_literal_tail" (); make_token_node "HeredocStringLiteral" "heredoc_string_literal" (); make_token_node "HeredocStringLiteralHead" "heredoc_string_literal_head" (); make_token_node "HeredocStringLiteralTail" "heredoc_string_literal_tail" (); make_token_node "NowdocStringLiteral" "nowdoc_string_literal" (); make_token_node "BooleanLiteral" "boolean_literal" (); make_token_node "XHPCategoryName" "XHP_category_name" (); make_token_node "XHPElementName" "XHP_element_name" (); make_token_node "XHPClassName" "XHP_class_name" (); make_token_node "XHPStringLiteral" "XHP_string_literal" (); make_token_node "XHPBody" "XHP_body" (); make_token_node "XHPComment" "XHP_comment" (); make_token_node "Hashbang" "hashbang" (); ] let no_text_tokens = [make_token_node "EndOfFile" "end_of_file" ()] let given_text_tokens = [ make_token_node "Abstract" "abstract" (); make_token_node "Arraykey" "arraykey" ~allowed_as_identifier:true (); make_token_node "As" "as" (); make_token_node "Async" "async" (); make_token_node "Attribute" "attribute" ~allowed_as_identifier:true (); make_token_node "Await" "await" (); make_token_node "Backslash" "\\" (); make_token_node "Binary" "binary" ~allowed_as_identifier:true (); make_token_node "Bool" "bool" ~allowed_as_identifier:true (); make_token_node "Boolean" "boolean" ~allowed_as_identifier:true (); make_token_node "Break" "break" (); make_token_node "Case" "case" (); make_token_node "Catch" "catch" (); make_token_node "Category" "category" ~allowed_as_identifier:true (); make_token_node "Children" "children" ~allowed_as_identifier:true (); make_token_node "Class" "class" (); make_token_node "Classname" "classname" ~allowed_as_identifier:true (); 
make_token_node "Clone" "clone" (); make_token_node "Concurrent" "concurrent" (); make_token_node "Const" "const" (); make_token_node "Construct" "__construct" (); make_token_node "Continue" "continue" (); make_token_node "Ctx" "ctx" (); make_token_node "Darray" "darray" ~allowed_as_identifier:true (); make_token_node "Default" "default" (); make_token_node "Dict" "dict" ~allowed_as_identifier:true (); make_token_node "Do" "do" (); make_token_node "Double" "double" ~allowed_as_identifier:true (); make_token_node "Echo" "echo" (); make_token_node "Else" "else" (); make_token_node "Empty" "empty" (); make_token_node "Endif" "endif" (); make_token_node "Enum" "enum" ~allowed_as_identifier:true (); make_token_node "Eval" "eval" (); make_token_node "Exports" "exports" ~allowed_as_identifier:true (); make_token_node "Extends" "extends" (); make_token_node "Fallthrough" "fallthrough" ~allowed_as_identifier:true (); make_token_node "Float" "float" ~allowed_as_identifier:true (); make_token_node "File" "file" ~allowed_as_identifier:true (); make_token_node "Final" "final" (); make_token_node "Finally" "finally" (); make_token_node "For" "for" (); make_token_node "Foreach" "foreach" (); make_token_node "Function" "function" (); make_token_node "Global" "global" (); make_token_node "If" "if" (); make_token_node "Implements" "implements" (); make_token_node "Imports" "imports" ~allowed_as_identifier:true (); make_token_node "Include" "include" (); make_token_node "Include_once" "include_once" (); make_token_node "Inout" "inout" (); make_token_node "Instanceof" "instanceof" (); make_token_node "Insteadof" "insteadof" (); make_token_node "Int" "int" ~allowed_as_identifier:true (); make_token_node "Integer" "integer" ~allowed_as_identifier:true (); make_token_node "Interface" "interface" (); make_token_node "Is" "is" ~allowed_as_identifier:true (); make_token_node "Isset" "isset" (); make_token_node "Keyset" "keyset" ~allowed_as_identifier:true (); make_token_node "Lateinit" "lateinit" (); make_token_node "List" "list" (); make_token_node "Match" "match" ~allowed_as_identifier:true (); make_token_node "Mixed" "mixed" ~allowed_as_identifier:true (); make_token_node "Module" "module" (); make_token_node "Namespace" "namespace" (); make_token_node "New" "new" (); make_token_node "Newctx" "newctx" ~allowed_as_identifier:true (); make_token_node "Newtype" "newtype" ~allowed_as_identifier:true (); make_token_node "Noreturn" "noreturn" ~allowed_as_identifier:true (); make_token_node "Num" "num" ~allowed_as_identifier:true (); make_token_node "Parent" "parent" ~allowed_as_identifier:true (); make_token_node "Print" "print" (); make_token_node "Private" "private" (); make_token_node "Protected" "protected" (); make_token_node "Public" "public" (); make_token_node "Real" "real" ~allowed_as_identifier:true (); make_token_node "Reify" "reify" ~allowed_as_identifier:true (); make_token_node "Require" "require" (); make_token_node "Require_once" "require_once" (); make_token_node "Required" "required" (); make_token_node "Resource" "resource" ~allowed_as_identifier:true (); make_token_node "Return" "return" (); make_token_node "Self" "self" ~allowed_as_identifier:true (); make_token_node "Shape" "shape" (); make_token_node "Static" "static" (); make_token_node "String" "string" ~allowed_as_identifier:true (); make_token_node "Super" "super" ~allowed_as_identifier:true (); make_token_node "Switch" "switch" (); make_token_node "This" "this" ~allowed_as_identifier:true (); make_token_node "Throw" "throw" (); 
make_token_node "Trait" "trait" (); make_token_node "Try" "try" (); make_token_node "Tuple" "tuple" (); make_token_node "Type" "type" ~allowed_as_identifier:true (); make_token_node "Unset" "unset" (); make_token_node "Upcast" "upcast" ~allowed_as_identifier:true (); make_token_node "Use" "use" (); make_token_node "Using" "using" (); make_token_node "Var" "var" (); make_token_node "Varray" "varray" ~allowed_as_identifier:true (); make_token_node "Vec" "vec" ~allowed_as_identifier:true (); make_token_node "Void" "void" ~allowed_as_identifier:true (); make_token_node "With" "with" ~allowed_as_identifier:true (); make_token_node "Where" "where" ~allowed_as_identifier:true (); make_token_node "While" "while" (); make_token_node "Yield" "yield" (); make_token_node "NullLiteral" "null" ~allowed_as_identifier:true (); make_token_node "LeftBracket" "[" (); make_token_node "RightBracket" "]" (); make_token_node "LeftParen" "(" (); make_token_node "RightParen" ")" (); make_token_node "LeftBrace" "{" (); make_token_node "RightBrace" "}" (); make_token_node "Dot" "." (); make_token_node "MinusGreaterThan" "->" (); make_token_node "PlusPlus" "++" (); make_token_node "MinusMinus" "--" (); make_token_node "StarStar" "**" (); make_token_node "Star" "*" (); make_token_node "Plus" "+" (); make_token_node "Minus" "-" (); make_token_node "Tilde" "~" (); make_token_node "Exclamation" "!" (); make_token_node "Dollar" "$" (); make_token_node "Slash" "/" (); make_token_node "Percent" "%" (); make_token_node "LessThanEqualGreaterThan" "<=>" (); make_token_node "LessThanLessThan" "<<" (); make_token_node "GreaterThanGreaterThan" ">>" (); make_token_node "LessThan" "<" (); make_token_node "GreaterThan" ">" (); make_token_node "LessThanEqual" "<=" (); make_token_node "GreaterThanEqual" ">=" (); make_token_node "EqualEqual" "==" (); make_token_node "EqualEqualEqual" "===" (); make_token_node "ExclamationEqual" "!=" (); make_token_node "ExclamationEqualEqual" "!==" (); make_token_node "Carat" "^" (); make_token_node "Bar" "|" (); make_token_node "Ampersand" "&" (); make_token_node "AmpersandAmpersand" "&&" (); make_token_node "BarBar" "||" (); make_token_node "Question" "?" (); make_token_node "QuestionAs" "?as" (); make_token_node "QuestionColon" "?:" (); make_token_node "QuestionQuestion" "??" (); make_token_node "QuestionQuestionEqual" "??=" (); make_token_node "Colon" ":" (); make_token_node "Semicolon" ";" (); make_token_node "Equal" "=" (); make_token_node "StarStarEqual" "**=" (); make_token_node "StarEqual" "*=" (); make_token_node "SlashEqual" "/=" (); make_token_node "PercentEqual" "%=" (); make_token_node "PlusEqual" "+=" (); make_token_node "MinusEqual" "-=" (); make_token_node "DotEqual" ".=" (); make_token_node "LessThanLessThanEqual" "<<=" (); make_token_node "GreaterThanGreaterThanEqual" ">>=" (); make_token_node "AmpersandEqual" "&=" (); make_token_node "CaratEqual" "^=" (); make_token_node "BarEqual" "|=" (); make_token_node "Comma" "," (); make_token_node "At" "@" (); make_token_node "ColonColon" "::" (); make_token_node "EqualGreaterThan" "=>" (); make_token_node "EqualEqualGreaterThan" "==>" (); make_token_node "QuestionMinusGreaterThan" "?->" (); make_token_node "DotDotDot" "..." (); make_token_node "DollarDollar" "$$" (); make_token_node "BarGreaterThan" "|>" (); make_token_node "SlashGreaterThan" "/>" (); make_token_node "LessThanSlash" "</" (); make_token_node "LessThanQuestion" "<?" 
(); make_token_node "Backtick" "`" (); make_token_node "XHP" "xhp" ~allowed_as_identifier:true (); make_token_node "Hash" "#" (); make_token_node "Readonly" "readonly" (); make_token_node "Internal" "internal" ~allowed_as_identifier:true (); make_token_node "Package" "package" (); make_token_node "Let" "let" ~allowed_as_identifier:true (); ] let tokens = variable_text_tokens @ no_text_tokens @ given_text_tokens
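The allowed_as_identifier flag documented at the top of this file is queryable like any other field. A small sketch, assuming the file compiles to a module named Token_schema_definition:

open Token_schema_definition

(* Keyword-like tokens that may still be used as plain names. Per the
   comment above, "darray" is in this list, which is why
   `function darray() {}` is legitimate Hack. *)
let identifier_friendly : string list =
  List.filter_map
    (fun t -> if t.allowed_as_identifier then Some t.token_text else None)
    given_text_tokens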
OCaml
hhvm/hphp/hack/src/parser/smart_constructors/smartConstructors.ml
(* * Copyright (c) 2016, Facebook, Inc. * All rights reserved. * * This source code is licensed under the MIT license found in the * LICENSE file in the "hack" directory of this source tree. * ** * * THIS FILE IS @generated; DO NOT EDIT IT * To regenerate this file, run * * buck run //hphp/hack/src:generate_full_fidelity * ** * * This module contains a signature which can be used to describe smart * constructors. *) module ParserEnv = Full_fidelity_parser_env module type SmartConstructors_S = sig module Token : Lexable_token_sig.LexableToken_S type t (* state *) [@@deriving show, sexp_of] type r (* smart constructor return type *) [@@deriving show] val rust_parse : Full_fidelity_source_text.t -> ParserEnv.t -> t * r * Full_fidelity_syntax_error.t list * Rust_pointer.t option val initial_state : ParserEnv.t -> t end
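Since t and r stay abstract, consumers are expected to program against the signature alone. A hypothetical sketch (ParseWith is not part of hhvm) that uses nothing beyond SmartConstructors_S:

module ParserEnv = Full_fidelity_parser_env

module ParseWith (SC : SmartConstructors.SmartConstructors_S) = struct
  (* rust_parse yields the final state, the root result, the accumulated
     syntax errors, and an optional pointer to the Rust-side tree. *)
  let parse (source : Full_fidelity_source_text.t) (env : ParserEnv.t) :
      SC.r * Full_fidelity_syntax_error.t list =
    let (_state, root, errors, _rust_pointer) = SC.rust_parse source env in
    (root, errors)
end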
OCaml
hhvm/hphp/hack/src/parser/smart_constructors/smartConstructorsWrappers.ml
(* * Copyright (c) 2016, Facebook, Inc. * All rights reserved. * * This source code is licensed under the MIT license found in the * LICENSE file in the "hack" directory of this source tree. * ** * * THIS FILE IS @generated; DO NOT EDIT IT * To regenerate this file, run * * buck run //hphp/hack/src:generate_full_fidelity * ** * * This module contains a smart constructors implementation that can be used * to build an AST. *) module type SC_S = SmartConstructors.SmartConstructors_S module SK = Full_fidelity_syntax_kind module type SyntaxKind_S = sig include SC_S type original_sc_r [@@deriving show] end module SyntaxKind (SC : SC_S) : SyntaxKind_S with module Token = SC.Token and type original_sc_r = SC.r and type t = SC.t = struct module Token = SC.Token type original_sc_r = SC.r [@@deriving show] type t = SC.t [@@deriving show, sexp_of] type r = SK.t * SC.r [@@deriving show] let compose : SK.t -> t * SC.r -> t * r = (fun kind (state, res) -> (state, (kind, res))) let rust_parse text env = let (state, res, errors, pointer) = SC.rust_parse text env in let (state, res) = compose SK.Script (state, res) in (state, res, errors, pointer) let initial_state = SC.initial_state end
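The functor's only real work is compose: it threads the state through untouched and pairs each result with its syntax kind, and the root of a full parse is always tagged SK.Script. A hedged sketch of using the wrapper (Example and Sc are hypothetical names):

module Example (Sc : SmartConstructors.SmartConstructors_S) = struct
  module Tagged = SmartConstructorsWrappers.SyntaxKind (Sc)

  let root_kind text env : Full_fidelity_syntax_kind.t =
    let (_state, (kind, _root), _errors, _ptr) = Tagged.rust_parse text env in
    kind (* by construction of rust_parse above, this is SK.Script *)
end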
OCaml
hhvm/hphp/hack/src/parser/smart_constructors/syntaxSmartConstructors.ml
(* * Copyright (c) 2016, Facebook, Inc. * All rights reserved. * * This source code is licensed under the MIT license found in the * LICENSE file in the "hack" directory of this source tree. * ** * * THIS FILE IS @generated; DO NOT EDIT IT * To regenerate this file, run * * buck run //hphp/hack/src:generate_full_fidelity * ** * * This module contains a smart constructors implementation that can be used * to build an AST. *) open Sexplib.Std module type SC_S = SmartConstructors.SmartConstructors_S module ParserEnv = Full_fidelity_parser_env module type State_S = sig type r [@@deriving show] type t [@@deriving show, sexp_of] val initial : ParserEnv.t -> t val next : t -> r list -> t end module type RustParser_S = sig type t type r val rust_parse : Full_fidelity_source_text.t -> ParserEnv.t -> t * r * Full_fidelity_syntax_error.t list * Rust_pointer.t option end module WithSyntax (Syntax : Syntax_sig.Syntax_S) = struct module WithState (State : State_S with type r = Syntax.t) = struct module WithRustParser (RustParser : RustParser_S with type t = State.t with type r = Syntax.t) = struct module Token = Syntax.Token type t = State.t [@@deriving show, sexp_of] type r = Syntax.t [@@deriving show] let rust_parse = RustParser.rust_parse let initial_state = State.initial end end include WithState (struct type r = Syntax.t [@@deriving show] type t = unit [@@deriving show, sexp_of] let initial _ = () let next () _ = () end) include WithRustParser (struct type r = Syntax.t type t = unit let rust_parse = Syntax.rust_parse end) end
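The two includes at the bottom fix the state to unit, but State_S is the hook for threading a custom state: initial seeds it from the parser environment, and next updates it from a list of child results. A hypothetical state that just counts how often next is invoked:

open Sexplib.Std

module CountingState (Syntax : Syntax_sig.Syntax_S) = struct
  type r = Syntax.t [@@deriving show]
  type t = int [@@deriving show, sexp_of]

  (* Start at zero; bump the count on every update. *)
  let initial (_ : Full_fidelity_parser_env.t) = 0
  let next count (_children : r list) = count + 1
end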
Rust
hhvm/hphp/hack/src/parser/syntax_by_ref/arena_state.rs
// Copyright (c) Facebook, Inc. and its affiliates. // // This source code is licensed under the MIT license found in the // LICENSE file in the "hack" directory of this source tree. use bumpalo::Bump; use ocamlrep::Allocator; use ocamlrep::ToOcamlRep; use super::has_arena::HasArena; #[derive(Clone)] pub struct State<'a> { pub arena: &'a Bump, } impl<'a> HasArena<'a> for State<'a> { fn get_arena(&self) -> &'a Bump { self.arena } } impl ToOcamlRep for State<'_> { fn to_ocamlrep<'a, A: Allocator>(&'a self, alloc: &'a A) -> ocamlrep::Value<'a> { ().to_ocamlrep(alloc) } }
Rust
hhvm/hphp/hack/src/parser/syntax_by_ref/has_arena.rs
// Copyright (c) 2019, Facebook, Inc. // All rights reserved. // // This source code is licensed under the MIT license found in the // LICENSE file in the "hack" directory of this source tree. use bumpalo::Bump; pub trait HasArena<'a> { fn get_arena(&self) -> &'a Bump; }
Rust
hhvm/hphp/hack/src/parser/syntax_by_ref/mod.rs
// Copyright (c) 2019, Facebook, Inc. // All rights reserved. // // This source code is licensed under the MIT license found in the // LICENSE file in the "hack" directory of this source tree. pub mod arena_state; pub mod has_arena; pub mod positioned_syntax; pub mod positioned_token; pub mod positioned_trivia; pub mod positioned_value; pub mod serialize; pub mod syntax; pub mod syntax_impl_generated; pub mod syntax_variant_generated; mod syntax_children_iterator; mod syntax_children_iterator_generated; mod syntax_serialize_generated; mod syntax_type_impl_generated; pub use syntax_serialize_generated::*;
Rust
hhvm/hphp/hack/src/parser/syntax_by_ref/positioned_syntax.rs
// Copyright (c) 2019, Facebook, Inc. // All rights reserved. // // This source code is licensed under the MIT license found in the // LICENSE file in the "hack" directory of this source tree. use super::positioned_token::PositionedToken; use super::positioned_value::PositionedValue; use super::syntax::Syntax; use crate::source_text::SourceText; use crate::syntax_trait::SyntaxTrait; pub type PositionedSyntax<'a> = Syntax<'a, PositionedToken<'a>, PositionedValue<'a>>; impl<'a> SyntaxTrait for Syntax<'a, PositionedToken<'a>, PositionedValue<'a>> { fn offset(&self) -> Option<usize> { Some(self.start_offset()) } fn width(&self) -> usize { self.value.width() } fn leading_width(&self) -> usize { self.value.leading_width() } fn trailing_width(&self) -> usize { self.value.trailing_width() } fn full_width(&self) -> usize { self.leading_width() + self.width() + self.trailing_width() } fn leading_start_offset(&self) -> usize { self.value.start_offset() } fn extract_text<'src>(&self, source_text: &'src SourceText<'_>) -> Option<&'src str> { Some(self.text(source_text)) } }
Rust
hhvm/hphp/hack/src/parser/syntax_by_ref/positioned_token.rs
// Copyright (c) 2019, Facebook, Inc. // All rights reserved. // // This source code is licensed under the MIT license found in the // LICENSE file in the "hack" directory of this source tree. use bumpalo::Bump; use crate::compact_trivia::CompactTrivia; use crate::compact_trivia::TriviaKinds; use crate::syntax_by_ref::positioned_trivia; use crate::syntax_by_ref::positioned_trivia::PositionedTrivia; use crate::trivia_factory::SimpleTriviaFactoryImpl; pub type PositionedToken<'a> = internal::PositionedToken<'a, usize>; pub type TokenFactory<'a> = internal::TokenFactory<'a, SimpleTriviaFactoryImpl<CompactTrivia>, usize>; impl internal::TriviaRep for usize { type Trivia = CompactTrivia; fn from_trivia(t: Self::Trivia) -> Self { t.width } fn clone_trivia(&self, kinds: TriviaKinds) -> Self::Trivia { CompactTrivia { kinds, width: *self, } } fn width(&self) -> usize { *self } fn is_empty(&self) -> bool { *self == 0 } } impl internal::SizedTrivia for CompactTrivia { fn kinds(&self) -> TriviaKinds { self.kinds } fn width(&self) -> usize { self.width } } impl<'a> internal::TokenFactory<'a, SimpleTriviaFactoryImpl<CompactTrivia>, usize> { pub fn new(arena: &'a Bump) -> Self { Self { arena, trivia_factory: SimpleTriviaFactoryImpl::new(), _phantom_data: std::marker::PhantomData, } } } pub type PositionedTokenFullTrivia<'a> = internal::PositionedToken<'a, PositionedTrivia<'a>>; pub type TokenFactoryFullTrivia<'a> = internal::TokenFactory<'a, positioned_trivia::Factory<'a>, PositionedTrivia<'a>>; impl<'a> internal::TriviaRep for PositionedTrivia<'a> { type Trivia = PositionedTrivia<'a>; fn from_trivia(t: Self::Trivia) -> Self { t } fn clone_trivia(&self, _: TriviaKinds) -> Self::Trivia { self.clone() } fn width(&self) -> usize { self.iter().map(|t| t.width).sum() } fn is_empty(&self) -> bool { self.is_empty() } } impl internal::SizedTrivia for PositionedTrivia<'_> { fn kinds(&self) -> TriviaKinds { self.iter().fold(TriviaKinds::empty(), |k, t| { k | TriviaKinds::from_kind(t.kind) }) } fn width(&self) -> usize { self.iter().map(|t| t.width).sum() } } impl<'a> internal::TokenFactory<'a, positioned_trivia::Factory<'a>, PositionedTrivia<'a>> { pub fn new(arena: &'a Bump) -> Self { Self { arena, trivia_factory: positioned_trivia::Factory { arena }, _phantom_data: std::marker::PhantomData, } } } pub(crate) mod internal { use bumpalo::Bump; use crate::compact_trivia::CompactTrivia; use crate::compact_trivia::TriviaKinds; use crate::lexable_token::LexablePositionedToken; use crate::lexable_token::LexableToken; use crate::lexable_trivia::LexableTrivia; use crate::positioned_trivia::PositionedTrivium; use crate::source_text::SourceText; use crate::token_factory; use crate::token_kind::TokenKind; use crate::trivia_factory::SimpleTriviaFactoryImpl; use crate::trivia_factory::TriviaFactory; use crate::trivia_kind::TriviaKind; pub trait SizedTrivia { fn kinds(&self) -> TriviaKinds; fn width(&self) -> usize; } pub trait TriviaRep: std::fmt::Debug { type Trivia: LexableTrivia; fn from_trivia(t: Self::Trivia) -> Self; fn clone_trivia(&self, kinds: TriviaKinds) -> Self::Trivia; fn width(&self) -> usize; fn is_empty(&self) -> bool; } #[derive(Debug, PartialEq)] pub struct PositionedTokenImpl<TriviaRep> { pub kind: TokenKind, pub offset: usize, // Beginning of first trivia pub width: usize, // Width of actual token, not counting trivia pub leading_kinds: TriviaKinds, pub trailing_kinds: TriviaKinds, pub leading: TriviaRep, pub trailing: TriviaRep, } #[derive(Debug)] pub struct PositionedToken<'a, TriviaRep>(&'a 
PositionedTokenImpl<TriviaRep>); // derive(Clone) requires Trivia implements Clone, which isn't necessary. impl<'a, TriviaRep> Clone for PositionedToken<'a, TriviaRep> { fn clone(&self) -> Self { Self(self.0) } } // derive(Copy) requires Trivia implements Copy, which isn't necessary. impl<'a, TriviaRep> Copy for PositionedToken<'a, TriviaRep> {} impl<TR: TriviaRep + Clone> PositionedTokenImpl<TR> { fn start_offset(&self) -> usize { self.offset + self.leading.width() } fn end_offset(&self) -> usize { let w = self.width; let w = if w == 0 { 0 } else { w - 1 }; self.start_offset() + w } fn clone(x: &Self) -> Self { Self { kind: x.kind, offset: x.offset, width: x.width, leading_kinds: x.leading_kinds, trailing_kinds: x.trailing_kinds, leading: x.leading.clone(), trailing: x.trailing.clone(), } } } impl<'a, TR: TriviaRep + Clone> PositionedToken<'a, TR> { pub fn start_offset(&self) -> usize { self.0.start_offset() } pub fn end_offset(&self) -> usize { self.0.end_offset() } pub fn inner_ptr_eq(x: &Self, y: &Self) -> bool { std::ptr::eq(x.0, y.0) } pub fn offset(&self) -> usize { self.0.offset } pub fn leading_kinds(&self) -> TriviaKinds { self.0.leading_kinds } pub fn trailing_kinds(&self) -> TriviaKinds { self.0.trailing_kinds } } impl<'a, TR: TriviaRep + Clone> LexableToken for PositionedToken<'a, TR> where TR::Trivia: Clone, { type Trivia = TR::Trivia; fn kind(&self) -> TokenKind { self.0.kind } fn leading_start_offset(&self) -> Option<usize> { Some(self.0.offset) } fn width(&self) -> usize { self.0.width } fn leading_width(&self) -> usize { self.0.leading.width() } fn trailing_width(&self) -> usize { self.0.trailing.width() } fn full_width(&self) -> usize { self.0.leading.width() + self.0.width + self.0.trailing.width() } fn clone_leading(&self) -> Self::Trivia { self.0.leading.clone_trivia(self.0.leading_kinds) } fn clone_trailing(&self) -> Self::Trivia { self.0.trailing.clone_trivia(self.0.trailing_kinds) } fn leading_is_empty(&self) -> bool { self.0.leading.is_empty() } fn trailing_is_empty(&self) -> bool { self.0.trailing.is_empty() } fn has_leading_trivia_kind(&self, kind: TriviaKind) -> bool { self.0.leading_kinds.has_kind(kind) } fn has_trailing_trivia_kind(&self, kind: TriviaKind) -> bool { self.0.trailing_kinds.has_kind(kind) } fn into_trivia_and_width(self) -> (Self::Trivia, usize, Self::Trivia) { (self.clone_leading(), self.width(), self.clone_trailing()) } } #[derive(Clone)] pub struct TokenFactory<'a, TriviaFactory, TriviaRep> { pub arena: &'a Bump, pub trivia_factory: TriviaFactory, pub _phantom_data: std::marker::PhantomData<TriviaRep>, } impl<'a, TR, TF> token_factory::TokenFactory for TokenFactory<'a, TF, TR> where TF: TriviaFactory + Clone, TF::Trivia: SizedTrivia + 'a, TR: TriviaRep<Trivia = TF::Trivia> + Clone + 'a, { type Token = PositionedToken<'a, TR>; type TriviaFactory = TF; fn make( &mut self, kind: TokenKind, offset: usize, width: usize, leading: TF::Trivia, trailing: TF::Trivia, ) -> Self::Token { PositionedToken(self.arena.alloc(PositionedTokenImpl { kind, offset, width, leading_kinds: leading.kinds(), trailing_kinds: trailing.kinds(), leading: TR::from_trivia(leading), trailing: TR::from_trivia(trailing), })) } fn with_leading(&mut self, token: Self::Token, leading: TF::Trivia) -> Self::Token { let mut new = PositionedTokenImpl::clone(token.0); let token_start_offset = token.0.offset + token.0.leading.width(); new.offset = token_start_offset - leading.width(); new.leading_kinds = leading.kinds(); new.leading = TR::from_trivia(leading); 
PositionedToken(self.arena.alloc(new)) } fn with_trailing(&mut self, token: Self::Token, trailing: TF::Trivia) -> Self::Token { let mut new = PositionedTokenImpl::clone(token.0); new.trailing_kinds = trailing.kinds(); new.trailing = TR::from_trivia(trailing); PositionedToken(self.arena.alloc(new)) } fn with_kind(&mut self, token: Self::Token, kind: TokenKind) -> Self::Token { let mut new = PositionedTokenImpl::clone(token.0); new.kind = kind; PositionedToken(self.arena.alloc(new)) } fn trivia_factory_mut(&mut self) -> &mut Self::TriviaFactory { &mut self.trivia_factory } } impl<'a> token_factory::TokenMutator for TokenFactory<'a, SimpleTriviaFactoryImpl<CompactTrivia>, usize> { fn trim_left(&mut self, t: &Self::Token, n: usize) -> Self::Token { let mut new = PositionedTokenImpl::clone(t.0); new.leading += n; new.width = t.width() - n; PositionedToken(self.arena.alloc(new)) } fn trim_right(&mut self, t: &Self::Token, n: usize) -> Self::Token { let mut new = PositionedTokenImpl::clone(t.0); new.trailing += n; new.width = t.width() - n; PositionedToken(self.arena.alloc(new)) } fn concatenate(&mut self, s: &Self::Token, e: &Self::Token) -> Self::Token { let mut new = PositionedTokenImpl::clone(s.0); new.width = e.end_offset() + 1 - s.start_offset(); let e_trailing = e.clone_trailing(); new.trailing = usize::from_trivia(e_trailing); PositionedToken(self.arena.alloc(new)) } } impl<'a, TR: TriviaRep + Clone> LexablePositionedToken for PositionedToken<'a, TR> { fn text<'b>(&self, source_text: &'b SourceText<'_>) -> &'b str { source_text.sub_as_str(self.0.start_offset(), self.0.width) } fn text_raw<'b>(&self, source_text: &'b SourceText<'_>) -> &'b [u8] { source_text.sub(self.0.start_offset(), self.0.width) } fn clone_value(&self) -> Self { self.clone() } fn positioned_leading(&self) -> &[PositionedTrivium] { unimplemented!() } fn positioned_trailing(&self) -> &[PositionedTrivium] { unimplemented!() } } }
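All positions in this file are reconstructed arithmetically: with the compact usize TriviaRep, a token stores only the offset of its first leading trivium, its own width, and the widths of its leading and trailing trivia. A minimal OCaml model of that arithmetic (field names mirror the Rust struct; this is an illustration, not the parser's actual OCaml token type):

type token = {
  offset : int;  (* beginning of the first leading trivium *)
  width : int;  (* width of the token text, not counting trivia *)
  leading_width : int;  (* all the compact rep keeps of leading trivia *)
  trailing_width : int;  (* likewise for trailing trivia *)
}

let start_offset t = t.offset + t.leading_width

(* Matches the Rust end_offset: a zero-width token ends where it starts. *)
let end_offset t = start_offset t + max 0 (t.width - 1)

let full_width t = t.leading_width + t.width + t.trailing_width

(* concatenate's recomputed width in the Rust code obeys the same model. *)
let concat_width s e = end_offset e + 1 - start_offset s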
Rust
hhvm/hphp/hack/src/parser/syntax_by_ref/positioned_trivia.rs
// Copyright (c) Facebook, Inc. and its affiliates. // // This source code is licensed under the MIT license found in the // LICENSE file in the "hack" directory of this source tree. use bumpalo::collections::Vec; use bumpalo::Bump; use crate::lexable_trivia::LexableTrivia; use crate::positioned_trivia::PositionedTrivium; use crate::trivia_factory::TriviaFactory; use crate::trivia_kind::TriviaKind; pub type PositionedTrivia<'a> = Vec<'a, PositionedTrivium>; impl<'a> LexableTrivia for PositionedTrivia<'a> { type Trivium = PositionedTrivium; fn is_empty(&self) -> bool { self.is_empty() } fn has_kind(&self, kind: TriviaKind) -> bool { self.iter().any(|t| t.kind == kind) } fn push(&mut self, trivium: Self::Trivium) { self.push(trivium) } fn extend(&mut self, other: Self) { for trivium in other { self.push(trivium) } } } #[derive(Clone, Debug)] pub struct Factory<'a> { pub arena: &'a Bump, } impl<'a> TriviaFactory for Factory<'a> { type Trivia = PositionedTrivia<'a>; fn make(&mut self) -> Self::Trivia { Vec::new_in(self.arena) } }
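// Illustrative sketch (editor's addition, not part of the original file):
// the LexableTrivia methods above delegate to bumpalo Vec's inherent
// `is_empty`/`push` (inherent methods win method resolution, so the calls
// are not recursive). Usage might look like the following; `PositionedTrivium`'s
// public fields (kind/offset/width) are the ones read by serialize.rs below.
//
//     let arena = Bump::new();
//     let mut factory = Factory { arena: &arena };
//     let mut trivia = factory.make(); // empty Vec allocated in the arena
//     trivia.push(PositionedTrivium { kind: TriviaKind::WhiteSpace, offset: 0, width: 1 });
//     assert!(trivia.has_kind(TriviaKind::WhiteSpace));
//     assert!(!LexableTrivia::is_empty(&trivia));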
Rust
hhvm/hphp/hack/src/parser/syntax_by_ref/positioned_value.rs
// Copyright (c) 2019, Facebook, Inc. // All rights reserved. // // This source code is licensed under the MIT license found in the // LICENSE file in the "hack" directory of this source tree. use super::positioned_trivia::PositionedTrivia; pub type PositionedValue<'a> = internal::PositionedValue<'a, usize>; pub type PositionedValueFullTrivia<'a> = internal::PositionedValue<'a, PositionedTrivia<'a>>; mod internal { use std::matches; use crate::lexable_token::LexableToken; use crate::syntax::SyntaxValueType; use crate::syntax::SyntaxValueWithKind; use crate::syntax_by_ref::positioned_token::internal::PositionedToken; use crate::syntax_by_ref::positioned_token::internal::TriviaRep; use crate::syntax_kind::SyntaxKind; use crate::token_kind::TokenKind; #[derive(Debug, Clone)] pub enum PositionedValue<'a, Trivia> { /// value for a token node is token itself TokenValue(PositionedToken<'a, Trivia>), /// value for a range denoted by pair of tokens TokenSpan(PositionedToken<'a, Trivia>, PositionedToken<'a, Trivia>), Missing { offset: usize, }, } impl<'a, Trivia> PositionedValue<'a, Trivia> where Trivia: TriviaRep + Clone, { pub fn width(&self) -> usize { match self { PositionedValue::TokenValue(t) => t.width(), PositionedValue::TokenSpan(left, right) => { (right.end_offset() - left.start_offset()) + 1 } PositionedValue::Missing { .. } => 0, } } pub fn start_offset(&self) -> usize { use PositionedValue::*; match &self { TokenValue(t) => t .leading_start_offset() .expect("invariant violation for Positioned Syntax"), TokenSpan(left, _) => left .leading_start_offset() .expect("invariant violation for Positioned Syntax"), Missing { offset, .. } => *offset, } } pub fn leading_width(&self) -> usize { use PositionedValue::*; match self { TokenValue(t) => t.leading_width(), TokenSpan(left, _) => left.leading_width(), Missing { .. } => 0, } } pub fn trailing_width(&self) -> usize { use PositionedValue::*; match self { TokenValue(t) => t.trailing_width(), TokenSpan(_, right) => right.trailing_width(), Missing { .. } => 0, } } pub fn leading_token(&self) -> Option<PositionedToken<'a, Trivia>> { use PositionedValue::*; match self { TokenValue(l) => Some(l.clone()), TokenSpan(left, _) => Some(left.clone()), _ => None, } } pub fn trailing_token(&self) -> Option<PositionedToken<'a, Trivia>> { use PositionedValue::*; match self { TokenValue(r) => Some(r.clone()), TokenSpan(_, right) => Some(right.clone()), _ => None, } } fn value_from_outer_children(first: &Self, last: &Self) -> Self { use PositionedValue::*; match (first, last) { (TokenValue(_), TokenValue(_)) | (TokenSpan(_, _), TokenValue(_)) | (TokenValue(_), TokenSpan(_, _)) | (TokenSpan(_, _), TokenSpan(_, _)) => { let l = first.leading_token().unwrap(); let r = last.trailing_token().unwrap(); if PositionedToken::inner_ptr_eq(&l, &r) { TokenValue(l) } else { TokenSpan(l, r) } } // can have two missing nodes if first and last child nodes of // the node are missing - this means that entire node is missing. 
// NOTE: offset must match otherwise it will mean that there is a real node // in between that should be picked instead (Missing { offset: o1 }, Missing { offset: o2 }) if o1 == o2 => first.clone(), _ => panic!(), } } } impl<'a, Trivia: 'a> SyntaxValueType<PositionedToken<'a, Trivia>> for PositionedValue<'a, Trivia> where Trivia: TriviaRep + Clone, { fn from_values<'b>(child_values: impl Iterator<Item = &'b Self>) -> Self where 'a: 'b, { use PositionedValue::*; let mut first = None; let mut first_non_zero = None; let mut last_non_zero = None; let mut last = None; for value in child_values { match (first.is_some(), first_non_zero.is_some(), value) { (false, false, TokenValue { .. }) | (false, false, TokenSpan { .. }) => { // first iteration and first node has some token representation - // record it as first, first_non_zero, last and last_non_zero first = Some(value); first_non_zero = Some(value); last_non_zero = Some(value); last = Some(value); } (false, false, Missing { .. }) => { // first iteration - first node is missing - // record it as first and last first = Some(value); first_non_zero = None; last_non_zero = None; last = Some(value); } (true, false, TokenValue { .. }) | (true, false, TokenSpan { .. }) => { // in progress, found first node that include tokens - // record it as first_non_zero, last and last_non_zero first_non_zero = Some(value); last_non_zero = Some(value); last = Some(value); } (true, true, TokenValue { .. }) | (true, true, TokenSpan { .. }) => { // in progress found some node that includes tokens - // record it as last_non_zero and last last_non_zero = Some(value); last = Some(value); } _ => { // in progress, stepped on missing node - // record it as last and move on last = Some(value); } } } match (first, first_non_zero, last_non_zero, last) { (_, Some(first_non_zero), Some(last_non_zero), _) => { Self::value_from_outer_children(first_non_zero, last_non_zero) } (Some(first), None, None, Some(last)) => { Self::value_from_outer_children(first, last) } _ => panic!("how did we get a node with no children in value_from_syntax?"), } } fn from_token(token: PositionedToken<'a, Trivia>) -> Self { if token.kind() == TokenKind::EndOfFile || token.full_width() == 0 { PositionedValue::Missing { offset: token.end_offset(), } } else { PositionedValue::TokenValue(token) } } fn from_children<'b>( _: SyntaxKind, offset: usize, nodes: impl Iterator<Item = &'b Self>, ) -> Self where 'a: 'b, { // We need to determine the offset, leading, middle and trailing widths of // the node to be constructed based on its children. If the children are // all of zero width -- including the case where there are no children at // all -- then we make a zero-width value at the given offset. // Otherwise, we can determine the associated value from the first and last // children that have width. let mut have_width = nodes.filter(|x| x.width() > 0).peekable(); match have_width.peek() { None => PositionedValue::Missing { offset }, Some(first) => Self::value_from_outer_children(first, have_width.last().unwrap()), } } } impl<'a, Trivia: std::fmt::Debug> SyntaxValueWithKind for PositionedValue<'a, Trivia> where Trivia: TriviaRep + Clone, { fn is_missing(&self) -> bool { matches!(self, PositionedValue::Missing { .. }) } fn token_kind(&self) -> Option<TokenKind> { match self { PositionedValue::TokenValue(pt) => Some(pt.kind()), _ => None, } } } }
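// Illustrative sketch (editor's addition, not part of the original file):
// width arithmetic for the variants above, with inclusive end offsets as in
// positioned_token.rs. A TokenSpan whose left token occupies byte offsets
// 12..=16 and whose right token occupies 20..=24 reports
//
//     width() == (24 - 12) + 1 == 13
//
// covering both tokens and everything between them, while a Missing value
// always reports width() == 0 at its recorded offset.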
Rust
hhvm/hphp/hack/src/parser/syntax_by_ref/serialize.rs
// Copyright (c) Facebook, Inc. and its affiliates. // // This source code is licensed under the MIT license found in the // LICENSE file in the "hack" directory of this source tree. use serde::ser::SerializeSeq; use serde::ser::SerializeStruct; use serde::Serialize; use serde::Serializer; use super::positioned_token::PositionedTokenFullTrivia; use crate::indexed_source_text::IndexedSourceText; use crate::lexable_token::LexableToken; use crate::positioned_trivia::PositionedTrivium; pub struct WithContext<'a, T: ?Sized>(pub &'a IndexedSourceText<'a>, pub &'a T); impl<'a, T> WithContext<'a, T> { pub(crate) fn with<S: ?Sized>(&self, x: &'a S) -> WithContext<'a, S> { WithContext(self.0, x) } } impl<'a, T> Serialize for WithContext<'a, [T]> where WithContext<'a, T>: Serialize, { fn serialize<S: Serializer>(&self, s: S) -> Result<S::Ok, S::Error> { let mut ss = s.serialize_seq(Some(self.1.len()))?; for i in self.1.iter() { ss.serialize_element(&WithContext(self.0, i))?; } ss.end() } } impl<'a> Serialize for WithContext<'a, PositionedTokenFullTrivia<'a>> { fn serialize<S: Serializer>(&self, s: S) -> Result<S::Ok, S::Error> { let offset = self.1.offset(); let width = self.1.width(); let token_offset = offset + self.1.leading_width(); let mut ss = s.serialize_struct("", 9)?; ss.serialize_field("kind", self.1.kind().to_string())?; ss.serialize_field("text", self.0.source_text().sub_as_str(token_offset, width))?; ss.serialize_field("offset", &offset)?; ss.serialize_field("leading_width", &self.1.leading_width())?; ss.serialize_field("width", &width)?; ss.serialize_field("trailing_width", &self.1.trailing_width())?; ss.serialize_field("leading", &self.with(self.1.clone_leading().as_slice()))?; ss.serialize_field("trailing", &self.with(self.1.clone_trailing().as_slice()))?; ss.serialize_field( "line_number", &self.0.offset_to_position(token_offset as isize).0, )?; ss.end() } } impl<'a> Serialize for WithContext<'a, PositionedTrivium> { fn serialize<S: Serializer>(&self, s: S) -> Result<S::Ok, S::Error> { let mut ss = s.serialize_struct("", 4)?; ss.serialize_field("kind", self.1.kind.to_string())?; ss.serialize_field( "text", self.0.source_text().sub_as_str(self.1.offset, self.1.width), )?; ss.serialize_field("offset", &self.1.offset)?; ss.serialize_field("width", &self.1.width)?; ss.end() } }
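// Illustrative sketch (editor's addition, not part of the original file):
// `WithContext` pairs a node or token with the IndexedSourceText it came
// from, so the Serialize impls can recover source text and line numbers.
// Assuming the serde_json crate, an `IndexedSourceText::new(source_text)`
// constructor, and a `token: PositionedTokenFullTrivia` already in scope
// (all assumptions here), serialization might look like:
//
//     let indexed = IndexedSourceText::new(source_text);
//     let json = serde_json::to_string(&WithContext(&indexed, &token))?;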
Rust
hhvm/hphp/hack/src/parser/syntax_by_ref/syntax.rs
// Copyright (c) 2019, Facebook, Inc. // All rights reserved. // // This source code is licensed under the MIT license found in the // LICENSE file in the "hack" directory of this source tree. use std::iter::empty; use std::iter::once; use bumpalo::collections::Vec; use itertools::Either::Left; use itertools::Either::Right; use super::has_arena::HasArena; use super::syntax_children_iterator::SyntaxChildrenIterator; use super::syntax_variant_generated::SyntaxVariant; use crate::lexable_token::LexableToken; use crate::syntax::SyntaxTypeBase; use crate::syntax::SyntaxValueType; use crate::syntax_kind::SyntaxKind; use crate::token_kind::TokenKind; #[derive(Debug, Clone)] pub struct Syntax<'a, T, V> { pub children: SyntaxVariant<'a, T, V>, pub value: V, } impl<'a, T, V> Syntax<'a, T, V> { pub fn make(t: SyntaxVariant<'a, T, V>, v: V) -> Self { Self { children: t, value: v, } } pub fn get_token(&self) -> Option<&T> { match &self.children { SyntaxVariant::Token(t) => Some(t), _ => None, } } #[allow(dead_code)] pub fn iter_children(&'a self) -> SyntaxChildrenIterator<'a, T, V> { self.children.iter_children() } pub fn syntax_node_to_list(&self) -> impl DoubleEndedIterator<Item = &Syntax<'a, T, V>> { match &self.children { SyntaxVariant::SyntaxList(x) => Left(x.iter()), SyntaxVariant::Missing => Right(Left(empty())), _ => Right(Right(once(self))), } } pub fn syntax_node_to_list_skip_separator( &self, ) -> impl DoubleEndedIterator<Item = &Syntax<'a, T, V>> { match &self.children { SyntaxVariant::SyntaxList(l) => Left(l.iter().map(|n| match &n.children { SyntaxVariant::ListItem(i) => &i.item, _ => n, })), SyntaxVariant::Missing => Right(Left(empty())), _ => Right(Right(once(self))), } } } impl<'a, T: Copy, V: SyntaxValueType<T>> Syntax<'a, T, V> { pub fn make_token(t: T) -> Self { let value = V::from_token(t); let syntax = SyntaxVariant::Token(t); Self::make(syntax, value) } pub fn make_missing(offset: usize) -> Self { let value = V::from_children(SyntaxKind::Missing, offset, empty()); let syntax = SyntaxVariant::Missing; Self::make(syntax, value) } } impl<'a, T: LexableToken, V> Syntax<'a, T, V> { fn is_specific_token(&self, kind: TokenKind) -> bool { match &self.children { SyntaxVariant::Token(t) => t.kind() == kind, _ => false, } } pub fn is_public(&self) -> bool { self.is_specific_token(TokenKind::Public) } pub fn is_private(&self) -> bool { self.is_specific_token(TokenKind::Private) } pub fn is_internal(&self) -> bool { self.is_specific_token(TokenKind::Internal) } pub fn is_protected(&self) -> bool { self.is_specific_token(TokenKind::Protected) } pub fn is_abstract(&self) -> bool { self.is_specific_token(TokenKind::Abstract) } pub fn is_static(&self) -> bool { self.is_specific_token(TokenKind::Static) } pub fn is_ampersand(&self) -> bool { self.is_specific_token(TokenKind::Ampersand) } pub fn is_ellipsis(&self) -> bool { self.is_specific_token(TokenKind::DotDotDot) } pub fn is_final(&self) -> bool { self.is_specific_token(TokenKind::Final) } pub fn is_xhp(&self) -> bool { self.is_specific_token(TokenKind::XHP) } pub fn is_async(&self) -> bool { self.is_specific_token(TokenKind::Async) } pub fn is_yield(&self) -> bool { self.is_specific_token(TokenKind::Yield) } pub fn is_construct(&self) -> bool { self.is_specific_token(TokenKind::Construct) } pub fn is_void(&self) -> bool { self.is_specific_token(TokenKind::Void) } pub fn is_left_brace(&self) -> bool { self.is_specific_token(TokenKind::LeftBrace) } pub fn is_comma(&self) -> bool { self.is_specific_token(TokenKind::Comma) } pub fn 
is_inout(&self) -> bool { self.is_specific_token(TokenKind::Inout) } pub fn is_this(&self) -> bool { self.is_specific_token(TokenKind::This) } pub fn is_name(&self) -> bool { self.is_specific_token(TokenKind::Name) } pub fn is_class(&self) -> bool { self.is_specific_token(TokenKind::Class) } pub fn is_as_expression(&self) -> bool { self.kind() == SyntaxKind::AsExpression } pub fn is_missing(&self) -> bool { self.kind() == SyntaxKind::Missing } pub fn is_external(&self) -> bool { self.is_specific_token(TokenKind::Semicolon) || self.is_missing() } pub fn is_readonly(&self) -> bool { self.is_specific_token(TokenKind::Readonly) } pub fn is_namespace_empty_body(&self) -> bool { self.kind() == SyntaxKind::NamespaceEmptyBody } pub fn is_attribute_specification(&self) -> bool { self.kind() == SyntaxKind::AttributeSpecification } pub fn is_old_attribute_specification(&self) -> bool { self.kind() == SyntaxKind::OldAttributeSpecification } pub fn is_file_attribute_specification(&self) -> bool { self.kind() == SyntaxKind::FileAttributeSpecification } pub fn is_return_statement(&self) -> bool { self.kind() == SyntaxKind::ReturnStatement } pub fn is_conditional_expression(&self) -> bool { self.kind() == SyntaxKind::ConditionalExpression } pub fn is_safe_member_selection_expression(&self) -> bool { self.kind() == SyntaxKind::SafeMemberSelectionExpression } pub fn is_object_creation_expression(&self) -> bool { self.kind() == SyntaxKind::ObjectCreationExpression } pub fn is_compound_statement(&self) -> bool { self.kind() == SyntaxKind::CompoundStatement } pub fn is_methodish_declaration(&self) -> bool { self.kind() == SyntaxKind::MethodishDeclaration } pub fn is_function_declaration(&self) -> bool { self.kind() == SyntaxKind::FunctionDeclaration } pub fn is_xhp_open(&self) -> bool { self.kind() == SyntaxKind::XHPOpen } pub fn is_braced_expression(&self) -> bool { self.kind() == SyntaxKind::BracedExpression } pub fn is_syntax_list(&self) -> bool { self.kind() == SyntaxKind::SyntaxList } pub fn is_namespace_prefix(&self) -> bool { if let SyntaxVariant::QualifiedName(x) = &self.children { x.parts .syntax_node_to_list() .last() .map_or(false, |p| match &p.children { SyntaxVariant::ListItem(x) => !&x.separator.is_missing(), _ => false, }) } else { false } } } impl<'a, C, T, V> SyntaxTypeBase<C> for Syntax<'a, T, V> where T: LexableToken + Copy, V: SyntaxValueType<T>, C: HasArena<'a>, { type Token = T; type Value = V; fn make_missing(_: &C, offset: usize) -> Self { Self::make_missing(offset) } fn make_token(_: &C, arg: T) -> Self { Self::make_token(arg) } fn make_list(ctx: &C, arg: std::vec::Vec<Self>, offset: usize) -> Self { // An empty list is represented by Missing; everything else is a // SyntaxList, even if the list has only one item. if arg.is_empty() { Self::make_missing(offset) } else { let mut list = Vec::with_capacity_in(arg.len(), ctx.get_arena()); list.extend(arg.into_iter()); let list = list.into_bump_slice(); let nodes = list.iter().map(|x| &x.value); let value = V::from_children(SyntaxKind::SyntaxList, offset, nodes); let syntax = SyntaxVariant::SyntaxList(list); Self::make(syntax, value) } } fn value(&self) -> &Self::Value { &self.value } }
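// Illustrative sketch (editor's addition, not part of the original file):
// `syntax_node_to_list` normalizes any node into an iterator of nodes: a
// SyntaxList yields its elements, Missing yields nothing, and any other node
// yields itself once, so counting "list items" works uniformly:
//
//     let n = node.syntax_node_to_list().count();
//     // SyntaxList(xs) => xs.len(), Missing => 0, anything else => 1
//
// `syntax_node_to_list_skip_separator` additionally unwraps each ListItem to
// its `item`, dropping separators such as commas.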
Rust
hhvm/hphp/hack/src/parser/syntax_by_ref/syntax_children_iterator.rs
// Copyright (c) 2019, Facebook, Inc. // All rights reserved. // // This source code is licensed under the MIT license found in the // LICENSE file in the "hack" directory of this source tree. use super::syntax::*; use super::syntax_variant_generated::SyntaxVariant; pub struct SyntaxChildrenIterator<'a, T, V> { pub syntax: &'a SyntaxVariant<'a, T, V>, pub index: usize, pub index_back: usize, } impl<'a, T, V> Iterator for SyntaxChildrenIterator<'a, T, V> { type Item = &'a Syntax<'a, T, V>; fn next(&mut self) -> Option<Self::Item> { self.next_impl(true) } } impl<'a, T, V> DoubleEndedIterator for SyntaxChildrenIterator<'a, T, V> { fn next_back(&mut self) -> Option<Self::Item> { self.next_impl(false) } }
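// Illustrative sketch (editor's addition, not part of the original file):
// the iterator keeps two cursors: `index` counts children consumed from the
// front and `index_back` counts children consumed from the back; the
// generated `next_impl` (next file) returns None once they meet. For a node
// with children [a, b, c]:
//
//     let mut it = node.iter_children();
//     it.next();      // Some(a)   index      0 -> 1
//     it.next_back(); // Some(c)   index_back 0 -> 1
//     it.next();      // Some(b)   index      1 -> 2
//     it.next();      // None      the cursors have met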
Rust
hhvm/hphp/hack/src/parser/syntax_by_ref/syntax_children_iterator_generated.rs
/** * Copyright (c) 2016, Facebook, Inc. * All rights reserved. * * This source code is licensed under the MIT license found in the * LICENSE file in the "hack" directory of this source tree. An additional * directory. * ** * * THIS FILE IS @generated; DO NOT EDIT IT * To regenerate this file, run * * buck run //hphp/hack/src:generate_full_fidelity * ** * */ use super::{ syntax_children_iterator::*, syntax_variant_generated::*, syntax::* }; impl<'a, T, V> SyntaxChildrenIterator<'a, T, V> { pub fn next_impl(&mut self, direction : bool) -> Option<&'a Syntax<'a, T, V>> { use SyntaxVariant::*; let get_index = |len| { let back_index_plus_1 = len - self.index_back; if back_index_plus_1 <= self.index { return None } if direction { Some (self.index) } else { Some (back_index_plus_1 - 1) } }; let res = match self.syntax { Missing => None, Token (_) => None, SyntaxList(elems) => { get_index(elems.len()).and_then(|x| elems.get(x)) }, EndOfFile(x) => { get_index(1).and_then(|index| { match index { 0 => Some(&x.token), _ => None, } }) }, Script(x) => { get_index(1).and_then(|index| { match index { 0 => Some(&x.declarations), _ => None, } }) }, QualifiedName(x) => { get_index(1).and_then(|index| { match index { 0 => Some(&x.parts), _ => None, } }) }, ModuleName(x) => { get_index(1).and_then(|index| { match index { 0 => Some(&x.parts), _ => None, } }) }, SimpleTypeSpecifier(x) => { get_index(1).and_then(|index| { match index { 0 => Some(&x.specifier), _ => None, } }) }, LiteralExpression(x) => { get_index(1).and_then(|index| { match index { 0 => Some(&x.expression), _ => None, } }) }, PrefixedStringExpression(x) => { get_index(2).and_then(|index| { match index { 0 => Some(&x.name), 1 => Some(&x.str), _ => None, } }) }, PrefixedCodeExpression(x) => { get_index(4).and_then(|index| { match index { 0 => Some(&x.prefix), 1 => Some(&x.left_backtick), 2 => Some(&x.body), 3 => Some(&x.right_backtick), _ => None, } }) }, VariableExpression(x) => { get_index(1).and_then(|index| { match index { 0 => Some(&x.expression), _ => None, } }) }, PipeVariableExpression(x) => { get_index(1).and_then(|index| { match index { 0 => Some(&x.expression), _ => None, } }) }, FileAttributeSpecification(x) => { get_index(5).and_then(|index| { match index { 0 => Some(&x.left_double_angle), 1 => Some(&x.keyword), 2 => Some(&x.colon), 3 => Some(&x.attributes), 4 => Some(&x.right_double_angle), _ => None, } }) }, EnumDeclaration(x) => { get_index(11).and_then(|index| { match index { 0 => Some(&x.attribute_spec), 1 => Some(&x.modifiers), 2 => Some(&x.keyword), 3 => Some(&x.name), 4 => Some(&x.colon), 5 => Some(&x.base), 6 => Some(&x.type_), 7 => Some(&x.left_brace), 8 => Some(&x.use_clauses), 9 => Some(&x.enumerators), 10 => Some(&x.right_brace), _ => None, } }) }, EnumUse(x) => { get_index(3).and_then(|index| { match index { 0 => Some(&x.keyword), 1 => Some(&x.names), 2 => Some(&x.semicolon), _ => None, } }) }, Enumerator(x) => { get_index(4).and_then(|index| { match index { 0 => Some(&x.name), 1 => Some(&x.equal), 2 => Some(&x.value), 3 => Some(&x.semicolon), _ => None, } }) }, EnumClassDeclaration(x) => { get_index(12).and_then(|index| { match index { 0 => Some(&x.attribute_spec), 1 => Some(&x.modifiers), 2 => Some(&x.enum_keyword), 3 => Some(&x.class_keyword), 4 => Some(&x.name), 5 => Some(&x.colon), 6 => Some(&x.base), 7 => Some(&x.extends), 8 => Some(&x.extends_list), 9 => Some(&x.left_brace), 10 => Some(&x.elements), 11 => Some(&x.right_brace), _ => None, } }) }, EnumClassEnumerator(x) => { get_index(5).and_then(|index| { match 
index { 0 => Some(&x.modifiers), 1 => Some(&x.type_), 2 => Some(&x.name), 3 => Some(&x.initializer), 4 => Some(&x.semicolon), _ => None, } }) }, AliasDeclaration(x) => { get_index(10).and_then(|index| { match index { 0 => Some(&x.attribute_spec), 1 => Some(&x.modifiers), 2 => Some(&x.module_kw_opt), 3 => Some(&x.keyword), 4 => Some(&x.name), 5 => Some(&x.generic_parameter), 6 => Some(&x.constraint), 7 => Some(&x.equal), 8 => Some(&x.type_), 9 => Some(&x.semicolon), _ => None, } }) }, ContextAliasDeclaration(x) => { get_index(8).and_then(|index| { match index { 0 => Some(&x.attribute_spec), 1 => Some(&x.keyword), 2 => Some(&x.name), 3 => Some(&x.generic_parameter), 4 => Some(&x.as_constraint), 5 => Some(&x.equal), 6 => Some(&x.context), 7 => Some(&x.semicolon), _ => None, } }) }, CaseTypeDeclaration(x) => { get_index(11).and_then(|index| { match index { 0 => Some(&x.attribute_spec), 1 => Some(&x.modifiers), 2 => Some(&x.case_keyword), 3 => Some(&x.type_keyword), 4 => Some(&x.name), 5 => Some(&x.generic_parameter), 6 => Some(&x.as_), 7 => Some(&x.bounds), 8 => Some(&x.equal), 9 => Some(&x.variants), 10 => Some(&x.semicolon), _ => None, } }) }, CaseTypeVariant(x) => { get_index(2).and_then(|index| { match index { 0 => Some(&x.bar), 1 => Some(&x.type_), _ => None, } }) }, PropertyDeclaration(x) => { get_index(5).and_then(|index| { match index { 0 => Some(&x.attribute_spec), 1 => Some(&x.modifiers), 2 => Some(&x.type_), 3 => Some(&x.declarators), 4 => Some(&x.semicolon), _ => None, } }) }, PropertyDeclarator(x) => { get_index(2).and_then(|index| { match index { 0 => Some(&x.name), 1 => Some(&x.initializer), _ => None, } }) }, NamespaceDeclaration(x) => { get_index(2).and_then(|index| { match index { 0 => Some(&x.header), 1 => Some(&x.body), _ => None, } }) }, NamespaceDeclarationHeader(x) => { get_index(2).and_then(|index| { match index { 0 => Some(&x.keyword), 1 => Some(&x.name), _ => None, } }) }, NamespaceBody(x) => { get_index(3).and_then(|index| { match index { 0 => Some(&x.left_brace), 1 => Some(&x.declarations), 2 => Some(&x.right_brace), _ => None, } }) }, NamespaceEmptyBody(x) => { get_index(1).and_then(|index| { match index { 0 => Some(&x.semicolon), _ => None, } }) }, NamespaceUseDeclaration(x) => { get_index(4).and_then(|index| { match index { 0 => Some(&x.keyword), 1 => Some(&x.kind), 2 => Some(&x.clauses), 3 => Some(&x.semicolon), _ => None, } }) }, NamespaceGroupUseDeclaration(x) => { get_index(7).and_then(|index| { match index { 0 => Some(&x.keyword), 1 => Some(&x.kind), 2 => Some(&x.prefix), 3 => Some(&x.left_brace), 4 => Some(&x.clauses), 5 => Some(&x.right_brace), 6 => Some(&x.semicolon), _ => None, } }) }, NamespaceUseClause(x) => { get_index(4).and_then(|index| { match index { 0 => Some(&x.clause_kind), 1 => Some(&x.name), 2 => Some(&x.as_), 3 => Some(&x.alias), _ => None, } }) }, FunctionDeclaration(x) => { get_index(3).and_then(|index| { match index { 0 => Some(&x.attribute_spec), 1 => Some(&x.declaration_header), 2 => Some(&x.body), _ => None, } }) }, FunctionDeclarationHeader(x) => { get_index(12).and_then(|index| { match index { 0 => Some(&x.modifiers), 1 => Some(&x.keyword), 2 => Some(&x.name), 3 => Some(&x.type_parameter_list), 4 => Some(&x.left_paren), 5 => Some(&x.parameter_list), 6 => Some(&x.right_paren), 7 => Some(&x.contexts), 8 => Some(&x.colon), 9 => Some(&x.readonly_return), 10 => Some(&x.type_), 11 => Some(&x.where_clause), _ => None, } }) }, Contexts(x) => { get_index(3).and_then(|index| { match index { 0 => Some(&x.left_bracket), 1 => Some(&x.types), 2 => 
Some(&x.right_bracket), _ => None, } }) }, WhereClause(x) => { get_index(2).and_then(|index| { match index { 0 => Some(&x.keyword), 1 => Some(&x.constraints), _ => None, } }) }, WhereConstraint(x) => { get_index(3).and_then(|index| { match index { 0 => Some(&x.left_type), 1 => Some(&x.operator), 2 => Some(&x.right_type), _ => None, } }) }, MethodishDeclaration(x) => { get_index(4).and_then(|index| { match index { 0 => Some(&x.attribute), 1 => Some(&x.function_decl_header), 2 => Some(&x.function_body), 3 => Some(&x.semicolon), _ => None, } }) }, MethodishTraitResolution(x) => { get_index(5).and_then(|index| { match index { 0 => Some(&x.attribute), 1 => Some(&x.function_decl_header), 2 => Some(&x.equal), 3 => Some(&x.name), 4 => Some(&x.semicolon), _ => None, } }) }, ClassishDeclaration(x) => { get_index(12).and_then(|index| { match index { 0 => Some(&x.attribute), 1 => Some(&x.modifiers), 2 => Some(&x.xhp), 3 => Some(&x.keyword), 4 => Some(&x.name), 5 => Some(&x.type_parameters), 6 => Some(&x.extends_keyword), 7 => Some(&x.extends_list), 8 => Some(&x.implements_keyword), 9 => Some(&x.implements_list), 10 => Some(&x.where_clause), 11 => Some(&x.body), _ => None, } }) }, ClassishBody(x) => { get_index(3).and_then(|index| { match index { 0 => Some(&x.left_brace), 1 => Some(&x.elements), 2 => Some(&x.right_brace), _ => None, } }) }, TraitUse(x) => { get_index(3).and_then(|index| { match index { 0 => Some(&x.keyword), 1 => Some(&x.names), 2 => Some(&x.semicolon), _ => None, } }) }, RequireClause(x) => { get_index(4).and_then(|index| { match index { 0 => Some(&x.keyword), 1 => Some(&x.kind), 2 => Some(&x.name), 3 => Some(&x.semicolon), _ => None, } }) }, ConstDeclaration(x) => { get_index(6).and_then(|index| { match index { 0 => Some(&x.attribute_spec), 1 => Some(&x.modifiers), 2 => Some(&x.keyword), 3 => Some(&x.type_specifier), 4 => Some(&x.declarators), 5 => Some(&x.semicolon), _ => None, } }) }, ConstantDeclarator(x) => { get_index(2).and_then(|index| { match index { 0 => Some(&x.name), 1 => Some(&x.initializer), _ => None, } }) }, TypeConstDeclaration(x) => { get_index(10).and_then(|index| { match index { 0 => Some(&x.attribute_spec), 1 => Some(&x.modifiers), 2 => Some(&x.keyword), 3 => Some(&x.type_keyword), 4 => Some(&x.name), 5 => Some(&x.type_parameters), 6 => Some(&x.type_constraints), 7 => Some(&x.equal), 8 => Some(&x.type_specifier), 9 => Some(&x.semicolon), _ => None, } }) }, ContextConstDeclaration(x) => { get_index(9).and_then(|index| { match index { 0 => Some(&x.modifiers), 1 => Some(&x.const_keyword), 2 => Some(&x.ctx_keyword), 3 => Some(&x.name), 4 => Some(&x.type_parameters), 5 => Some(&x.constraint), 6 => Some(&x.equal), 7 => Some(&x.ctx_list), 8 => Some(&x.semicolon), _ => None, } }) }, DecoratedExpression(x) => { get_index(2).and_then(|index| { match index { 0 => Some(&x.decorator), 1 => Some(&x.expression), _ => None, } }) }, ParameterDeclaration(x) => { get_index(7).and_then(|index| { match index { 0 => Some(&x.attribute), 1 => Some(&x.visibility), 2 => Some(&x.call_convention), 3 => Some(&x.readonly), 4 => Some(&x.type_), 5 => Some(&x.name), 6 => Some(&x.default_value), _ => None, } }) }, VariadicParameter(x) => { get_index(3).and_then(|index| { match index { 0 => Some(&x.call_convention), 1 => Some(&x.type_), 2 => Some(&x.ellipsis), _ => None, } }) }, OldAttributeSpecification(x) => { get_index(3).and_then(|index| { match index { 0 => Some(&x.left_double_angle), 1 => Some(&x.attributes), 2 => Some(&x.right_double_angle), _ => None, } }) }, AttributeSpecification(x) => { 
get_index(1).and_then(|index| { match index { 0 => Some(&x.attributes), _ => None, } }) }, Attribute(x) => { get_index(2).and_then(|index| { match index { 0 => Some(&x.at), 1 => Some(&x.attribute_name), _ => None, } }) }, InclusionExpression(x) => { get_index(2).and_then(|index| { match index { 0 => Some(&x.require), 1 => Some(&x.filename), _ => None, } }) }, InclusionDirective(x) => { get_index(2).and_then(|index| { match index { 0 => Some(&x.expression), 1 => Some(&x.semicolon), _ => None, } }) }, CompoundStatement(x) => { get_index(3).and_then(|index| { match index { 0 => Some(&x.left_brace), 1 => Some(&x.statements), 2 => Some(&x.right_brace), _ => None, } }) }, ExpressionStatement(x) => { get_index(2).and_then(|index| { match index { 0 => Some(&x.expression), 1 => Some(&x.semicolon), _ => None, } }) }, MarkupSection(x) => { get_index(2).and_then(|index| { match index { 0 => Some(&x.hashbang), 1 => Some(&x.suffix), _ => None, } }) }, MarkupSuffix(x) => { get_index(2).and_then(|index| { match index { 0 => Some(&x.less_than_question), 1 => Some(&x.name), _ => None, } }) }, UnsetStatement(x) => { get_index(5).and_then(|index| { match index { 0 => Some(&x.keyword), 1 => Some(&x.left_paren), 2 => Some(&x.variables), 3 => Some(&x.right_paren), 4 => Some(&x.semicolon), _ => None, } }) }, DeclareLocalStatement(x) => { get_index(6).and_then(|index| { match index { 0 => Some(&x.keyword), 1 => Some(&x.variable), 2 => Some(&x.colon), 3 => Some(&x.type_), 4 => Some(&x.initializer), 5 => Some(&x.semicolon), _ => None, } }) }, UsingStatementBlockScoped(x) => { get_index(6).and_then(|index| { match index { 0 => Some(&x.await_keyword), 1 => Some(&x.using_keyword), 2 => Some(&x.left_paren), 3 => Some(&x.expressions), 4 => Some(&x.right_paren), 5 => Some(&x.body), _ => None, } }) }, UsingStatementFunctionScoped(x) => { get_index(4).and_then(|index| { match index { 0 => Some(&x.await_keyword), 1 => Some(&x.using_keyword), 2 => Some(&x.expression), 3 => Some(&x.semicolon), _ => None, } }) }, WhileStatement(x) => { get_index(5).and_then(|index| { match index { 0 => Some(&x.keyword), 1 => Some(&x.left_paren), 2 => Some(&x.condition), 3 => Some(&x.right_paren), 4 => Some(&x.body), _ => None, } }) }, IfStatement(x) => { get_index(6).and_then(|index| { match index { 0 => Some(&x.keyword), 1 => Some(&x.left_paren), 2 => Some(&x.condition), 3 => Some(&x.right_paren), 4 => Some(&x.statement), 5 => Some(&x.else_clause), _ => None, } }) }, ElseClause(x) => { get_index(2).and_then(|index| { match index { 0 => Some(&x.keyword), 1 => Some(&x.statement), _ => None, } }) }, TryStatement(x) => { get_index(4).and_then(|index| { match index { 0 => Some(&x.keyword), 1 => Some(&x.compound_statement), 2 => Some(&x.catch_clauses), 3 => Some(&x.finally_clause), _ => None, } }) }, CatchClause(x) => { get_index(6).and_then(|index| { match index { 0 => Some(&x.keyword), 1 => Some(&x.left_paren), 2 => Some(&x.type_), 3 => Some(&x.variable), 4 => Some(&x.right_paren), 5 => Some(&x.body), _ => None, } }) }, FinallyClause(x) => { get_index(2).and_then(|index| { match index { 0 => Some(&x.keyword), 1 => Some(&x.body), _ => None, } }) }, DoStatement(x) => { get_index(7).and_then(|index| { match index { 0 => Some(&x.keyword), 1 => Some(&x.body), 2 => Some(&x.while_keyword), 3 => Some(&x.left_paren), 4 => Some(&x.condition), 5 => Some(&x.right_paren), 6 => Some(&x.semicolon), _ => None, } }) }, ForStatement(x) => { get_index(9).and_then(|index| { match index { 0 => Some(&x.keyword), 1 => Some(&x.left_paren), 2 => Some(&x.initializer), 3 => 
Some(&x.first_semicolon), 4 => Some(&x.control), 5 => Some(&x.second_semicolon), 6 => Some(&x.end_of_loop), 7 => Some(&x.right_paren), 8 => Some(&x.body), _ => None, } }) }, ForeachStatement(x) => { get_index(10).and_then(|index| { match index { 0 => Some(&x.keyword), 1 => Some(&x.left_paren), 2 => Some(&x.collection), 3 => Some(&x.await_keyword), 4 => Some(&x.as_), 5 => Some(&x.key), 6 => Some(&x.arrow), 7 => Some(&x.value), 8 => Some(&x.right_paren), 9 => Some(&x.body), _ => None, } }) }, SwitchStatement(x) => { get_index(7).and_then(|index| { match index { 0 => Some(&x.keyword), 1 => Some(&x.left_paren), 2 => Some(&x.expression), 3 => Some(&x.right_paren), 4 => Some(&x.left_brace), 5 => Some(&x.sections), 6 => Some(&x.right_brace), _ => None, } }) }, SwitchSection(x) => { get_index(3).and_then(|index| { match index { 0 => Some(&x.labels), 1 => Some(&x.statements), 2 => Some(&x.fallthrough), _ => None, } }) }, SwitchFallthrough(x) => { get_index(2).and_then(|index| { match index { 0 => Some(&x.keyword), 1 => Some(&x.semicolon), _ => None, } }) }, CaseLabel(x) => { get_index(3).and_then(|index| { match index { 0 => Some(&x.keyword), 1 => Some(&x.expression), 2 => Some(&x.colon), _ => None, } }) }, DefaultLabel(x) => { get_index(2).and_then(|index| { match index { 0 => Some(&x.keyword), 1 => Some(&x.colon), _ => None, } }) }, MatchStatement(x) => { get_index(7).and_then(|index| { match index { 0 => Some(&x.keyword), 1 => Some(&x.left_paren), 2 => Some(&x.expression), 3 => Some(&x.right_paren), 4 => Some(&x.left_brace), 5 => Some(&x.arms), 6 => Some(&x.right_brace), _ => None, } }) }, MatchStatementArm(x) => { get_index(3).and_then(|index| { match index { 0 => Some(&x.pattern), 1 => Some(&x.arrow), 2 => Some(&x.body), _ => None, } }) }, ReturnStatement(x) => { get_index(3).and_then(|index| { match index { 0 => Some(&x.keyword), 1 => Some(&x.expression), 2 => Some(&x.semicolon), _ => None, } }) }, YieldBreakStatement(x) => { get_index(3).and_then(|index| { match index { 0 => Some(&x.keyword), 1 => Some(&x.break_), 2 => Some(&x.semicolon), _ => None, } }) }, ThrowStatement(x) => { get_index(3).and_then(|index| { match index { 0 => Some(&x.keyword), 1 => Some(&x.expression), 2 => Some(&x.semicolon), _ => None, } }) }, BreakStatement(x) => { get_index(2).and_then(|index| { match index { 0 => Some(&x.keyword), 1 => Some(&x.semicolon), _ => None, } }) }, ContinueStatement(x) => { get_index(2).and_then(|index| { match index { 0 => Some(&x.keyword), 1 => Some(&x.semicolon), _ => None, } }) }, EchoStatement(x) => { get_index(3).and_then(|index| { match index { 0 => Some(&x.keyword), 1 => Some(&x.expressions), 2 => Some(&x.semicolon), _ => None, } }) }, ConcurrentStatement(x) => { get_index(2).and_then(|index| { match index { 0 => Some(&x.keyword), 1 => Some(&x.statement), _ => None, } }) }, SimpleInitializer(x) => { get_index(2).and_then(|index| { match index { 0 => Some(&x.equal), 1 => Some(&x.value), _ => None, } }) }, AnonymousClass(x) => { get_index(9).and_then(|index| { match index { 0 => Some(&x.class_keyword), 1 => Some(&x.left_paren), 2 => Some(&x.argument_list), 3 => Some(&x.right_paren), 4 => Some(&x.extends_keyword), 5 => Some(&x.extends_list), 6 => Some(&x.implements_keyword), 7 => Some(&x.implements_list), 8 => Some(&x.body), _ => None, } }) }, AnonymousFunction(x) => { get_index(12).and_then(|index| { match index { 0 => Some(&x.attribute_spec), 1 => Some(&x.async_keyword), 2 => Some(&x.function_keyword), 3 => Some(&x.left_paren), 4 => Some(&x.parameters), 5 => Some(&x.right_paren), 6 
=> Some(&x.ctx_list), 7 => Some(&x.colon), 8 => Some(&x.readonly_return), 9 => Some(&x.type_), 10 => Some(&x.use_), 11 => Some(&x.body), _ => None, } }) }, AnonymousFunctionUseClause(x) => { get_index(4).and_then(|index| { match index { 0 => Some(&x.keyword), 1 => Some(&x.left_paren), 2 => Some(&x.variables), 3 => Some(&x.right_paren), _ => None, } }) }, VariablePattern(x) => { get_index(1).and_then(|index| { match index { 0 => Some(&x.variable), _ => None, } }) }, ConstructorPattern(x) => { get_index(4).and_then(|index| { match index { 0 => Some(&x.constructor), 1 => Some(&x.left_paren), 2 => Some(&x.members), 3 => Some(&x.right_paren), _ => None, } }) }, RefinementPattern(x) => { get_index(3).and_then(|index| { match index { 0 => Some(&x.variable), 1 => Some(&x.colon), 2 => Some(&x.specifier), _ => None, } }) }, LambdaExpression(x) => { get_index(5).and_then(|index| { match index { 0 => Some(&x.attribute_spec), 1 => Some(&x.async_), 2 => Some(&x.signature), 3 => Some(&x.arrow), 4 => Some(&x.body), _ => None, } }) }, LambdaSignature(x) => { get_index(7).and_then(|index| { match index { 0 => Some(&x.left_paren), 1 => Some(&x.parameters), 2 => Some(&x.right_paren), 3 => Some(&x.contexts), 4 => Some(&x.colon), 5 => Some(&x.readonly_return), 6 => Some(&x.type_), _ => None, } }) }, CastExpression(x) => { get_index(4).and_then(|index| { match index { 0 => Some(&x.left_paren), 1 => Some(&x.type_), 2 => Some(&x.right_paren), 3 => Some(&x.operand), _ => None, } }) }, ScopeResolutionExpression(x) => { get_index(3).and_then(|index| { match index { 0 => Some(&x.qualifier), 1 => Some(&x.operator), 2 => Some(&x.name), _ => None, } }) }, MemberSelectionExpression(x) => { get_index(3).and_then(|index| { match index { 0 => Some(&x.object), 1 => Some(&x.operator), 2 => Some(&x.name), _ => None, } }) }, SafeMemberSelectionExpression(x) => { get_index(3).and_then(|index| { match index { 0 => Some(&x.object), 1 => Some(&x.operator), 2 => Some(&x.name), _ => None, } }) }, EmbeddedMemberSelectionExpression(x) => { get_index(3).and_then(|index| { match index { 0 => Some(&x.object), 1 => Some(&x.operator), 2 => Some(&x.name), _ => None, } }) }, YieldExpression(x) => { get_index(2).and_then(|index| { match index { 0 => Some(&x.keyword), 1 => Some(&x.operand), _ => None, } }) }, PrefixUnaryExpression(x) => { get_index(2).and_then(|index| { match index { 0 => Some(&x.operator), 1 => Some(&x.operand), _ => None, } }) }, PostfixUnaryExpression(x) => { get_index(2).and_then(|index| { match index { 0 => Some(&x.operand), 1 => Some(&x.operator), _ => None, } }) }, BinaryExpression(x) => { get_index(3).and_then(|index| { match index { 0 => Some(&x.left_operand), 1 => Some(&x.operator), 2 => Some(&x.right_operand), _ => None, } }) }, IsExpression(x) => { get_index(3).and_then(|index| { match index { 0 => Some(&x.left_operand), 1 => Some(&x.operator), 2 => Some(&x.right_operand), _ => None, } }) }, AsExpression(x) => { get_index(3).and_then(|index| { match index { 0 => Some(&x.left_operand), 1 => Some(&x.operator), 2 => Some(&x.right_operand), _ => None, } }) }, NullableAsExpression(x) => { get_index(3).and_then(|index| { match index { 0 => Some(&x.left_operand), 1 => Some(&x.operator), 2 => Some(&x.right_operand), _ => None, } }) }, UpcastExpression(x) => { get_index(3).and_then(|index| { match index { 0 => Some(&x.left_operand), 1 => Some(&x.operator), 2 => Some(&x.right_operand), _ => None, } }) }, ConditionalExpression(x) => { get_index(5).and_then(|index| { match index { 0 => Some(&x.test), 1 => Some(&x.question), 2 => 
Some(&x.consequence), 3 => Some(&x.colon), 4 => Some(&x.alternative), _ => None, } }) }, EvalExpression(x) => { get_index(4).and_then(|index| { match index { 0 => Some(&x.keyword), 1 => Some(&x.left_paren), 2 => Some(&x.argument), 3 => Some(&x.right_paren), _ => None, } }) }, IssetExpression(x) => { get_index(4).and_then(|index| { match index { 0 => Some(&x.keyword), 1 => Some(&x.left_paren), 2 => Some(&x.argument_list), 3 => Some(&x.right_paren), _ => None, } }) }, FunctionCallExpression(x) => { get_index(5).and_then(|index| { match index { 0 => Some(&x.receiver), 1 => Some(&x.type_args), 2 => Some(&x.left_paren), 3 => Some(&x.argument_list), 4 => Some(&x.right_paren), _ => None, } }) }, FunctionPointerExpression(x) => { get_index(2).and_then(|index| { match index { 0 => Some(&x.receiver), 1 => Some(&x.type_args), _ => None, } }) }, ParenthesizedExpression(x) => { get_index(3).and_then(|index| { match index { 0 => Some(&x.left_paren), 1 => Some(&x.expression), 2 => Some(&x.right_paren), _ => None, } }) }, BracedExpression(x) => { get_index(3).and_then(|index| { match index { 0 => Some(&x.left_brace), 1 => Some(&x.expression), 2 => Some(&x.right_brace), _ => None, } }) }, ETSpliceExpression(x) => { get_index(4).and_then(|index| { match index { 0 => Some(&x.dollar), 1 => Some(&x.left_brace), 2 => Some(&x.expression), 3 => Some(&x.right_brace), _ => None, } }) }, EmbeddedBracedExpression(x) => { get_index(3).and_then(|index| { match index { 0 => Some(&x.left_brace), 1 => Some(&x.expression), 2 => Some(&x.right_brace), _ => None, } }) }, ListExpression(x) => { get_index(4).and_then(|index| { match index { 0 => Some(&x.keyword), 1 => Some(&x.left_paren), 2 => Some(&x.members), 3 => Some(&x.right_paren), _ => None, } }) }, CollectionLiteralExpression(x) => { get_index(4).and_then(|index| { match index { 0 => Some(&x.name), 1 => Some(&x.left_brace), 2 => Some(&x.initializers), 3 => Some(&x.right_brace), _ => None, } }) }, ObjectCreationExpression(x) => { get_index(2).and_then(|index| { match index { 0 => Some(&x.new_keyword), 1 => Some(&x.object), _ => None, } }) }, ConstructorCall(x) => { get_index(4).and_then(|index| { match index { 0 => Some(&x.type_), 1 => Some(&x.left_paren), 2 => Some(&x.argument_list), 3 => Some(&x.right_paren), _ => None, } }) }, DarrayIntrinsicExpression(x) => { get_index(5).and_then(|index| { match index { 0 => Some(&x.keyword), 1 => Some(&x.explicit_type), 2 => Some(&x.left_bracket), 3 => Some(&x.members), 4 => Some(&x.right_bracket), _ => None, } }) }, DictionaryIntrinsicExpression(x) => { get_index(5).and_then(|index| { match index { 0 => Some(&x.keyword), 1 => Some(&x.explicit_type), 2 => Some(&x.left_bracket), 3 => Some(&x.members), 4 => Some(&x.right_bracket), _ => None, } }) }, KeysetIntrinsicExpression(x) => { get_index(5).and_then(|index| { match index { 0 => Some(&x.keyword), 1 => Some(&x.explicit_type), 2 => Some(&x.left_bracket), 3 => Some(&x.members), 4 => Some(&x.right_bracket), _ => None, } }) }, VarrayIntrinsicExpression(x) => { get_index(5).and_then(|index| { match index { 0 => Some(&x.keyword), 1 => Some(&x.explicit_type), 2 => Some(&x.left_bracket), 3 => Some(&x.members), 4 => Some(&x.right_bracket), _ => None, } }) }, VectorIntrinsicExpression(x) => { get_index(5).and_then(|index| { match index { 0 => Some(&x.keyword), 1 => Some(&x.explicit_type), 2 => Some(&x.left_bracket), 3 => Some(&x.members), 4 => Some(&x.right_bracket), _ => None, } }) }, ElementInitializer(x) => { get_index(3).and_then(|index| { match index { 0 => Some(&x.key), 1 => 
Some(&x.arrow), 2 => Some(&x.value), _ => None, } }) }, SubscriptExpression(x) => { get_index(4).and_then(|index| { match index { 0 => Some(&x.receiver), 1 => Some(&x.left_bracket), 2 => Some(&x.index), 3 => Some(&x.right_bracket), _ => None, } }) }, EmbeddedSubscriptExpression(x) => { get_index(4).and_then(|index| { match index { 0 => Some(&x.receiver), 1 => Some(&x.left_bracket), 2 => Some(&x.index), 3 => Some(&x.right_bracket), _ => None, } }) }, AwaitableCreationExpression(x) => { get_index(3).and_then(|index| { match index { 0 => Some(&x.attribute_spec), 1 => Some(&x.async_), 2 => Some(&x.compound_statement), _ => None, } }) }, XHPChildrenDeclaration(x) => { get_index(3).and_then(|index| { match index { 0 => Some(&x.keyword), 1 => Some(&x.expression), 2 => Some(&x.semicolon), _ => None, } }) }, XHPChildrenParenthesizedList(x) => { get_index(3).and_then(|index| { match index { 0 => Some(&x.left_paren), 1 => Some(&x.xhp_children), 2 => Some(&x.right_paren), _ => None, } }) }, XHPCategoryDeclaration(x) => { get_index(3).and_then(|index| { match index { 0 => Some(&x.keyword), 1 => Some(&x.categories), 2 => Some(&x.semicolon), _ => None, } }) }, XHPEnumType(x) => { get_index(5).and_then(|index| { match index { 0 => Some(&x.like), 1 => Some(&x.keyword), 2 => Some(&x.left_brace), 3 => Some(&x.values), 4 => Some(&x.right_brace), _ => None, } }) }, XHPLateinit(x) => { get_index(2).and_then(|index| { match index { 0 => Some(&x.at), 1 => Some(&x.keyword), _ => None, } }) }, XHPRequired(x) => { get_index(2).and_then(|index| { match index { 0 => Some(&x.at), 1 => Some(&x.keyword), _ => None, } }) }, XHPClassAttributeDeclaration(x) => { get_index(3).and_then(|index| { match index { 0 => Some(&x.keyword), 1 => Some(&x.attributes), 2 => Some(&x.semicolon), _ => None, } }) }, XHPClassAttribute(x) => { get_index(4).and_then(|index| { match index { 0 => Some(&x.type_), 1 => Some(&x.name), 2 => Some(&x.initializer), 3 => Some(&x.required), _ => None, } }) }, XHPSimpleClassAttribute(x) => { get_index(1).and_then(|index| { match index { 0 => Some(&x.type_), _ => None, } }) }, XHPSimpleAttribute(x) => { get_index(3).and_then(|index| { match index { 0 => Some(&x.name), 1 => Some(&x.equal), 2 => Some(&x.expression), _ => None, } }) }, XHPSpreadAttribute(x) => { get_index(4).and_then(|index| { match index { 0 => Some(&x.left_brace), 1 => Some(&x.spread_operator), 2 => Some(&x.expression), 3 => Some(&x.right_brace), _ => None, } }) }, XHPOpen(x) => { get_index(4).and_then(|index| { match index { 0 => Some(&x.left_angle), 1 => Some(&x.name), 2 => Some(&x.attributes), 3 => Some(&x.right_angle), _ => None, } }) }, XHPExpression(x) => { get_index(3).and_then(|index| { match index { 0 => Some(&x.open), 1 => Some(&x.body), 2 => Some(&x.close), _ => None, } }) }, XHPClose(x) => { get_index(3).and_then(|index| { match index { 0 => Some(&x.left_angle), 1 => Some(&x.name), 2 => Some(&x.right_angle), _ => None, } }) }, TypeConstant(x) => { get_index(3).and_then(|index| { match index { 0 => Some(&x.left_type), 1 => Some(&x.separator), 2 => Some(&x.right_type), _ => None, } }) }, VectorTypeSpecifier(x) => { get_index(5).and_then(|index| { match index { 0 => Some(&x.keyword), 1 => Some(&x.left_angle), 2 => Some(&x.type_), 3 => Some(&x.trailing_comma), 4 => Some(&x.right_angle), _ => None, } }) }, KeysetTypeSpecifier(x) => { get_index(5).and_then(|index| { match index { 0 => Some(&x.keyword), 1 => Some(&x.left_angle), 2 => Some(&x.type_), 3 => Some(&x.trailing_comma), 4 => Some(&x.right_angle), _ => None, } }) }, 
TupleTypeExplicitSpecifier(x) => { get_index(4).and_then(|index| { match index { 0 => Some(&x.keyword), 1 => Some(&x.left_angle), 2 => Some(&x.types), 3 => Some(&x.right_angle), _ => None, } }) }, VarrayTypeSpecifier(x) => { get_index(5).and_then(|index| { match index { 0 => Some(&x.keyword), 1 => Some(&x.left_angle), 2 => Some(&x.type_), 3 => Some(&x.trailing_comma), 4 => Some(&x.right_angle), _ => None, } }) }, FunctionCtxTypeSpecifier(x) => { get_index(2).and_then(|index| { match index { 0 => Some(&x.keyword), 1 => Some(&x.variable), _ => None, } }) }, TypeParameter(x) => { get_index(6).and_then(|index| { match index { 0 => Some(&x.attribute_spec), 1 => Some(&x.reified), 2 => Some(&x.variance), 3 => Some(&x.name), 4 => Some(&x.param_params), 5 => Some(&x.constraints), _ => None, } }) }, TypeConstraint(x) => { get_index(2).and_then(|index| { match index { 0 => Some(&x.keyword), 1 => Some(&x.type_), _ => None, } }) }, ContextConstraint(x) => { get_index(2).and_then(|index| { match index { 0 => Some(&x.keyword), 1 => Some(&x.ctx_list), _ => None, } }) }, DarrayTypeSpecifier(x) => { get_index(7).and_then(|index| { match index { 0 => Some(&x.keyword), 1 => Some(&x.left_angle), 2 => Some(&x.key), 3 => Some(&x.comma), 4 => Some(&x.value), 5 => Some(&x.trailing_comma), 6 => Some(&x.right_angle), _ => None, } }) }, DictionaryTypeSpecifier(x) => { get_index(4).and_then(|index| { match index { 0 => Some(&x.keyword), 1 => Some(&x.left_angle), 2 => Some(&x.members), 3 => Some(&x.right_angle), _ => None, } }) }, ClosureTypeSpecifier(x) => { get_index(11).and_then(|index| { match index { 0 => Some(&x.outer_left_paren), 1 => Some(&x.readonly_keyword), 2 => Some(&x.function_keyword), 3 => Some(&x.inner_left_paren), 4 => Some(&x.parameter_list), 5 => Some(&x.inner_right_paren), 6 => Some(&x.contexts), 7 => Some(&x.colon), 8 => Some(&x.readonly_return), 9 => Some(&x.return_type), 10 => Some(&x.outer_right_paren), _ => None, } }) }, ClosureParameterTypeSpecifier(x) => { get_index(3).and_then(|index| { match index { 0 => Some(&x.call_convention), 1 => Some(&x.readonly), 2 => Some(&x.type_), _ => None, } }) }, TypeRefinement(x) => { get_index(5).and_then(|index| { match index { 0 => Some(&x.type_), 1 => Some(&x.keyword), 2 => Some(&x.left_brace), 3 => Some(&x.members), 4 => Some(&x.right_brace), _ => None, } }) }, TypeInRefinement(x) => { get_index(6).and_then(|index| { match index { 0 => Some(&x.keyword), 1 => Some(&x.name), 2 => Some(&x.type_parameters), 3 => Some(&x.constraints), 4 => Some(&x.equal), 5 => Some(&x.type_), _ => None, } }) }, CtxInRefinement(x) => { get_index(6).and_then(|index| { match index { 0 => Some(&x.keyword), 1 => Some(&x.name), 2 => Some(&x.type_parameters), 3 => Some(&x.constraints), 4 => Some(&x.equal), 5 => Some(&x.ctx_list), _ => None, } }) }, ClassnameTypeSpecifier(x) => { get_index(5).and_then(|index| { match index { 0 => Some(&x.keyword), 1 => Some(&x.left_angle), 2 => Some(&x.type_), 3 => Some(&x.trailing_comma), 4 => Some(&x.right_angle), _ => None, } }) }, FieldSpecifier(x) => { get_index(4).and_then(|index| { match index { 0 => Some(&x.question), 1 => Some(&x.name), 2 => Some(&x.arrow), 3 => Some(&x.type_), _ => None, } }) }, FieldInitializer(x) => { get_index(3).and_then(|index| { match index { 0 => Some(&x.name), 1 => Some(&x.arrow), 2 => Some(&x.value), _ => None, } }) }, ShapeTypeSpecifier(x) => { get_index(5).and_then(|index| { match index { 0 => Some(&x.keyword), 1 => Some(&x.left_paren), 2 => Some(&x.fields), 3 => Some(&x.ellipsis), 4 => Some(&x.right_paren), _ => 
None, } }) }, ShapeExpression(x) => { get_index(4).and_then(|index| { match index { 0 => Some(&x.keyword), 1 => Some(&x.left_paren), 2 => Some(&x.fields), 3 => Some(&x.right_paren), _ => None, } }) }, TupleExpression(x) => { get_index(4).and_then(|index| { match index { 0 => Some(&x.keyword), 1 => Some(&x.left_paren), 2 => Some(&x.items), 3 => Some(&x.right_paren), _ => None, } }) }, GenericTypeSpecifier(x) => { get_index(2).and_then(|index| { match index { 0 => Some(&x.class_type), 1 => Some(&x.argument_list), _ => None, } }) }, NullableTypeSpecifier(x) => { get_index(2).and_then(|index| { match index { 0 => Some(&x.question), 1 => Some(&x.type_), _ => None, } }) }, LikeTypeSpecifier(x) => { get_index(2).and_then(|index| { match index { 0 => Some(&x.tilde), 1 => Some(&x.type_), _ => None, } }) }, SoftTypeSpecifier(x) => { get_index(2).and_then(|index| { match index { 0 => Some(&x.at), 1 => Some(&x.type_), _ => None, } }) }, AttributizedSpecifier(x) => { get_index(2).and_then(|index| { match index { 0 => Some(&x.attribute_spec), 1 => Some(&x.type_), _ => None, } }) }, ReifiedTypeArgument(x) => { get_index(2).and_then(|index| { match index { 0 => Some(&x.reified), 1 => Some(&x.type_), _ => None, } }) }, TypeArguments(x) => { get_index(3).and_then(|index| { match index { 0 => Some(&x.left_angle), 1 => Some(&x.types), 2 => Some(&x.right_angle), _ => None, } }) }, TypeParameters(x) => { get_index(3).and_then(|index| { match index { 0 => Some(&x.left_angle), 1 => Some(&x.parameters), 2 => Some(&x.right_angle), _ => None, } }) }, TupleTypeSpecifier(x) => { get_index(3).and_then(|index| { match index { 0 => Some(&x.left_paren), 1 => Some(&x.types), 2 => Some(&x.right_paren), _ => None, } }) }, UnionTypeSpecifier(x) => { get_index(3).and_then(|index| { match index { 0 => Some(&x.left_paren), 1 => Some(&x.types), 2 => Some(&x.right_paren), _ => None, } }) }, IntersectionTypeSpecifier(x) => { get_index(3).and_then(|index| { match index { 0 => Some(&x.left_paren), 1 => Some(&x.types), 2 => Some(&x.right_paren), _ => None, } }) }, ErrorSyntax(x) => { get_index(1).and_then(|index| { match index { 0 => Some(&x.error), _ => None, } }) }, ListItem(x) => { get_index(2).and_then(|index| { match index { 0 => Some(&x.item), 1 => Some(&x.separator), _ => None, } }) }, EnumClassLabelExpression(x) => { get_index(3).and_then(|index| { match index { 0 => Some(&x.qualifier), 1 => Some(&x.hash), 2 => Some(&x.expression), _ => None, } }) }, ModuleDeclaration(x) => { get_index(8).and_then(|index| { match index { 0 => Some(&x.attribute_spec), 1 => Some(&x.new_keyword), 2 => Some(&x.module_keyword), 3 => Some(&x.name), 4 => Some(&x.left_brace), 5 => Some(&x.exports), 6 => Some(&x.imports), 7 => Some(&x.right_brace), _ => None, } }) }, ModuleExports(x) => { get_index(4).and_then(|index| { match index { 0 => Some(&x.exports_keyword), 1 => Some(&x.left_brace), 2 => Some(&x.exports), 3 => Some(&x.right_brace), _ => None, } }) }, ModuleImports(x) => { get_index(4).and_then(|index| { match index { 0 => Some(&x.imports_keyword), 1 => Some(&x.left_brace), 2 => Some(&x.imports), 3 => Some(&x.right_brace), _ => None, } }) }, ModuleMembershipDeclaration(x) => { get_index(3).and_then(|index| { match index { 0 => Some(&x.module_keyword), 1 => Some(&x.name), 2 => Some(&x.semicolon), _ => None, } }) }, PackageExpression(x) => { get_index(2).and_then(|index| { match index { 0 => Some(&x.keyword), 1 => Some(&x.name), _ => None, } }) }, }; if res.is_some() { if direction { self.index = self.index + 1 } else { self.index_back = 
self.index_back + 1 } } res } }
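// Illustrative sketch (editor's addition, not part of the original file):
// the `get_index` closure maps the two cursors onto a child index. With
// len == 3, index == 1, index_back == 1 we get back_index_plus_1 == 2, so a
// forward step yields child 1 and a backward step would yield 2 - 1 == 1,
// the same child; once it is consumed the cursors meet (2 <= 2) and every
// further call returns None.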
Rust
hhvm/hphp/hack/src/parser/syntax_by_ref/syntax_impl_generated.rs
/**
 * Copyright (c) 2016, Facebook, Inc.
 * All rights reserved.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the "hack" directory of this source tree. An additional
 * directory.
 *
 **
 *
 * THIS FILE IS @generated; DO NOT EDIT IT
 * To regenerate this file, run
 *
 *   buck run //hphp/hack/src:generate_full_fidelity
 *
 **
 *
 */
use crate::{syntax_kind::SyntaxKind, lexable_token::LexableToken};
use super::{syntax::Syntax, syntax_variant_generated::SyntaxVariant};

impl<T: LexableToken, V> Syntax<'_, T, V> {
    pub fn kind(&self) -> SyntaxKind {
        match &self.children {
            SyntaxVariant::Missing => SyntaxKind::Missing,
            SyntaxVariant::Token(t) => SyntaxKind::Token(t.kind()),
            SyntaxVariant::SyntaxList(_) => SyntaxKind::SyntaxList,
            SyntaxVariant::EndOfFile {..} => SyntaxKind::EndOfFile,
            SyntaxVariant::Script {..} => SyntaxKind::Script,
            SyntaxVariant::QualifiedName {..} => SyntaxKind::QualifiedName,
            SyntaxVariant::ModuleName {..} => SyntaxKind::ModuleName,
            SyntaxVariant::SimpleTypeSpecifier {..} => SyntaxKind::SimpleTypeSpecifier,
            SyntaxVariant::LiteralExpression {..} => SyntaxKind::LiteralExpression,
            SyntaxVariant::PrefixedStringExpression {..} => SyntaxKind::PrefixedStringExpression,
            SyntaxVariant::PrefixedCodeExpression {..} => SyntaxKind::PrefixedCodeExpression,
            SyntaxVariant::VariableExpression {..} => SyntaxKind::VariableExpression,
            SyntaxVariant::PipeVariableExpression {..} => SyntaxKind::PipeVariableExpression,
            SyntaxVariant::FileAttributeSpecification {..} => SyntaxKind::FileAttributeSpecification,
            SyntaxVariant::EnumDeclaration {..} => SyntaxKind::EnumDeclaration,
            SyntaxVariant::EnumUse {..} => SyntaxKind::EnumUse,
            SyntaxVariant::Enumerator {..} => SyntaxKind::Enumerator,
            SyntaxVariant::EnumClassDeclaration {..} => SyntaxKind::EnumClassDeclaration,
            SyntaxVariant::EnumClassEnumerator {..} => SyntaxKind::EnumClassEnumerator,
            SyntaxVariant::AliasDeclaration {..} => SyntaxKind::AliasDeclaration,
            SyntaxVariant::ContextAliasDeclaration {..} => SyntaxKind::ContextAliasDeclaration,
            SyntaxVariant::CaseTypeDeclaration {..} => SyntaxKind::CaseTypeDeclaration,
            SyntaxVariant::CaseTypeVariant {..} => SyntaxKind::CaseTypeVariant,
            SyntaxVariant::PropertyDeclaration {..} => SyntaxKind::PropertyDeclaration,
            SyntaxVariant::PropertyDeclarator {..} => SyntaxKind::PropertyDeclarator,
            SyntaxVariant::NamespaceDeclaration {..} => SyntaxKind::NamespaceDeclaration,
            SyntaxVariant::NamespaceDeclarationHeader {..} => SyntaxKind::NamespaceDeclarationHeader,
            SyntaxVariant::NamespaceBody {..} => SyntaxKind::NamespaceBody,
            SyntaxVariant::NamespaceEmptyBody {..} => SyntaxKind::NamespaceEmptyBody,
            SyntaxVariant::NamespaceUseDeclaration {..} => SyntaxKind::NamespaceUseDeclaration,
            SyntaxVariant::NamespaceGroupUseDeclaration {..} => SyntaxKind::NamespaceGroupUseDeclaration,
            SyntaxVariant::NamespaceUseClause {..} => SyntaxKind::NamespaceUseClause,
            SyntaxVariant::FunctionDeclaration {..} => SyntaxKind::FunctionDeclaration,
            SyntaxVariant::FunctionDeclarationHeader {..} => SyntaxKind::FunctionDeclarationHeader,
            SyntaxVariant::Contexts {..} => SyntaxKind::Contexts,
            SyntaxVariant::WhereClause {..} => SyntaxKind::WhereClause,
            SyntaxVariant::WhereConstraint {..} => SyntaxKind::WhereConstraint,
            SyntaxVariant::MethodishDeclaration {..} => SyntaxKind::MethodishDeclaration,
            SyntaxVariant::MethodishTraitResolution {..} => SyntaxKind::MethodishTraitResolution,
            SyntaxVariant::ClassishDeclaration {..} => SyntaxKind::ClassishDeclaration,
            SyntaxVariant::ClassishBody {..} => SyntaxKind::ClassishBody,
            SyntaxVariant::TraitUse {..} => SyntaxKind::TraitUse,
            SyntaxVariant::RequireClause {..} => SyntaxKind::RequireClause,
            SyntaxVariant::ConstDeclaration {..} => SyntaxKind::ConstDeclaration,
            SyntaxVariant::ConstantDeclarator {..} => SyntaxKind::ConstantDeclarator,
            SyntaxVariant::TypeConstDeclaration {..} => SyntaxKind::TypeConstDeclaration,
            SyntaxVariant::ContextConstDeclaration {..} => SyntaxKind::ContextConstDeclaration,
            SyntaxVariant::DecoratedExpression {..} => SyntaxKind::DecoratedExpression,
            SyntaxVariant::ParameterDeclaration {..} => SyntaxKind::ParameterDeclaration,
            SyntaxVariant::VariadicParameter {..} => SyntaxKind::VariadicParameter,
            SyntaxVariant::OldAttributeSpecification {..} => SyntaxKind::OldAttributeSpecification,
            SyntaxVariant::AttributeSpecification {..} => SyntaxKind::AttributeSpecification,
            SyntaxVariant::Attribute {..} => SyntaxKind::Attribute,
            SyntaxVariant::InclusionExpression {..} => SyntaxKind::InclusionExpression,
            SyntaxVariant::InclusionDirective {..} => SyntaxKind::InclusionDirective,
            SyntaxVariant::CompoundStatement {..} => SyntaxKind::CompoundStatement,
            SyntaxVariant::ExpressionStatement {..} => SyntaxKind::ExpressionStatement,
            SyntaxVariant::MarkupSection {..} => SyntaxKind::MarkupSection,
            SyntaxVariant::MarkupSuffix {..} => SyntaxKind::MarkupSuffix,
            SyntaxVariant::UnsetStatement {..} => SyntaxKind::UnsetStatement,
            SyntaxVariant::DeclareLocalStatement {..} => SyntaxKind::DeclareLocalStatement,
            SyntaxVariant::UsingStatementBlockScoped {..} => SyntaxKind::UsingStatementBlockScoped,
            SyntaxVariant::UsingStatementFunctionScoped {..} => SyntaxKind::UsingStatementFunctionScoped,
            SyntaxVariant::WhileStatement {..} => SyntaxKind::WhileStatement,
            SyntaxVariant::IfStatement {..} => SyntaxKind::IfStatement,
            SyntaxVariant::ElseClause {..} => SyntaxKind::ElseClause,
            SyntaxVariant::TryStatement {..} => SyntaxKind::TryStatement,
            SyntaxVariant::CatchClause {..} => SyntaxKind::CatchClause,
            SyntaxVariant::FinallyClause {..} => SyntaxKind::FinallyClause,
            SyntaxVariant::DoStatement {..} => SyntaxKind::DoStatement,
            SyntaxVariant::ForStatement {..} => SyntaxKind::ForStatement,
            SyntaxVariant::ForeachStatement {..} => SyntaxKind::ForeachStatement,
            SyntaxVariant::SwitchStatement {..} => SyntaxKind::SwitchStatement,
            SyntaxVariant::SwitchSection {..} => SyntaxKind::SwitchSection,
            SyntaxVariant::SwitchFallthrough {..} => SyntaxKind::SwitchFallthrough,
            SyntaxVariant::CaseLabel {..} => SyntaxKind::CaseLabel,
            SyntaxVariant::DefaultLabel {..} => SyntaxKind::DefaultLabel,
            SyntaxVariant::MatchStatement {..} => SyntaxKind::MatchStatement,
            SyntaxVariant::MatchStatementArm {..} => SyntaxKind::MatchStatementArm,
            SyntaxVariant::ReturnStatement {..} => SyntaxKind::ReturnStatement,
            SyntaxVariant::YieldBreakStatement {..} => SyntaxKind::YieldBreakStatement,
            SyntaxVariant::ThrowStatement {..} => SyntaxKind::ThrowStatement,
            SyntaxVariant::BreakStatement {..} => SyntaxKind::BreakStatement,
            SyntaxVariant::ContinueStatement {..} => SyntaxKind::ContinueStatement,
            SyntaxVariant::EchoStatement {..} => SyntaxKind::EchoStatement,
            SyntaxVariant::ConcurrentStatement {..} => SyntaxKind::ConcurrentStatement,
            SyntaxVariant::SimpleInitializer {..} => SyntaxKind::SimpleInitializer,
            SyntaxVariant::AnonymousClass {..} => SyntaxKind::AnonymousClass,
            SyntaxVariant::AnonymousFunction {..} => SyntaxKind::AnonymousFunction,
            SyntaxVariant::AnonymousFunctionUseClause {..} => SyntaxKind::AnonymousFunctionUseClause,
            SyntaxVariant::VariablePattern {..} => SyntaxKind::VariablePattern,
            SyntaxVariant::ConstructorPattern {..} => SyntaxKind::ConstructorPattern,
            SyntaxVariant::RefinementPattern {..} => SyntaxKind::RefinementPattern,
            SyntaxVariant::LambdaExpression {..} => SyntaxKind::LambdaExpression,
            SyntaxVariant::LambdaSignature {..} => SyntaxKind::LambdaSignature,
            SyntaxVariant::CastExpression {..} => SyntaxKind::CastExpression,
            SyntaxVariant::ScopeResolutionExpression {..} => SyntaxKind::ScopeResolutionExpression,
            SyntaxVariant::MemberSelectionExpression {..} => SyntaxKind::MemberSelectionExpression,
            SyntaxVariant::SafeMemberSelectionExpression {..} => SyntaxKind::SafeMemberSelectionExpression,
            SyntaxVariant::EmbeddedMemberSelectionExpression {..} => SyntaxKind::EmbeddedMemberSelectionExpression,
            SyntaxVariant::YieldExpression {..} => SyntaxKind::YieldExpression,
            SyntaxVariant::PrefixUnaryExpression {..} => SyntaxKind::PrefixUnaryExpression,
            SyntaxVariant::PostfixUnaryExpression {..} => SyntaxKind::PostfixUnaryExpression,
            SyntaxVariant::BinaryExpression {..} => SyntaxKind::BinaryExpression,
            SyntaxVariant::IsExpression {..} => SyntaxKind::IsExpression,
            SyntaxVariant::AsExpression {..} => SyntaxKind::AsExpression,
            SyntaxVariant::NullableAsExpression {..} => SyntaxKind::NullableAsExpression,
            SyntaxVariant::UpcastExpression {..} => SyntaxKind::UpcastExpression,
            SyntaxVariant::ConditionalExpression {..} => SyntaxKind::ConditionalExpression,
            SyntaxVariant::EvalExpression {..} => SyntaxKind::EvalExpression,
            SyntaxVariant::IssetExpression {..} => SyntaxKind::IssetExpression,
            SyntaxVariant::FunctionCallExpression {..} => SyntaxKind::FunctionCallExpression,
            SyntaxVariant::FunctionPointerExpression {..} => SyntaxKind::FunctionPointerExpression,
            SyntaxVariant::ParenthesizedExpression {..} => SyntaxKind::ParenthesizedExpression,
            SyntaxVariant::BracedExpression {..} => SyntaxKind::BracedExpression,
            SyntaxVariant::ETSpliceExpression {..} => SyntaxKind::ETSpliceExpression,
            SyntaxVariant::EmbeddedBracedExpression {..} => SyntaxKind::EmbeddedBracedExpression,
            SyntaxVariant::ListExpression {..} => SyntaxKind::ListExpression,
            SyntaxVariant::CollectionLiteralExpression {..} => SyntaxKind::CollectionLiteralExpression,
            SyntaxVariant::ObjectCreationExpression {..} => SyntaxKind::ObjectCreationExpression,
            SyntaxVariant::ConstructorCall {..} => SyntaxKind::ConstructorCall,
            SyntaxVariant::DarrayIntrinsicExpression {..} => SyntaxKind::DarrayIntrinsicExpression,
            SyntaxVariant::DictionaryIntrinsicExpression {..} => SyntaxKind::DictionaryIntrinsicExpression,
            SyntaxVariant::KeysetIntrinsicExpression {..} => SyntaxKind::KeysetIntrinsicExpression,
            SyntaxVariant::VarrayIntrinsicExpression {..} => SyntaxKind::VarrayIntrinsicExpression,
            SyntaxVariant::VectorIntrinsicExpression {..} => SyntaxKind::VectorIntrinsicExpression,
            SyntaxVariant::ElementInitializer {..} => SyntaxKind::ElementInitializer,
            SyntaxVariant::SubscriptExpression {..} => SyntaxKind::SubscriptExpression,
            SyntaxVariant::EmbeddedSubscriptExpression {..} => SyntaxKind::EmbeddedSubscriptExpression,
            SyntaxVariant::AwaitableCreationExpression {..} => SyntaxKind::AwaitableCreationExpression,
            SyntaxVariant::XHPChildrenDeclaration {..} => SyntaxKind::XHPChildrenDeclaration,
            SyntaxVariant::XHPChildrenParenthesizedList {..} => SyntaxKind::XHPChildrenParenthesizedList,
            SyntaxVariant::XHPCategoryDeclaration {..} => SyntaxKind::XHPCategoryDeclaration,
            SyntaxVariant::XHPEnumType {..} => SyntaxKind::XHPEnumType,
            SyntaxVariant::XHPLateinit {..} => SyntaxKind::XHPLateinit,
            SyntaxVariant::XHPRequired {..} => SyntaxKind::XHPRequired,
            SyntaxVariant::XHPClassAttributeDeclaration {..} => SyntaxKind::XHPClassAttributeDeclaration,
            SyntaxVariant::XHPClassAttribute {..} => SyntaxKind::XHPClassAttribute,
            SyntaxVariant::XHPSimpleClassAttribute {..} => SyntaxKind::XHPSimpleClassAttribute,
            SyntaxVariant::XHPSimpleAttribute {..} => SyntaxKind::XHPSimpleAttribute,
            SyntaxVariant::XHPSpreadAttribute {..} => SyntaxKind::XHPSpreadAttribute,
            SyntaxVariant::XHPOpen {..} => SyntaxKind::XHPOpen,
            SyntaxVariant::XHPExpression {..} => SyntaxKind::XHPExpression,
            SyntaxVariant::XHPClose {..} => SyntaxKind::XHPClose,
            SyntaxVariant::TypeConstant {..} => SyntaxKind::TypeConstant,
            SyntaxVariant::VectorTypeSpecifier {..} => SyntaxKind::VectorTypeSpecifier,
            SyntaxVariant::KeysetTypeSpecifier {..} => SyntaxKind::KeysetTypeSpecifier,
            SyntaxVariant::TupleTypeExplicitSpecifier {..} => SyntaxKind::TupleTypeExplicitSpecifier,
            SyntaxVariant::VarrayTypeSpecifier {..} => SyntaxKind::VarrayTypeSpecifier,
            SyntaxVariant::FunctionCtxTypeSpecifier {..} => SyntaxKind::FunctionCtxTypeSpecifier,
            SyntaxVariant::TypeParameter {..} => SyntaxKind::TypeParameter,
            SyntaxVariant::TypeConstraint {..} => SyntaxKind::TypeConstraint,
            SyntaxVariant::ContextConstraint {..} => SyntaxKind::ContextConstraint,
            SyntaxVariant::DarrayTypeSpecifier {..} => SyntaxKind::DarrayTypeSpecifier,
            SyntaxVariant::DictionaryTypeSpecifier {..} => SyntaxKind::DictionaryTypeSpecifier,
            SyntaxVariant::ClosureTypeSpecifier {..} => SyntaxKind::ClosureTypeSpecifier,
            SyntaxVariant::ClosureParameterTypeSpecifier {..} => SyntaxKind::ClosureParameterTypeSpecifier,
            SyntaxVariant::TypeRefinement {..} => SyntaxKind::TypeRefinement,
            SyntaxVariant::TypeInRefinement {..} => SyntaxKind::TypeInRefinement,
            SyntaxVariant::CtxInRefinement {..} => SyntaxKind::CtxInRefinement,
            SyntaxVariant::ClassnameTypeSpecifier {..} => SyntaxKind::ClassnameTypeSpecifier,
            SyntaxVariant::FieldSpecifier {..} => SyntaxKind::FieldSpecifier,
            SyntaxVariant::FieldInitializer {..} => SyntaxKind::FieldInitializer,
            SyntaxVariant::ShapeTypeSpecifier {..} => SyntaxKind::ShapeTypeSpecifier,
            SyntaxVariant::ShapeExpression {..} => SyntaxKind::ShapeExpression,
            SyntaxVariant::TupleExpression {..} => SyntaxKind::TupleExpression,
            SyntaxVariant::GenericTypeSpecifier {..} => SyntaxKind::GenericTypeSpecifier,
            SyntaxVariant::NullableTypeSpecifier {..} => SyntaxKind::NullableTypeSpecifier,
            SyntaxVariant::LikeTypeSpecifier {..} => SyntaxKind::LikeTypeSpecifier,
            SyntaxVariant::SoftTypeSpecifier {..} => SyntaxKind::SoftTypeSpecifier,
            SyntaxVariant::AttributizedSpecifier {..} => SyntaxKind::AttributizedSpecifier,
            SyntaxVariant::ReifiedTypeArgument {..} => SyntaxKind::ReifiedTypeArgument,
            SyntaxVariant::TypeArguments {..} => SyntaxKind::TypeArguments,
            SyntaxVariant::TypeParameters {..} => SyntaxKind::TypeParameters,
            SyntaxVariant::TupleTypeSpecifier {..} => SyntaxKind::TupleTypeSpecifier,
            SyntaxVariant::UnionTypeSpecifier {..} => SyntaxKind::UnionTypeSpecifier,
            SyntaxVariant::IntersectionTypeSpecifier {..} => SyntaxKind::IntersectionTypeSpecifier,
            SyntaxVariant::ErrorSyntax {..} => SyntaxKind::ErrorSyntax,
            SyntaxVariant::ListItem {..} => SyntaxKind::ListItem,
            SyntaxVariant::EnumClassLabelExpression {..} => SyntaxKind::EnumClassLabelExpression,
            SyntaxVariant::ModuleDeclaration {..} => SyntaxKind::ModuleDeclaration,
            SyntaxVariant::ModuleExports {..} => SyntaxKind::ModuleExports,
            SyntaxVariant::ModuleImports {..} => SyntaxKind::ModuleImports,
            SyntaxVariant::ModuleMembershipDeclaration {..} => SyntaxKind::ModuleMembershipDeclaration,
            SyntaxVariant::PackageExpression {..} => SyntaxKind::PackageExpression,
        }
    }
}
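The generated kind() above flattens the full SyntaxVariant tree into a single SyntaxKind tag, which is what downstream passes match on when they only care about a node's type, not its children. A minimal sketch of such a caller (a hypothetical helper, not part of the generated sources; the imports assume the same crate layout as the impl above):

use crate::{lexable_token::LexableToken, syntax_kind::SyntaxKind};
use super::syntax::Syntax;

// Hypothetical example: classify a node by the flat tag that kind() returns,
// without destructuring SyntaxVariant itself.
fn describe<T: LexableToken, V>(node: &Syntax<'_, T, V>) -> &'static str {
    match node.kind() {
        SyntaxKind::Missing => "missing node",
        SyntaxKind::Token(_) => "leaf token",
        SyntaxKind::SyntaxList => "list of siblings",
        _ => "interior node",
    }
}

The serializer in the next file pairs with this tag: every arm writes a "kind" field first, so the length passed to serialize_struct is always the node's child count plus one.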
Rust
hhvm/hphp/hack/src/parser/syntax_by_ref/syntax_serialize_generated.rs
/**
 * Copyright (c) 2016, Facebook, Inc.
 * All rights reserved.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the "hack" directory of this source tree. An additional
 * directory.
 *
 **
 *
 * THIS FILE IS @generated; DO NOT EDIT IT
 * To regenerate this file, run
 *
 *   buck run //hphp/hack/src:generate_full_fidelity
 *
 **
 *
 */
use super::{serialize::WithContext, syntax::Syntax, syntax_variant_generated::*};
use serde::{ser::SerializeStruct, Serialize, Serializer};

impl<'a, T, V> Serialize for WithContext<'a, Syntax<'a, T, V>>
where
    T: 'a,
    WithContext<'a, T>: Serialize,
{
    fn serialize<S: Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
        match self.1.children {
            SyntaxVariant::Missing => {
                let mut ss = s.serialize_struct("", 1)?;
                ss.serialize_field("kind", "missing")?;
                ss.end()
            }
            SyntaxVariant::Token(ref t) => {
                let mut ss = s.serialize_struct("", 2)?;
                ss.serialize_field("kind", "token")?;
                ss.serialize_field("token", &self.with(t))?;
                ss.end()
            }
            SyntaxVariant::SyntaxList(l) => {
                let mut ss = s.serialize_struct("", 2)?;
                ss.serialize_field("kind", "list")?;
                ss.serialize_field("elements", &self.with(l))?;
                ss.end()
            }
            SyntaxVariant::EndOfFile(EndOfFileChildren{token}) => {
                let mut ss = s.serialize_struct("", 2)?;
                ss.serialize_field("kind", "end_of_file")?;
                ss.serialize_field("end_of_file_token", &self.with(token))?;
                ss.end()
            }
            SyntaxVariant::Script(ScriptChildren{declarations}) => {
                let mut ss = s.serialize_struct("", 2)?;
                ss.serialize_field("kind", "script")?;
                ss.serialize_field("script_declarations", &self.with(declarations))?;
                ss.end()
            }
            SyntaxVariant::QualifiedName(QualifiedNameChildren{parts}) => {
                let mut ss = s.serialize_struct("", 2)?;
                ss.serialize_field("kind", "qualified_name")?;
                ss.serialize_field("qualified_name_parts", &self.with(parts))?;
                ss.end()
            }
            SyntaxVariant::ModuleName(ModuleNameChildren{parts}) => {
                let mut ss = s.serialize_struct("", 2)?;
                ss.serialize_field("kind", "module_name")?;
                ss.serialize_field("module_name_parts", &self.with(parts))?;
                ss.end()
            }
            SyntaxVariant::SimpleTypeSpecifier(SimpleTypeSpecifierChildren{specifier}) => {
                let mut ss = s.serialize_struct("", 2)?;
                ss.serialize_field("kind", "simple_type_specifier")?;
                ss.serialize_field("simple_type_specifier", &self.with(specifier))?;
                ss.end()
            }
            SyntaxVariant::LiteralExpression(LiteralExpressionChildren{expression}) => {
                let mut ss = s.serialize_struct("", 2)?;
                ss.serialize_field("kind", "literal")?;
                ss.serialize_field("literal_expression", &self.with(expression))?;
                ss.end()
            }
            SyntaxVariant::PrefixedStringExpression(PrefixedStringExpressionChildren{name,str}) => {
                let mut ss = s.serialize_struct("", 3)?;
                ss.serialize_field("kind", "prefixed_string")?;
                ss.serialize_field("prefixed_string_name", &self.with(name))?;
                ss.serialize_field("prefixed_string_str", &self.with(str))?;
                ss.end()
            }
            SyntaxVariant::PrefixedCodeExpression(PrefixedCodeExpressionChildren{prefix,left_backtick,body,right_backtick}) => {
                let mut ss = s.serialize_struct("", 5)?;
                ss.serialize_field("kind", "prefixed_code")?;
                ss.serialize_field("prefixed_code_prefix", &self.with(prefix))?;
                ss.serialize_field("prefixed_code_left_backtick", &self.with(left_backtick))?;
                ss.serialize_field("prefixed_code_body", &self.with(body))?;
                ss.serialize_field("prefixed_code_right_backtick", &self.with(right_backtick))?;
                ss.end()
            }
            SyntaxVariant::VariableExpression(VariableExpressionChildren{expression}) => {
                let mut ss = s.serialize_struct("", 2)?;
                ss.serialize_field("kind", "variable")?;
                ss.serialize_field("variable_expression", &self.with(expression))?;
                ss.end()
            }
            SyntaxVariant::PipeVariableExpression(PipeVariableExpressionChildren{expression}) => {
                let mut ss = s.serialize_struct("", 2)?;
                ss.serialize_field("kind", "pipe_variable")?;
                ss.serialize_field("pipe_variable_expression", &self.with(expression))?;
                ss.end()
            }
            SyntaxVariant::FileAttributeSpecification(FileAttributeSpecificationChildren{left_double_angle,keyword,colon,attributes,right_double_angle}) => {
                let mut ss = s.serialize_struct("", 6)?;
                ss.serialize_field("kind", "file_attribute_specification")?;
                ss.serialize_field("file_attribute_specification_left_double_angle", &self.with(left_double_angle))?;
                ss.serialize_field("file_attribute_specification_keyword", &self.with(keyword))?;
                ss.serialize_field("file_attribute_specification_colon", &self.with(colon))?;
                ss.serialize_field("file_attribute_specification_attributes", &self.with(attributes))?;
                ss.serialize_field("file_attribute_specification_right_double_angle", &self.with(right_double_angle))?;
                ss.end()
            }
            SyntaxVariant::EnumDeclaration(EnumDeclarationChildren{attribute_spec,modifiers,keyword,name,colon,base,type_,left_brace,use_clauses,enumerators,right_brace}) => {
                let mut ss = s.serialize_struct("", 12)?;
                ss.serialize_field("kind", "enum_declaration")?;
                ss.serialize_field("enum_attribute_spec", &self.with(attribute_spec))?;
                ss.serialize_field("enum_modifiers", &self.with(modifiers))?;
                ss.serialize_field("enum_keyword", &self.with(keyword))?;
                ss.serialize_field("enum_name", &self.with(name))?;
                ss.serialize_field("enum_colon", &self.with(colon))?;
                ss.serialize_field("enum_base", &self.with(base))?;
                ss.serialize_field("enum_type", &self.with(type_))?;
                ss.serialize_field("enum_left_brace", &self.with(left_brace))?;
                ss.serialize_field("enum_use_clauses", &self.with(use_clauses))?;
                ss.serialize_field("enum_enumerators", &self.with(enumerators))?;
                ss.serialize_field("enum_right_brace", &self.with(right_brace))?;
                ss.end()
            }
            SyntaxVariant::EnumUse(EnumUseChildren{keyword,names,semicolon}) => {
                let mut ss = s.serialize_struct("", 4)?;
                ss.serialize_field("kind", "enum_use")?;
                ss.serialize_field("enum_use_keyword", &self.with(keyword))?;
                ss.serialize_field("enum_use_names", &self.with(names))?;
                ss.serialize_field("enum_use_semicolon", &self.with(semicolon))?;
                ss.end()
            }
            SyntaxVariant::Enumerator(EnumeratorChildren{name,equal,value,semicolon}) => {
                let mut ss = s.serialize_struct("", 5)?;
                ss.serialize_field("kind", "enumerator")?;
                ss.serialize_field("enumerator_name", &self.with(name))?;
                ss.serialize_field("enumerator_equal", &self.with(equal))?;
                ss.serialize_field("enumerator_value", &self.with(value))?;
                ss.serialize_field("enumerator_semicolon", &self.with(semicolon))?;
                ss.end()
            }
            SyntaxVariant::EnumClassDeclaration(EnumClassDeclarationChildren{attribute_spec,modifiers,enum_keyword,class_keyword,name,colon,base,extends,extends_list,left_brace,elements,right_brace}) => {
                let mut ss = s.serialize_struct("", 13)?;
                ss.serialize_field("kind", "enum_class_declaration")?;
                ss.serialize_field("enum_class_attribute_spec", &self.with(attribute_spec))?;
                ss.serialize_field("enum_class_modifiers", &self.with(modifiers))?;
                ss.serialize_field("enum_class_enum_keyword", &self.with(enum_keyword))?;
                ss.serialize_field("enum_class_class_keyword", &self.with(class_keyword))?;
                ss.serialize_field("enum_class_name", &self.with(name))?;
                ss.serialize_field("enum_class_colon", &self.with(colon))?;
                ss.serialize_field("enum_class_base", &self.with(base))?;
                ss.serialize_field("enum_class_extends", &self.with(extends))?;
                ss.serialize_field("enum_class_extends_list", &self.with(extends_list))?;
                ss.serialize_field("enum_class_left_brace", &self.with(left_brace))?;
                ss.serialize_field("enum_class_elements", &self.with(elements))?;
                ss.serialize_field("enum_class_right_brace", &self.with(right_brace))?;
                ss.end()
            }
            SyntaxVariant::EnumClassEnumerator(EnumClassEnumeratorChildren{modifiers,type_,name,initializer,semicolon}) => {
                let mut ss = s.serialize_struct("", 6)?;
                ss.serialize_field("kind", "enum_class_enumerator")?;
                ss.serialize_field("enum_class_enumerator_modifiers", &self.with(modifiers))?;
                ss.serialize_field("enum_class_enumerator_type", &self.with(type_))?;
                ss.serialize_field("enum_class_enumerator_name", &self.with(name))?;
                ss.serialize_field("enum_class_enumerator_initializer", &self.with(initializer))?;
                ss.serialize_field("enum_class_enumerator_semicolon", &self.with(semicolon))?;
                ss.end()
            }
            SyntaxVariant::AliasDeclaration(AliasDeclarationChildren{attribute_spec,modifiers,module_kw_opt,keyword,name,generic_parameter,constraint,equal,type_,semicolon}) => {
                let mut ss = s.serialize_struct("", 11)?;
                ss.serialize_field("kind", "alias_declaration")?;
                ss.serialize_field("alias_attribute_spec", &self.with(attribute_spec))?;
                ss.serialize_field("alias_modifiers", &self.with(modifiers))?;
                ss.serialize_field("alias_module_kw_opt", &self.with(module_kw_opt))?;
                ss.serialize_field("alias_keyword", &self.with(keyword))?;
                ss.serialize_field("alias_name", &self.with(name))?;
                ss.serialize_field("alias_generic_parameter", &self.with(generic_parameter))?;
                ss.serialize_field("alias_constraint", &self.with(constraint))?;
                ss.serialize_field("alias_equal", &self.with(equal))?;
                ss.serialize_field("alias_type", &self.with(type_))?;
                ss.serialize_field("alias_semicolon", &self.with(semicolon))?;
                ss.end()
            }
            SyntaxVariant::ContextAliasDeclaration(ContextAliasDeclarationChildren{attribute_spec,keyword,name,generic_parameter,as_constraint,equal,context,semicolon}) => {
                let mut ss = s.serialize_struct("", 9)?;
                ss.serialize_field("kind", "context_alias_declaration")?;
                ss.serialize_field("ctx_alias_attribute_spec", &self.with(attribute_spec))?;
                ss.serialize_field("ctx_alias_keyword", &self.with(keyword))?;
                ss.serialize_field("ctx_alias_name", &self.with(name))?;
                ss.serialize_field("ctx_alias_generic_parameter", &self.with(generic_parameter))?;
                ss.serialize_field("ctx_alias_as_constraint", &self.with(as_constraint))?;
                ss.serialize_field("ctx_alias_equal", &self.with(equal))?;
                ss.serialize_field("ctx_alias_context", &self.with(context))?;
                ss.serialize_field("ctx_alias_semicolon", &self.with(semicolon))?;
                ss.end()
            }
            SyntaxVariant::CaseTypeDeclaration(CaseTypeDeclarationChildren{attribute_spec,modifiers,case_keyword,type_keyword,name,generic_parameter,as_,bounds,equal,variants,semicolon}) => {
                let mut ss = s.serialize_struct("", 12)?;
                ss.serialize_field("kind", "case_type_declaration")?;
                ss.serialize_field("case_type_attribute_spec", &self.with(attribute_spec))?;
                ss.serialize_field("case_type_modifiers", &self.with(modifiers))?;
                ss.serialize_field("case_type_case_keyword", &self.with(case_keyword))?;
                ss.serialize_field("case_type_type_keyword", &self.with(type_keyword))?;
                ss.serialize_field("case_type_name", &self.with(name))?;
                ss.serialize_field("case_type_generic_parameter", &self.with(generic_parameter))?;
                ss.serialize_field("case_type_as", &self.with(as_))?;
                ss.serialize_field("case_type_bounds", &self.with(bounds))?;
                ss.serialize_field("case_type_equal", &self.with(equal))?;
                ss.serialize_field("case_type_variants", &self.with(variants))?;
                ss.serialize_field("case_type_semicolon", &self.with(semicolon))?;
                ss.end()
            }
            SyntaxVariant::CaseTypeVariant(CaseTypeVariantChildren{bar,type_}) => {
                let mut ss = s.serialize_struct("", 3)?;
                ss.serialize_field("kind", "case_type_variant")?;
                ss.serialize_field("case_type_variant_bar", &self.with(bar))?;
                ss.serialize_field("case_type_variant_type", &self.with(type_))?;
                ss.end()
            }
            SyntaxVariant::PropertyDeclaration(PropertyDeclarationChildren{attribute_spec,modifiers,type_,declarators,semicolon}) => {
                let mut ss = s.serialize_struct("", 6)?;
                ss.serialize_field("kind", "property_declaration")?;
                ss.serialize_field("property_attribute_spec", &self.with(attribute_spec))?;
                ss.serialize_field("property_modifiers", &self.with(modifiers))?;
                ss.serialize_field("property_type", &self.with(type_))?;
                ss.serialize_field("property_declarators", &self.with(declarators))?;
                ss.serialize_field("property_semicolon", &self.with(semicolon))?;
                ss.end()
            }
            SyntaxVariant::PropertyDeclarator(PropertyDeclaratorChildren{name,initializer}) => {
                let mut ss = s.serialize_struct("", 3)?;
                ss.serialize_field("kind", "property_declarator")?;
                ss.serialize_field("property_name", &self.with(name))?;
                ss.serialize_field("property_initializer", &self.with(initializer))?;
                ss.end()
            }
            SyntaxVariant::NamespaceDeclaration(NamespaceDeclarationChildren{header,body}) => {
                let mut ss = s.serialize_struct("", 3)?;
                ss.serialize_field("kind", "namespace_declaration")?;
                ss.serialize_field("namespace_header", &self.with(header))?;
                ss.serialize_field("namespace_body", &self.with(body))?;
                ss.end()
            }
            SyntaxVariant::NamespaceDeclarationHeader(NamespaceDeclarationHeaderChildren{keyword,name}) => {
                let mut ss = s.serialize_struct("", 3)?;
                ss.serialize_field("kind", "namespace_declaration_header")?;
                ss.serialize_field("namespace_keyword", &self.with(keyword))?;
                ss.serialize_field("namespace_name", &self.with(name))?;
                ss.end()
            }
            SyntaxVariant::NamespaceBody(NamespaceBodyChildren{left_brace,declarations,right_brace}) => {
                let mut ss = s.serialize_struct("", 4)?;
                ss.serialize_field("kind", "namespace_body")?;
                ss.serialize_field("namespace_left_brace", &self.with(left_brace))?;
                ss.serialize_field("namespace_declarations", &self.with(declarations))?;
                ss.serialize_field("namespace_right_brace", &self.with(right_brace))?;
                ss.end()
            }
            SyntaxVariant::NamespaceEmptyBody(NamespaceEmptyBodyChildren{semicolon}) => {
                let mut ss = s.serialize_struct("", 2)?;
                ss.serialize_field("kind", "namespace_empty_body")?;
                ss.serialize_field("namespace_semicolon", &self.with(semicolon))?;
                ss.end()
            }
            SyntaxVariant::NamespaceUseDeclaration(NamespaceUseDeclarationChildren{keyword,kind,clauses,semicolon}) => {
                let mut ss = s.serialize_struct("", 5)?;
                ss.serialize_field("kind", "namespace_use_declaration")?;
                ss.serialize_field("namespace_use_keyword", &self.with(keyword))?;
                ss.serialize_field("namespace_use_kind", &self.with(kind))?;
                ss.serialize_field("namespace_use_clauses", &self.with(clauses))?;
                ss.serialize_field("namespace_use_semicolon", &self.with(semicolon))?;
                ss.end()
            }
            SyntaxVariant::NamespaceGroupUseDeclaration(NamespaceGroupUseDeclarationChildren{keyword,kind,prefix,left_brace,clauses,right_brace,semicolon}) => {
                let mut ss = s.serialize_struct("", 8)?;
                ss.serialize_field("kind", "namespace_group_use_declaration")?;
                ss.serialize_field("namespace_group_use_keyword", &self.with(keyword))?;
                ss.serialize_field("namespace_group_use_kind", &self.with(kind))?;
                ss.serialize_field("namespace_group_use_prefix", &self.with(prefix))?;
                ss.serialize_field("namespace_group_use_left_brace", &self.with(left_brace))?;
                ss.serialize_field("namespace_group_use_clauses", &self.with(clauses))?;
                ss.serialize_field("namespace_group_use_right_brace", &self.with(right_brace))?;
                ss.serialize_field("namespace_group_use_semicolon", &self.with(semicolon))?;
                ss.end()
            }
            SyntaxVariant::NamespaceUseClause(NamespaceUseClauseChildren{clause_kind,name,as_,alias}) => {
                let mut ss = s.serialize_struct("", 5)?;
                ss.serialize_field("kind", "namespace_use_clause")?;
                ss.serialize_field("namespace_use_clause_kind", &self.with(clause_kind))?;
                ss.serialize_field("namespace_use_name", &self.with(name))?;
                ss.serialize_field("namespace_use_as", &self.with(as_))?;
                ss.serialize_field("namespace_use_alias", &self.with(alias))?;
                ss.end()
            }
            SyntaxVariant::FunctionDeclaration(FunctionDeclarationChildren{attribute_spec,declaration_header,body}) => {
                let mut ss = s.serialize_struct("", 4)?;
                ss.serialize_field("kind", "function_declaration")?;
                ss.serialize_field("function_attribute_spec", &self.with(attribute_spec))?;
                ss.serialize_field("function_declaration_header", &self.with(declaration_header))?;
                ss.serialize_field("function_body", &self.with(body))?;
                ss.end()
            }
            SyntaxVariant::FunctionDeclarationHeader(FunctionDeclarationHeaderChildren{modifiers,keyword,name,type_parameter_list,left_paren,parameter_list,right_paren,contexts,colon,readonly_return,type_,where_clause}) => {
                let mut ss = s.serialize_struct("", 13)?;
                ss.serialize_field("kind", "function_declaration_header")?;
                ss.serialize_field("function_modifiers", &self.with(modifiers))?;
                ss.serialize_field("function_keyword", &self.with(keyword))?;
                ss.serialize_field("function_name", &self.with(name))?;
                ss.serialize_field("function_type_parameter_list", &self.with(type_parameter_list))?;
                ss.serialize_field("function_left_paren", &self.with(left_paren))?;
                ss.serialize_field("function_parameter_list", &self.with(parameter_list))?;
                ss.serialize_field("function_right_paren", &self.with(right_paren))?;
                ss.serialize_field("function_contexts", &self.with(contexts))?;
                ss.serialize_field("function_colon", &self.with(colon))?;
                ss.serialize_field("function_readonly_return", &self.with(readonly_return))?;
                ss.serialize_field("function_type", &self.with(type_))?;
                ss.serialize_field("function_where_clause", &self.with(where_clause))?;
                ss.end()
            }
            SyntaxVariant::Contexts(ContextsChildren{left_bracket,types,right_bracket}) => {
                let mut ss = s.serialize_struct("", 4)?;
                ss.serialize_field("kind", "contexts")?;
                ss.serialize_field("contexts_left_bracket", &self.with(left_bracket))?;
                ss.serialize_field("contexts_types", &self.with(types))?;
                ss.serialize_field("contexts_right_bracket", &self.with(right_bracket))?;
                ss.end()
            }
            SyntaxVariant::WhereClause(WhereClauseChildren{keyword,constraints}) => {
                let mut ss = s.serialize_struct("", 3)?;
                ss.serialize_field("kind", "where_clause")?;
                ss.serialize_field("where_clause_keyword", &self.with(keyword))?;
                ss.serialize_field("where_clause_constraints", &self.with(constraints))?;
                ss.end()
            }
            SyntaxVariant::WhereConstraint(WhereConstraintChildren{left_type,operator,right_type}) => {
                let mut ss = s.serialize_struct("", 4)?;
                ss.serialize_field("kind", "where_constraint")?;
                ss.serialize_field("where_constraint_left_type", &self.with(left_type))?;
                ss.serialize_field("where_constraint_operator", &self.with(operator))?;
                ss.serialize_field("where_constraint_right_type", &self.with(right_type))?;
                ss.end()
            }
            SyntaxVariant::MethodishDeclaration(MethodishDeclarationChildren{attribute,function_decl_header,function_body,semicolon}) => {
                let mut ss = s.serialize_struct("", 5)?;
                ss.serialize_field("kind", "methodish_declaration")?;
                ss.serialize_field("methodish_attribute", &self.with(attribute))?;
                ss.serialize_field("methodish_function_decl_header", &self.with(function_decl_header))?;
                ss.serialize_field("methodish_function_body", &self.with(function_body))?;
                ss.serialize_field("methodish_semicolon", &self.with(semicolon))?;
                ss.end()
            }
            SyntaxVariant::MethodishTraitResolution(MethodishTraitResolutionChildren{attribute,function_decl_header,equal,name,semicolon}) => {
                let mut ss = s.serialize_struct("", 6)?;
                ss.serialize_field("kind", "methodish_trait_resolution")?;
                ss.serialize_field("methodish_trait_attribute", &self.with(attribute))?;
                ss.serialize_field("methodish_trait_function_decl_header", &self.with(function_decl_header))?;
                ss.serialize_field("methodish_trait_equal", &self.with(equal))?;
                ss.serialize_field("methodish_trait_name", &self.with(name))?;
                ss.serialize_field("methodish_trait_semicolon", &self.with(semicolon))?;
                ss.end()
            }
            SyntaxVariant::ClassishDeclaration(ClassishDeclarationChildren{attribute,modifiers,xhp,keyword,name,type_parameters,extends_keyword,extends_list,implements_keyword,implements_list,where_clause,body}) => {
                let mut ss = s.serialize_struct("", 13)?;
                ss.serialize_field("kind", "classish_declaration")?;
                ss.serialize_field("classish_attribute", &self.with(attribute))?;
                ss.serialize_field("classish_modifiers", &self.with(modifiers))?;
                ss.serialize_field("classish_xhp", &self.with(xhp))?;
                ss.serialize_field("classish_keyword", &self.with(keyword))?;
                ss.serialize_field("classish_name", &self.with(name))?;
                ss.serialize_field("classish_type_parameters", &self.with(type_parameters))?;
                ss.serialize_field("classish_extends_keyword", &self.with(extends_keyword))?;
                ss.serialize_field("classish_extends_list", &self.with(extends_list))?;
                ss.serialize_field("classish_implements_keyword", &self.with(implements_keyword))?;
                ss.serialize_field("classish_implements_list", &self.with(implements_list))?;
                ss.serialize_field("classish_where_clause", &self.with(where_clause))?;
                ss.serialize_field("classish_body", &self.with(body))?;
                ss.end()
            }
            SyntaxVariant::ClassishBody(ClassishBodyChildren{left_brace,elements,right_brace}) => {
                let mut ss = s.serialize_struct("", 4)?;
                ss.serialize_field("kind", "classish_body")?;
                ss.serialize_field("classish_body_left_brace", &self.with(left_brace))?;
                ss.serialize_field("classish_body_elements", &self.with(elements))?;
                ss.serialize_field("classish_body_right_brace", &self.with(right_brace))?;
                ss.end()
            }
            SyntaxVariant::TraitUse(TraitUseChildren{keyword,names,semicolon}) => {
                let mut ss = s.serialize_struct("", 4)?;
                ss.serialize_field("kind", "trait_use")?;
                ss.serialize_field("trait_use_keyword", &self.with(keyword))?;
                ss.serialize_field("trait_use_names", &self.with(names))?;
                ss.serialize_field("trait_use_semicolon", &self.with(semicolon))?;
                ss.end()
            }
            SyntaxVariant::RequireClause(RequireClauseChildren{keyword,kind,name,semicolon}) => {
                let mut ss = s.serialize_struct("", 5)?;
                ss.serialize_field("kind", "require_clause")?;
                ss.serialize_field("require_keyword", &self.with(keyword))?;
                ss.serialize_field("require_kind", &self.with(kind))?;
                ss.serialize_field("require_name", &self.with(name))?;
                ss.serialize_field("require_semicolon", &self.with(semicolon))?;
                ss.end()
            }
            SyntaxVariant::ConstDeclaration(ConstDeclarationChildren{attribute_spec,modifiers,keyword,type_specifier,declarators,semicolon}) => {
                let mut ss = s.serialize_struct("", 7)?;
                ss.serialize_field("kind", "const_declaration")?;
                ss.serialize_field("const_attribute_spec", &self.with(attribute_spec))?;
                ss.serialize_field("const_modifiers", &self.with(modifiers))?;
                ss.serialize_field("const_keyword", &self.with(keyword))?;
                ss.serialize_field("const_type_specifier", &self.with(type_specifier))?;
                ss.serialize_field("const_declarators", &self.with(declarators))?;
                ss.serialize_field("const_semicolon", &self.with(semicolon))?;
                ss.end()
            }
            SyntaxVariant::ConstantDeclarator(ConstantDeclaratorChildren{name,initializer}) => {
                let mut ss = s.serialize_struct("", 3)?;
                ss.serialize_field("kind", "constant_declarator")?;
                ss.serialize_field("constant_declarator_name", &self.with(name))?;
                ss.serialize_field("constant_declarator_initializer", &self.with(initializer))?;
                ss.end()
            }
            SyntaxVariant::TypeConstDeclaration(TypeConstDeclarationChildren{attribute_spec,modifiers,keyword,type_keyword,name,type_parameters,type_constraints,equal,type_specifier,semicolon}) => {
                let mut ss = s.serialize_struct("", 11)?;
                ss.serialize_field("kind", "type_const_declaration")?;
                ss.serialize_field("type_const_attribute_spec", &self.with(attribute_spec))?;
                ss.serialize_field("type_const_modifiers", &self.with(modifiers))?;
                ss.serialize_field("type_const_keyword", &self.with(keyword))?;
                ss.serialize_field("type_const_type_keyword", &self.with(type_keyword))?;
                ss.serialize_field("type_const_name", &self.with(name))?;
                ss.serialize_field("type_const_type_parameters", &self.with(type_parameters))?;
                ss.serialize_field("type_const_type_constraints", &self.with(type_constraints))?;
                ss.serialize_field("type_const_equal", &self.with(equal))?;
                ss.serialize_field("type_const_type_specifier", &self.with(type_specifier))?;
                ss.serialize_field("type_const_semicolon", &self.with(semicolon))?;
                ss.end()
            }
            SyntaxVariant::ContextConstDeclaration(ContextConstDeclarationChildren{modifiers,const_keyword,ctx_keyword,name,type_parameters,constraint,equal,ctx_list,semicolon}) => {
                let mut ss = s.serialize_struct("", 10)?;
                ss.serialize_field("kind", "context_const_declaration")?;
                ss.serialize_field("context_const_modifiers", &self.with(modifiers))?;
                ss.serialize_field("context_const_const_keyword", &self.with(const_keyword))?;
                ss.serialize_field("context_const_ctx_keyword", &self.with(ctx_keyword))?;
                ss.serialize_field("context_const_name", &self.with(name))?;
                ss.serialize_field("context_const_type_parameters", &self.with(type_parameters))?;
                ss.serialize_field("context_const_constraint", &self.with(constraint))?;
                ss.serialize_field("context_const_equal", &self.with(equal))?;
                ss.serialize_field("context_const_ctx_list", &self.with(ctx_list))?;
                ss.serialize_field("context_const_semicolon", &self.with(semicolon))?;
                ss.end()
            }
            SyntaxVariant::DecoratedExpression(DecoratedExpressionChildren{decorator,expression}) => {
                let mut ss = s.serialize_struct("", 3)?;
                ss.serialize_field("kind", "decorated_expression")?;
                ss.serialize_field("decorated_expression_decorator", &self.with(decorator))?;
                ss.serialize_field("decorated_expression_expression", &self.with(expression))?;
                ss.end()
            }
            SyntaxVariant::ParameterDeclaration(ParameterDeclarationChildren{attribute,visibility,call_convention,readonly,type_,name,default_value}) => {
                let mut ss = s.serialize_struct("", 8)?;
                ss.serialize_field("kind", "parameter_declaration")?;
                ss.serialize_field("parameter_attribute", &self.with(attribute))?;
                ss.serialize_field("parameter_visibility", &self.with(visibility))?;
                ss.serialize_field("parameter_call_convention", &self.with(call_convention))?;
                ss.serialize_field("parameter_readonly", &self.with(readonly))?;
                ss.serialize_field("parameter_type", &self.with(type_))?;
                ss.serialize_field("parameter_name", &self.with(name))?;
                ss.serialize_field("parameter_default_value", &self.with(default_value))?;
                ss.end()
            }
            SyntaxVariant::VariadicParameter(VariadicParameterChildren{call_convention,type_,ellipsis}) => {
                let mut ss = s.serialize_struct("", 4)?;
                ss.serialize_field("kind", "variadic_parameter")?;
                ss.serialize_field("variadic_parameter_call_convention", &self.with(call_convention))?;
                ss.serialize_field("variadic_parameter_type", &self.with(type_))?;
                ss.serialize_field("variadic_parameter_ellipsis", &self.with(ellipsis))?;
                ss.end()
            }
            SyntaxVariant::OldAttributeSpecification(OldAttributeSpecificationChildren{left_double_angle,attributes,right_double_angle}) => {
                let mut ss = s.serialize_struct("", 4)?;
                ss.serialize_field("kind", "old_attribute_specification")?;
                ss.serialize_field("old_attribute_specification_left_double_angle", &self.with(left_double_angle))?;
                ss.serialize_field("old_attribute_specification_attributes", &self.with(attributes))?;
                ss.serialize_field("old_attribute_specification_right_double_angle", &self.with(right_double_angle))?;
                ss.end()
            }
            SyntaxVariant::AttributeSpecification(AttributeSpecificationChildren{attributes}) => {
                let mut ss = s.serialize_struct("", 2)?;
                ss.serialize_field("kind", "attribute_specification")?;
                ss.serialize_field("attribute_specification_attributes", &self.with(attributes))?;
                ss.end()
            }
            SyntaxVariant::Attribute(AttributeChildren{at,attribute_name}) => {
                let mut ss = s.serialize_struct("", 3)?;
                ss.serialize_field("kind", "attribute")?;
                ss.serialize_field("attribute_at", &self.with(at))?;
                ss.serialize_field("attribute_attribute_name", &self.with(attribute_name))?;
                ss.end()
            }
            SyntaxVariant::InclusionExpression(InclusionExpressionChildren{require,filename}) => {
                let mut ss = s.serialize_struct("", 3)?;
                ss.serialize_field("kind", "inclusion_expression")?;
                ss.serialize_field("inclusion_require", &self.with(require))?;
                ss.serialize_field("inclusion_filename", &self.with(filename))?;
                ss.end()
            }
            SyntaxVariant::InclusionDirective(InclusionDirectiveChildren{expression,semicolon}) => {
                let mut ss = s.serialize_struct("", 3)?;
                ss.serialize_field("kind", "inclusion_directive")?;
                ss.serialize_field("inclusion_expression", &self.with(expression))?;
                ss.serialize_field("inclusion_semicolon", &self.with(semicolon))?;
                ss.end()
            }
            SyntaxVariant::CompoundStatement(CompoundStatementChildren{left_brace,statements,right_brace}) => {
                let mut ss = s.serialize_struct("", 4)?;
                ss.serialize_field("kind", "compound_statement")?;
                ss.serialize_field("compound_left_brace", &self.with(left_brace))?;
                ss.serialize_field("compound_statements", &self.with(statements))?;
                ss.serialize_field("compound_right_brace", &self.with(right_brace))?;
                ss.end()
            }
            SyntaxVariant::ExpressionStatement(ExpressionStatementChildren{expression,semicolon}) => {
                let mut ss = s.serialize_struct("", 3)?;
                ss.serialize_field("kind", "expression_statement")?;
                ss.serialize_field("expression_statement_expression", &self.with(expression))?;
                ss.serialize_field("expression_statement_semicolon", &self.with(semicolon))?;
                ss.end()
            }
            SyntaxVariant::MarkupSection(MarkupSectionChildren{hashbang,suffix}) => {
                let mut ss = s.serialize_struct("", 3)?;
                ss.serialize_field("kind", "markup_section")?;
                ss.serialize_field("markup_hashbang", &self.with(hashbang))?;
                ss.serialize_field("markup_suffix", &self.with(suffix))?;
                ss.end()
            }
            SyntaxVariant::MarkupSuffix(MarkupSuffixChildren{less_than_question,name}) => {
                let mut ss = s.serialize_struct("", 3)?;
                ss.serialize_field("kind", "markup_suffix")?;
                ss.serialize_field("markup_suffix_less_than_question", &self.with(less_than_question))?;
                ss.serialize_field("markup_suffix_name", &self.with(name))?;
                ss.end()
            }
            SyntaxVariant::UnsetStatement(UnsetStatementChildren{keyword,left_paren,variables,right_paren,semicolon}) => {
                let mut ss = s.serialize_struct("", 6)?;
                ss.serialize_field("kind", "unset_statement")?;
                ss.serialize_field("unset_keyword", &self.with(keyword))?;
                ss.serialize_field("unset_left_paren", &self.with(left_paren))?;
                ss.serialize_field("unset_variables", &self.with(variables))?;
                ss.serialize_field("unset_right_paren", &self.with(right_paren))?;
                ss.serialize_field("unset_semicolon", &self.with(semicolon))?;
                ss.end()
            }
            SyntaxVariant::DeclareLocalStatement(DeclareLocalStatementChildren{keyword,variable,colon,type_,initializer,semicolon}) => {
                let mut ss = s.serialize_struct("", 7)?;
                ss.serialize_field("kind", "declare_local_statement")?;
                ss.serialize_field("declare_local_keyword", &self.with(keyword))?;
                ss.serialize_field("declare_local_variable", &self.with(variable))?;
                ss.serialize_field("declare_local_colon", &self.with(colon))?;
                ss.serialize_field("declare_local_type", &self.with(type_))?;
                ss.serialize_field("declare_local_initializer", &self.with(initializer))?;
                ss.serialize_field("declare_local_semicolon", &self.with(semicolon))?;
                ss.end()
            }
            SyntaxVariant::UsingStatementBlockScoped(UsingStatementBlockScopedChildren{await_keyword,using_keyword,left_paren,expressions,right_paren,body}) => {
                let mut ss = s.serialize_struct("", 7)?;
                ss.serialize_field("kind", "using_statement_block_scoped")?;
                ss.serialize_field("using_block_await_keyword", &self.with(await_keyword))?;
                ss.serialize_field("using_block_using_keyword", &self.with(using_keyword))?;
                ss.serialize_field("using_block_left_paren", &self.with(left_paren))?;
                ss.serialize_field("using_block_expressions", &self.with(expressions))?;
                ss.serialize_field("using_block_right_paren", &self.with(right_paren))?;
                ss.serialize_field("using_block_body", &self.with(body))?;
                ss.end()
            }
            SyntaxVariant::UsingStatementFunctionScoped(UsingStatementFunctionScopedChildren{await_keyword,using_keyword,expression,semicolon}) => {
                let mut ss = s.serialize_struct("", 5)?;
                ss.serialize_field("kind", "using_statement_function_scoped")?;
                ss.serialize_field("using_function_await_keyword", &self.with(await_keyword))?;
                ss.serialize_field("using_function_using_keyword", &self.with(using_keyword))?;
                ss.serialize_field("using_function_expression", &self.with(expression))?;
                ss.serialize_field("using_function_semicolon", &self.with(semicolon))?;
                ss.end()
            }
            SyntaxVariant::WhileStatement(WhileStatementChildren{keyword,left_paren,condition,right_paren,body}) => {
                let mut ss = s.serialize_struct("", 6)?;
                ss.serialize_field("kind", "while_statement")?;
                ss.serialize_field("while_keyword", &self.with(keyword))?;
                ss.serialize_field("while_left_paren", &self.with(left_paren))?;
                ss.serialize_field("while_condition", &self.with(condition))?;
                ss.serialize_field("while_right_paren", &self.with(right_paren))?;
                ss.serialize_field("while_body", &self.with(body))?;
                ss.end()
            }
            SyntaxVariant::IfStatement(IfStatementChildren{keyword,left_paren,condition,right_paren,statement,else_clause}) => {
                let mut ss = s.serialize_struct("", 7)?;
                ss.serialize_field("kind", "if_statement")?;
                ss.serialize_field("if_keyword", &self.with(keyword))?;
                ss.serialize_field("if_left_paren", &self.with(left_paren))?;
                ss.serialize_field("if_condition", &self.with(condition))?;
                ss.serialize_field("if_right_paren", &self.with(right_paren))?;
                ss.serialize_field("if_statement", &self.with(statement))?;
                ss.serialize_field("if_else_clause", &self.with(else_clause))?;
                ss.end()
            }
            SyntaxVariant::ElseClause(ElseClauseChildren{keyword,statement}) => {
                let mut ss = s.serialize_struct("", 3)?;
                ss.serialize_field("kind", "else_clause")?;
                ss.serialize_field("else_keyword", &self.with(keyword))?;
                ss.serialize_field("else_statement", &self.with(statement))?;
                ss.end()
            }
            SyntaxVariant::TryStatement(TryStatementChildren{keyword,compound_statement,catch_clauses,finally_clause}) => {
                let mut ss = s.serialize_struct("", 5)?;
                ss.serialize_field("kind", "try_statement")?;
                ss.serialize_field("try_keyword", &self.with(keyword))?;
                ss.serialize_field("try_compound_statement", &self.with(compound_statement))?;
                ss.serialize_field("try_catch_clauses", &self.with(catch_clauses))?;
                ss.serialize_field("try_finally_clause", &self.with(finally_clause))?;
                ss.end()
            }
            SyntaxVariant::CatchClause(CatchClauseChildren{keyword,left_paren,type_,variable,right_paren,body}) => {
                let mut ss = s.serialize_struct("", 7)?;
                ss.serialize_field("kind", "catch_clause")?;
                ss.serialize_field("catch_keyword", &self.with(keyword))?;
                ss.serialize_field("catch_left_paren", &self.with(left_paren))?;
                ss.serialize_field("catch_type", &self.with(type_))?;
                ss.serialize_field("catch_variable", &self.with(variable))?;
                ss.serialize_field("catch_right_paren", &self.with(right_paren))?;
                ss.serialize_field("catch_body", &self.with(body))?;
                ss.end()
            }
            SyntaxVariant::FinallyClause(FinallyClauseChildren{keyword,body}) => {
                let mut ss = s.serialize_struct("", 3)?;
                ss.serialize_field("kind", "finally_clause")?;
                ss.serialize_field("finally_keyword", &self.with(keyword))?;
                ss.serialize_field("finally_body", &self.with(body))?;
                ss.end()
            }
            SyntaxVariant::DoStatement(DoStatementChildren{keyword,body,while_keyword,left_paren,condition,right_paren,semicolon}) => {
                let mut ss = s.serialize_struct("", 8)?;
                ss.serialize_field("kind", "do_statement")?;
                ss.serialize_field("do_keyword", &self.with(keyword))?;
                ss.serialize_field("do_body", &self.with(body))?;
                ss.serialize_field("do_while_keyword", &self.with(while_keyword))?;
                ss.serialize_field("do_left_paren", &self.with(left_paren))?;
                ss.serialize_field("do_condition", &self.with(condition))?;
                ss.serialize_field("do_right_paren", &self.with(right_paren))?;
                ss.serialize_field("do_semicolon", &self.with(semicolon))?;
                ss.end()
            }
            SyntaxVariant::ForStatement(ForStatementChildren{keyword,left_paren,initializer,first_semicolon,control,second_semicolon,end_of_loop,right_paren,body}) => {
                let mut ss = s.serialize_struct("", 10)?;
                ss.serialize_field("kind", "for_statement")?;
                ss.serialize_field("for_keyword", &self.with(keyword))?;
                ss.serialize_field("for_left_paren", &self.with(left_paren))?;
                ss.serialize_field("for_initializer", &self.with(initializer))?;
                ss.serialize_field("for_first_semicolon", &self.with(first_semicolon))?;
                ss.serialize_field("for_control", &self.with(control))?;
                ss.serialize_field("for_second_semicolon", &self.with(second_semicolon))?;
                ss.serialize_field("for_end_of_loop", &self.with(end_of_loop))?;
                ss.serialize_field("for_right_paren", &self.with(right_paren))?;
                ss.serialize_field("for_body", &self.with(body))?;
                ss.end()
            }
            SyntaxVariant::ForeachStatement(ForeachStatementChildren{keyword,left_paren,collection,await_keyword,as_,key,arrow,value,right_paren,body}) => {
                let mut ss = s.serialize_struct("", 11)?;
                ss.serialize_field("kind", "foreach_statement")?;
                ss.serialize_field("foreach_keyword", &self.with(keyword))?;
                ss.serialize_field("foreach_left_paren", &self.with(left_paren))?;
                ss.serialize_field("foreach_collection", &self.with(collection))?;
                ss.serialize_field("foreach_await_keyword", &self.with(await_keyword))?;
                ss.serialize_field("foreach_as", &self.with(as_))?;
                ss.serialize_field("foreach_key", &self.with(key))?;
                ss.serialize_field("foreach_arrow", &self.with(arrow))?;
                ss.serialize_field("foreach_value", &self.with(value))?;
                ss.serialize_field("foreach_right_paren", &self.with(right_paren))?;
                ss.serialize_field("foreach_body", &self.with(body))?;
                ss.end()
            }
            SyntaxVariant::SwitchStatement(SwitchStatementChildren{keyword,left_paren,expression,right_paren,left_brace,sections,right_brace}) => {
                let mut ss = s.serialize_struct("", 8)?;
                ss.serialize_field("kind", "switch_statement")?;
                ss.serialize_field("switch_keyword", &self.with(keyword))?;
                ss.serialize_field("switch_left_paren", &self.with(left_paren))?;
                ss.serialize_field("switch_expression", &self.with(expression))?;
                ss.serialize_field("switch_right_paren", &self.with(right_paren))?;
                ss.serialize_field("switch_left_brace", &self.with(left_brace))?;
                ss.serialize_field("switch_sections", &self.with(sections))?;
                ss.serialize_field("switch_right_brace", &self.with(right_brace))?;
                ss.end()
            }
            SyntaxVariant::SwitchSection(SwitchSectionChildren{labels,statements,fallthrough}) => {
                let mut ss = s.serialize_struct("", 4)?;
                ss.serialize_field("kind", "switch_section")?;
                ss.serialize_field("switch_section_labels", &self.with(labels))?;
                ss.serialize_field("switch_section_statements", &self.with(statements))?;
                ss.serialize_field("switch_section_fallthrough", &self.with(fallthrough))?;
                ss.end()
            }
            SyntaxVariant::SwitchFallthrough(SwitchFallthroughChildren{keyword,semicolon}) => {
                let mut ss = s.serialize_struct("", 3)?;
                ss.serialize_field("kind", "switch_fallthrough")?;
                ss.serialize_field("fallthrough_keyword", &self.with(keyword))?;
                ss.serialize_field("fallthrough_semicolon", &self.with(semicolon))?;
                ss.end()
            }
            SyntaxVariant::CaseLabel(CaseLabelChildren{keyword,expression,colon}) => {
                let mut ss = s.serialize_struct("", 4)?;
                ss.serialize_field("kind", "case_label")?;
                ss.serialize_field("case_keyword", &self.with(keyword))?;
                ss.serialize_field("case_expression", &self.with(expression))?;
                ss.serialize_field("case_colon", &self.with(colon))?;
                ss.end()
            }
            SyntaxVariant::DefaultLabel(DefaultLabelChildren{keyword,colon}) => {
                let mut ss = s.serialize_struct("", 3)?;
                ss.serialize_field("kind", "default_label")?;
                ss.serialize_field("default_keyword", &self.with(keyword))?;
                ss.serialize_field("default_colon", &self.with(colon))?;
                ss.end()
            }
            SyntaxVariant::MatchStatement(MatchStatementChildren{keyword,left_paren,expression,right_paren,left_brace,arms,right_brace}) => {
                let mut ss = s.serialize_struct("", 8)?;
                ss.serialize_field("kind", "match_statement")?;
                ss.serialize_field("match_statement_keyword", &self.with(keyword))?;
                ss.serialize_field("match_statement_left_paren", &self.with(left_paren))?;
                ss.serialize_field("match_statement_expression", &self.with(expression))?;
                ss.serialize_field("match_statement_right_paren", &self.with(right_paren))?;
                ss.serialize_field("match_statement_left_brace", &self.with(left_brace))?;
                ss.serialize_field("match_statement_arms", &self.with(arms))?;
                ss.serialize_field("match_statement_right_brace", &self.with(right_brace))?;
                ss.end()
            }
            SyntaxVariant::MatchStatementArm(MatchStatementArmChildren{pattern,arrow,body}) => {
                let mut ss = s.serialize_struct("", 4)?;
                ss.serialize_field("kind", "match_statement_arm")?;
                ss.serialize_field("match_statement_arm_pattern", &self.with(pattern))?;
                ss.serialize_field("match_statement_arm_arrow", &self.with(arrow))?;
                ss.serialize_field("match_statement_arm_body", &self.with(body))?;
                ss.end()
            }
            SyntaxVariant::ReturnStatement(ReturnStatementChildren{keyword,expression,semicolon}) => {
                let mut ss = s.serialize_struct("", 4)?;
                ss.serialize_field("kind", "return_statement")?;
                ss.serialize_field("return_keyword", &self.with(keyword))?;
                ss.serialize_field("return_expression", &self.with(expression))?;
                ss.serialize_field("return_semicolon", &self.with(semicolon))?;
                ss.end()
            }
            SyntaxVariant::YieldBreakStatement(YieldBreakStatementChildren{keyword,break_,semicolon}) => {
                let mut ss = s.serialize_struct("", 4)?;
                ss.serialize_field("kind", "yield_break_statement")?;
                ss.serialize_field("yield_break_keyword", &self.with(keyword))?;
                ss.serialize_field("yield_break_break", &self.with(break_))?;
                ss.serialize_field("yield_break_semicolon", &self.with(semicolon))?;
                ss.end()
            }
            SyntaxVariant::ThrowStatement(ThrowStatementChildren{keyword,expression,semicolon}) => {
                let mut ss = s.serialize_struct("", 4)?;
                ss.serialize_field("kind", "throw_statement")?;
                ss.serialize_field("throw_keyword", &self.with(keyword))?;
                ss.serialize_field("throw_expression", &self.with(expression))?;
                ss.serialize_field("throw_semicolon", &self.with(semicolon))?;
                ss.end()
            }
            SyntaxVariant::BreakStatement(BreakStatementChildren{keyword,semicolon}) => {
                let mut ss = s.serialize_struct("", 3)?;
                ss.serialize_field("kind", "break_statement")?;
                ss.serialize_field("break_keyword", &self.with(keyword))?;
                ss.serialize_field("break_semicolon", &self.with(semicolon))?;
                ss.end()
            }
            SyntaxVariant::ContinueStatement(ContinueStatementChildren{keyword,semicolon}) => {
                let mut ss = s.serialize_struct("", 3)?;
                ss.serialize_field("kind", "continue_statement")?;
                ss.serialize_field("continue_keyword", &self.with(keyword))?;
                ss.serialize_field("continue_semicolon", &self.with(semicolon))?;
                ss.end()
            }
            SyntaxVariant::EchoStatement(EchoStatementChildren{keyword,expressions,semicolon}) => {
                let mut ss = s.serialize_struct("", 4)?;
                ss.serialize_field("kind", "echo_statement")?;
                ss.serialize_field("echo_keyword", &self.with(keyword))?;
                ss.serialize_field("echo_expressions", &self.with(expressions))?;
                ss.serialize_field("echo_semicolon", &self.with(semicolon))?;
                ss.end()
            }
            SyntaxVariant::ConcurrentStatement(ConcurrentStatementChildren{keyword,statement}) => {
                let mut ss = s.serialize_struct("", 3)?;
                ss.serialize_field("kind", "concurrent_statement")?;
                ss.serialize_field("concurrent_keyword", &self.with(keyword))?;
                ss.serialize_field("concurrent_statement", &self.with(statement))?;
                ss.end()
            }
            SyntaxVariant::SimpleInitializer(SimpleInitializerChildren{equal,value}) => {
                let mut ss = s.serialize_struct("", 3)?;
                ss.serialize_field("kind", "simple_initializer")?;
                ss.serialize_field("simple_initializer_equal", &self.with(equal))?;
                ss.serialize_field("simple_initializer_value", &self.with(value))?;
                ss.end()
            }
            SyntaxVariant::AnonymousClass(AnonymousClassChildren{class_keyword,left_paren,argument_list,right_paren,extends_keyword,extends_list,implements_keyword,implements_list,body}) => {
                let mut ss = s.serialize_struct("", 10)?;
                ss.serialize_field("kind", "anonymous_class")?;
                ss.serialize_field("anonymous_class_class_keyword", &self.with(class_keyword))?;
                ss.serialize_field("anonymous_class_left_paren", &self.with(left_paren))?;
                ss.serialize_field("anonymous_class_argument_list", &self.with(argument_list))?;
                ss.serialize_field("anonymous_class_right_paren", &self.with(right_paren))?;
                ss.serialize_field("anonymous_class_extends_keyword", &self.with(extends_keyword))?;
                ss.serialize_field("anonymous_class_extends_list", &self.with(extends_list))?;
                ss.serialize_field("anonymous_class_implements_keyword", &self.with(implements_keyword))?;
                ss.serialize_field("anonymous_class_implements_list", &self.with(implements_list))?;
                ss.serialize_field("anonymous_class_body", &self.with(body))?;
                ss.end()
            }
            SyntaxVariant::AnonymousFunction(AnonymousFunctionChildren{attribute_spec,async_keyword,function_keyword,left_paren,parameters,right_paren,ctx_list,colon,readonly_return,type_,use_,body}) => {
                let mut ss = s.serialize_struct("", 13)?;
                ss.serialize_field("kind", "anonymous_function")?;
                ss.serialize_field("anonymous_attribute_spec", &self.with(attribute_spec))?;
                ss.serialize_field("anonymous_async_keyword", &self.with(async_keyword))?;
                ss.serialize_field("anonymous_function_keyword", &self.with(function_keyword))?;
                ss.serialize_field("anonymous_left_paren", &self.with(left_paren))?;
                ss.serialize_field("anonymous_parameters", &self.with(parameters))?;
                ss.serialize_field("anonymous_right_paren", &self.with(right_paren))?;
                ss.serialize_field("anonymous_ctx_list", &self.with(ctx_list))?;
                ss.serialize_field("anonymous_colon", &self.with(colon))?;
                ss.serialize_field("anonymous_readonly_return", &self.with(readonly_return))?;
                ss.serialize_field("anonymous_type", &self.with(type_))?;
                ss.serialize_field("anonymous_use", &self.with(use_))?;
                ss.serialize_field("anonymous_body", &self.with(body))?;
                ss.end()
            }
            SyntaxVariant::AnonymousFunctionUseClause(AnonymousFunctionUseClauseChildren{keyword,left_paren,variables,right_paren}) => {
                let mut ss = s.serialize_struct("", 5)?;
                ss.serialize_field("kind", "anonymous_function_use_clause")?;
                ss.serialize_field("anonymous_use_keyword", &self.with(keyword))?;
                ss.serialize_field("anonymous_use_left_paren", &self.with(left_paren))?;
                ss.serialize_field("anonymous_use_variables", &self.with(variables))?;
                ss.serialize_field("anonymous_use_right_paren", &self.with(right_paren))?;
                ss.end()
            }
            SyntaxVariant::VariablePattern(VariablePatternChildren{variable}) => {
                let mut ss = s.serialize_struct("", 2)?;
                ss.serialize_field("kind", "variable_pattern")?;
                ss.serialize_field("variable_pattern_variable", &self.with(variable))?;
                ss.end()
            }
            SyntaxVariant::ConstructorPattern(ConstructorPatternChildren{constructor,left_paren,members,right_paren}) => {
                let mut ss = s.serialize_struct("", 5)?;
                ss.serialize_field("kind", "constructor_pattern")?;
                ss.serialize_field("constructor_pattern_constructor", &self.with(constructor))?;
                ss.serialize_field("constructor_pattern_left_paren", &self.with(left_paren))?;
                ss.serialize_field("constructor_pattern_members", &self.with(members))?;
                ss.serialize_field("constructor_pattern_right_paren", &self.with(right_paren))?;
                ss.end()
            }
            SyntaxVariant::RefinementPattern(RefinementPatternChildren{variable,colon,specifier}) => {
                let mut ss = s.serialize_struct("", 4)?;
                ss.serialize_field("kind", "refinement_pattern")?;
                ss.serialize_field("refinement_pattern_variable", &self.with(variable))?;
                ss.serialize_field("refinement_pattern_colon", &self.with(colon))?;
                ss.serialize_field("refinement_pattern_specifier", &self.with(specifier))?;
                ss.end()
            }
            SyntaxVariant::LambdaExpression(LambdaExpressionChildren{attribute_spec,async_,signature,arrow,body}) => {
                let mut ss = s.serialize_struct("", 6)?;
                ss.serialize_field("kind", "lambda_expression")?;
                ss.serialize_field("lambda_attribute_spec", &self.with(attribute_spec))?;
                ss.serialize_field("lambda_async", &self.with(async_))?;
                ss.serialize_field("lambda_signature", &self.with(signature))?;
                ss.serialize_field("lambda_arrow", &self.with(arrow))?;
                ss.serialize_field("lambda_body", &self.with(body))?;
                ss.end()
            }
            SyntaxVariant::LambdaSignature(LambdaSignatureChildren{left_paren,parameters,right_paren,contexts,colon,readonly_return,type_}) => {
                let mut ss = s.serialize_struct("", 8)?;
                ss.serialize_field("kind", "lambda_signature")?;
                ss.serialize_field("lambda_left_paren", &self.with(left_paren))?;
                ss.serialize_field("lambda_parameters", &self.with(parameters))?;
                ss.serialize_field("lambda_right_paren", &self.with(right_paren))?;
                ss.serialize_field("lambda_contexts", &self.with(contexts))?;
                ss.serialize_field("lambda_colon", &self.with(colon))?;
                ss.serialize_field("lambda_readonly_return", &self.with(readonly_return))?;
                ss.serialize_field("lambda_type", &self.with(type_))?;
                ss.end()
            }
            SyntaxVariant::CastExpression(CastExpressionChildren{left_paren,type_,right_paren,operand}) => {
                let mut ss = s.serialize_struct("", 5)?;
                ss.serialize_field("kind", "cast_expression")?;
                ss.serialize_field("cast_left_paren", &self.with(left_paren))?;
                ss.serialize_field("cast_type", &self.with(type_))?;
                ss.serialize_field("cast_right_paren", &self.with(right_paren))?;
                ss.serialize_field("cast_operand", &self.with(operand))?;
                ss.end()
            }
            SyntaxVariant::ScopeResolutionExpression(ScopeResolutionExpressionChildren{qualifier,operator,name}) => {
                let mut ss = s.serialize_struct("", 4)?;
                ss.serialize_field("kind", "scope_resolution_expression")?;
                ss.serialize_field("scope_resolution_qualifier", &self.with(qualifier))?;
                ss.serialize_field("scope_resolution_operator", &self.with(operator))?;
                ss.serialize_field("scope_resolution_name", &self.with(name))?;
                ss.end()
            }
            SyntaxVariant::MemberSelectionExpression(MemberSelectionExpressionChildren{object,operator,name}) => {
                let mut ss = s.serialize_struct("", 4)?;
                ss.serialize_field("kind", "member_selection_expression")?;
                ss.serialize_field("member_object", &self.with(object))?;
                ss.serialize_field("member_operator", &self.with(operator))?;
                ss.serialize_field("member_name", &self.with(name))?;
                ss.end()
            }
            SyntaxVariant::SafeMemberSelectionExpression(SafeMemberSelectionExpressionChildren{object,operator,name}) => {
                let mut ss = s.serialize_struct("", 4)?;
                ss.serialize_field("kind", "safe_member_selection_expression")?;
                ss.serialize_field("safe_member_object", &self.with(object))?;
                ss.serialize_field("safe_member_operator", &self.with(operator))?;
                ss.serialize_field("safe_member_name", &self.with(name))?;
                ss.end()
            }
            SyntaxVariant::EmbeddedMemberSelectionExpression(EmbeddedMemberSelectionExpressionChildren{object,operator,name}) => {
                let mut ss = s.serialize_struct("", 4)?;
                ss.serialize_field("kind", "embedded_member_selection_expression")?;
                ss.serialize_field("embedded_member_object", &self.with(object))?;
                ss.serialize_field("embedded_member_operator", &self.with(operator))?;
                ss.serialize_field("embedded_member_name", &self.with(name))?;
                ss.end()
            }
            SyntaxVariant::YieldExpression(YieldExpressionChildren{keyword,operand}) => {
                let mut ss = s.serialize_struct("", 3)?;
                ss.serialize_field("kind", "yield_expression")?;
                ss.serialize_field("yield_keyword", &self.with(keyword))?;
                ss.serialize_field("yield_operand", &self.with(operand))?;
                ss.end()
            }
            SyntaxVariant::PrefixUnaryExpression(PrefixUnaryExpressionChildren{operator,operand}) => {
                let mut ss = s.serialize_struct("", 3)?;
                ss.serialize_field("kind", "prefix_unary_expression")?;
                ss.serialize_field("prefix_unary_operator", &self.with(operator))?;
                ss.serialize_field("prefix_unary_operand", &self.with(operand))?;
                ss.end()
            }
            SyntaxVariant::PostfixUnaryExpression(PostfixUnaryExpressionChildren{operand,operator}) => {
                let mut ss = s.serialize_struct("", 3)?;
                ss.serialize_field("kind", "postfix_unary_expression")?;
                ss.serialize_field("postfix_unary_operand", &self.with(operand))?;
                ss.serialize_field("postfix_unary_operator", &self.with(operator))?;
                ss.end()
            }
            SyntaxVariant::BinaryExpression(BinaryExpressionChildren{left_operand,operator,right_operand}) => {
                let mut ss = s.serialize_struct("", 4)?;
                ss.serialize_field("kind", "binary_expression")?;
                ss.serialize_field("binary_left_operand", &self.with(left_operand))?;
                ss.serialize_field("binary_operator", &self.with(operator))?;
                ss.serialize_field("binary_right_operand", &self.with(right_operand))?;
                ss.end()
            }
            SyntaxVariant::IsExpression(IsExpressionChildren{left_operand,operator,right_operand}) => {
                let mut ss = s.serialize_struct("", 4)?;
                ss.serialize_field("kind", "is_expression")?;
                ss.serialize_field("is_left_operand", &self.with(left_operand))?;
                ss.serialize_field("is_operator", &self.with(operator))?;
                ss.serialize_field("is_right_operand", &self.with(right_operand))?;
                ss.end()
            }
            SyntaxVariant::AsExpression(AsExpressionChildren{left_operand,operator,right_operand}) => {
                let mut ss = s.serialize_struct("", 4)?;
                ss.serialize_field("kind", "as_expression")?;
                ss.serialize_field("as_left_operand", &self.with(left_operand))?;
                ss.serialize_field("as_operator", &self.with(operator))?;
                ss.serialize_field("as_right_operand", &self.with(right_operand))?;
                ss.end()
            }
            SyntaxVariant::NullableAsExpression(NullableAsExpressionChildren{left_operand,operator,right_operand}) => {
                let mut ss = s.serialize_struct("", 4)?;
                ss.serialize_field("kind", "nullable_as_expression")?;
                ss.serialize_field("nullable_as_left_operand", &self.with(left_operand))?;
                ss.serialize_field("nullable_as_operator", &self.with(operator))?;
                ss.serialize_field("nullable_as_right_operand", &self.with(right_operand))?;
                ss.end()
            }
            SyntaxVariant::UpcastExpression(UpcastExpressionChildren{left_operand,operator,right_operand}) => {
                let mut ss = s.serialize_struct("", 4)?;
                ss.serialize_field("kind", "upcast_expression")?;
                ss.serialize_field("upcast_left_operand", &self.with(left_operand))?;
                ss.serialize_field("upcast_operator", &self.with(operator))?;
                ss.serialize_field("upcast_right_operand", &self.with(right_operand))?;
                ss.end()
            }
            SyntaxVariant::ConditionalExpression(ConditionalExpressionChildren{test,question,consequence,colon,alternative}) => {
                let mut ss = s.serialize_struct("", 6)?;
                ss.serialize_field("kind", "conditional_expression")?;
                ss.serialize_field("conditional_test", &self.with(test))?;
                ss.serialize_field("conditional_question", &self.with(question))?;
                ss.serialize_field("conditional_consequence", &self.with(consequence))?;
                ss.serialize_field("conditional_colon", &self.with(colon))?;
                ss.serialize_field("conditional_alternative", &self.with(alternative))?;
                ss.end()
            }
            SyntaxVariant::EvalExpression(EvalExpressionChildren{keyword,left_paren,argument,right_paren}) => {
                let mut ss = s.serialize_struct("", 5)?;
                ss.serialize_field("kind", "eval_expression")?;
                ss.serialize_field("eval_keyword", &self.with(keyword))?;
                ss.serialize_field("eval_left_paren", &self.with(left_paren))?;
                ss.serialize_field("eval_argument", &self.with(argument))?;
                ss.serialize_field("eval_right_paren", &self.with(right_paren))?;
                ss.end()
            }
            SyntaxVariant::IssetExpression(IssetExpressionChildren{keyword,left_paren,argument_list,right_paren}) => {
                let mut ss = s.serialize_struct("", 5)?;
                ss.serialize_field("kind", "isset_expression")?;
                ss.serialize_field("isset_keyword", &self.with(keyword))?;
                ss.serialize_field("isset_left_paren", &self.with(left_paren))?;
                ss.serialize_field("isset_argument_list", &self.with(argument_list))?;
                ss.serialize_field("isset_right_paren", &self.with(right_paren))?;
                ss.end()
            }
            SyntaxVariant::FunctionCallExpression(FunctionCallExpressionChildren{receiver,type_args,left_paren,argument_list,right_paren}) => {
                let mut ss = s.serialize_struct("", 6)?;
                ss.serialize_field("kind", "function_call_expression")?;
                ss.serialize_field("function_call_receiver", &self.with(receiver))?;
                ss.serialize_field("function_call_type_args", &self.with(type_args))?;
                ss.serialize_field("function_call_left_paren", &self.with(left_paren))?;
                ss.serialize_field("function_call_argument_list", &self.with(argument_list))?;
                ss.serialize_field("function_call_right_paren", &self.with(right_paren))?;
                ss.end()
            }
            SyntaxVariant::FunctionPointerExpression(FunctionPointerExpressionChildren{receiver,type_args}) => {
                let mut ss = s.serialize_struct("", 3)?;
                ss.serialize_field("kind", "function_pointer_expression")?;
                ss.serialize_field("function_pointer_receiver", &self.with(receiver))?;
                ss.serialize_field("function_pointer_type_args", &self.with(type_args))?;
                ss.end()
            }
            SyntaxVariant::ParenthesizedExpression(ParenthesizedExpressionChildren{left_paren,expression,right_paren}) => {
                let mut ss = s.serialize_struct("", 4)?;
                ss.serialize_field("kind", "parenthesized_expression")?;
                ss.serialize_field("parenthesized_expression_left_paren", &self.with(left_paren))?;
                ss.serialize_field("parenthesized_expression_expression", &self.with(expression))?;
                ss.serialize_field("parenthesized_expression_right_paren", &self.with(right_paren))?;
                ss.end()
            }
            SyntaxVariant::BracedExpression(BracedExpressionChildren{left_brace,expression,right_brace}) => {
                let mut ss = s.serialize_struct("", 4)?;
                ss.serialize_field("kind", "braced_expression")?;
                ss.serialize_field("braced_expression_left_brace", &self.with(left_brace))?;
                ss.serialize_field("braced_expression_expression", &self.with(expression))?;
                ss.serialize_field("braced_expression_right_brace", &self.with(right_brace))?;
                ss.end()
            }
            SyntaxVariant::ETSpliceExpression(ETSpliceExpressionChildren{dollar,left_brace,expression,right_brace}) => {
                let mut ss = s.serialize_struct("", 5)?;
                ss.serialize_field("kind", "et_splice_expression")?;
                ss.serialize_field("et_splice_expression_dollar", &self.with(dollar))?;
                ss.serialize_field("et_splice_expression_left_brace", &self.with(left_brace))?;
                ss.serialize_field("et_splice_expression_expression", &self.with(expression))?;
                ss.serialize_field("et_splice_expression_right_brace", &self.with(right_brace))?;
                ss.end()
            }
            SyntaxVariant::EmbeddedBracedExpression(EmbeddedBracedExpressionChildren{left_brace,expression,right_brace}) => {
                let mut ss = s.serialize_struct("", 4)?;
                ss.serialize_field("kind", "embedded_braced_expression")?;
                ss.serialize_field("embedded_braced_expression_left_brace", &self.with(left_brace))?;
                ss.serialize_field("embedded_braced_expression_expression", &self.with(expression))?;
                ss.serialize_field("embedded_braced_expression_right_brace", &self.with(right_brace))?;
                ss.end()
            }
            SyntaxVariant::ListExpression(ListExpressionChildren{keyword,left_paren,members,right_paren}) => {
                let mut ss = s.serialize_struct("", 5)?;
                ss.serialize_field("kind", "list_expression")?;
                ss.serialize_field("list_keyword", &self.with(keyword))?;
                ss.serialize_field("list_left_paren", &self.with(left_paren))?;
                ss.serialize_field("list_members", &self.with(members))?;
                ss.serialize_field("list_right_paren", &self.with(right_paren))?;
                ss.end()
            }
            SyntaxVariant::CollectionLiteralExpression(CollectionLiteralExpressionChildren{name,left_brace,initializers,right_brace}) => {
                let mut ss = s.serialize_struct("", 5)?;
                ss.serialize_field("kind", "collection_literal_expression")?;
                ss.serialize_field("collection_literal_name", &self.with(name))?;
                ss.serialize_field("collection_literal_left_brace", &self.with(left_brace))?;
                ss.serialize_field("collection_literal_initializers", &self.with(initializers))?;
                ss.serialize_field("collection_literal_right_brace", &self.with(right_brace))?;
                ss.end()
            }
            SyntaxVariant::ObjectCreationExpression(ObjectCreationExpressionChildren{new_keyword,object}) => {
                let mut ss = s.serialize_struct("", 3)?;
                ss.serialize_field("kind", "object_creation_expression")?;
                ss.serialize_field("object_creation_new_keyword", &self.with(new_keyword))?;
                ss.serialize_field("object_creation_object", &self.with(object))?;
                ss.end()
            }
            SyntaxVariant::ConstructorCall(ConstructorCallChildren{type_,left_paren,argument_list,right_paren}) => {
                let mut ss = s.serialize_struct("", 5)?;
                ss.serialize_field("kind", "constructor_call")?;
                ss.serialize_field("constructor_call_type", &self.with(type_))?;
                ss.serialize_field("constructor_call_left_paren", &self.with(left_paren))?;
                ss.serialize_field("constructor_call_argument_list", &self.with(argument_list))?;
                ss.serialize_field("constructor_call_right_paren", &self.with(right_paren))?;
                ss.end()
            }
            SyntaxVariant::DarrayIntrinsicExpression(DarrayIntrinsicExpressionChildren{keyword,explicit_type,left_bracket,members,right_bracket}) => {
                let mut ss = s.serialize_struct("", 6)?;
                ss.serialize_field("kind", "darray_intrinsic_expression")?;
                ss.serialize_field("darray_intrinsic_keyword", &self.with(keyword))?;
                ss.serialize_field("darray_intrinsic_explicit_type", &self.with(explicit_type))?;
                ss.serialize_field("darray_intrinsic_left_bracket", &self.with(left_bracket))?;
                ss.serialize_field("darray_intrinsic_members", &self.with(members))?;
                ss.serialize_field("darray_intrinsic_right_bracket", &self.with(right_bracket))?;
                ss.end()
            }
            SyntaxVariant::DictionaryIntrinsicExpression(DictionaryIntrinsicExpressionChildren{keyword,explicit_type,left_bracket,members,right_bracket}) => {
                let mut ss = s.serialize_struct("", 6)?;
                ss.serialize_field("kind", "dictionary_intrinsic_expression")?;
                ss.serialize_field("dictionary_intrinsic_keyword", &self.with(keyword))?;
                ss.serialize_field("dictionary_intrinsic_explicit_type", &self.with(explicit_type))?;
                ss.serialize_field("dictionary_intrinsic_left_bracket", &self.with(left_bracket))?;
                ss.serialize_field("dictionary_intrinsic_members", &self.with(members))?;
                ss.serialize_field("dictionary_intrinsic_right_bracket", &self.with(right_bracket))?;
                ss.end()
            }
            SyntaxVariant::KeysetIntrinsicExpression(KeysetIntrinsicExpressionChildren{keyword,explicit_type,left_bracket,members,right_bracket}) => {
                let mut ss = s.serialize_struct("", 6)?;
ss.serialize_field("kind", "keyset_intrinsic_expression")?; ss.serialize_field("keyset_intrinsic_keyword", &self.with(keyword))?; ss.serialize_field("keyset_intrinsic_explicit_type", &self.with(explicit_type))?; ss.serialize_field("keyset_intrinsic_left_bracket", &self.with(left_bracket))?; ss.serialize_field("keyset_intrinsic_members", &self.with(members))?; ss.serialize_field("keyset_intrinsic_right_bracket", &self.with(right_bracket))?; ss.end() } SyntaxVariant::VarrayIntrinsicExpression (VarrayIntrinsicExpressionChildren{keyword,explicit_type,left_bracket,members,right_bracket} ) => { let mut ss = s.serialize_struct("", 6)?; ss.serialize_field("kind", "varray_intrinsic_expression")?; ss.serialize_field("varray_intrinsic_keyword", &self.with(keyword))?; ss.serialize_field("varray_intrinsic_explicit_type", &self.with(explicit_type))?; ss.serialize_field("varray_intrinsic_left_bracket", &self.with(left_bracket))?; ss.serialize_field("varray_intrinsic_members", &self.with(members))?; ss.serialize_field("varray_intrinsic_right_bracket", &self.with(right_bracket))?; ss.end() } SyntaxVariant::VectorIntrinsicExpression (VectorIntrinsicExpressionChildren{keyword,explicit_type,left_bracket,members,right_bracket} ) => { let mut ss = s.serialize_struct("", 6)?; ss.serialize_field("kind", "vector_intrinsic_expression")?; ss.serialize_field("vector_intrinsic_keyword", &self.with(keyword))?; ss.serialize_field("vector_intrinsic_explicit_type", &self.with(explicit_type))?; ss.serialize_field("vector_intrinsic_left_bracket", &self.with(left_bracket))?; ss.serialize_field("vector_intrinsic_members", &self.with(members))?; ss.serialize_field("vector_intrinsic_right_bracket", &self.with(right_bracket))?; ss.end() } SyntaxVariant::ElementInitializer (ElementInitializerChildren{key,arrow,value} ) => { let mut ss = s.serialize_struct("", 4)?; ss.serialize_field("kind", "element_initializer")?; ss.serialize_field("element_key", &self.with(key))?; ss.serialize_field("element_arrow", &self.with(arrow))?; ss.serialize_field("element_value", &self.with(value))?; ss.end() } SyntaxVariant::SubscriptExpression (SubscriptExpressionChildren{receiver,left_bracket,index,right_bracket} ) => { let mut ss = s.serialize_struct("", 5)?; ss.serialize_field("kind", "subscript_expression")?; ss.serialize_field("subscript_receiver", &self.with(receiver))?; ss.serialize_field("subscript_left_bracket", &self.with(left_bracket))?; ss.serialize_field("subscript_index", &self.with(index))?; ss.serialize_field("subscript_right_bracket", &self.with(right_bracket))?; ss.end() } SyntaxVariant::EmbeddedSubscriptExpression (EmbeddedSubscriptExpressionChildren{receiver,left_bracket,index,right_bracket} ) => { let mut ss = s.serialize_struct("", 5)?; ss.serialize_field("kind", "embedded_subscript_expression")?; ss.serialize_field("embedded_subscript_receiver", &self.with(receiver))?; ss.serialize_field("embedded_subscript_left_bracket", &self.with(left_bracket))?; ss.serialize_field("embedded_subscript_index", &self.with(index))?; ss.serialize_field("embedded_subscript_right_bracket", &self.with(right_bracket))?; ss.end() } SyntaxVariant::AwaitableCreationExpression (AwaitableCreationExpressionChildren{attribute_spec,async_,compound_statement} ) => { let mut ss = s.serialize_struct("", 4)?; ss.serialize_field("kind", "awaitable_creation_expression")?; ss.serialize_field("awaitable_attribute_spec", &self.with(attribute_spec))?; ss.serialize_field("awaitable_async", &self.with(async_))?; ss.serialize_field("awaitable_compound_statement", 
&self.with(compound_statement))?; ss.end() } SyntaxVariant::XHPChildrenDeclaration (XHPChildrenDeclarationChildren{keyword,expression,semicolon} ) => { let mut ss = s.serialize_struct("", 4)?; ss.serialize_field("kind", "xhp_children_declaration")?; ss.serialize_field("xhp_children_keyword", &self.with(keyword))?; ss.serialize_field("xhp_children_expression", &self.with(expression))?; ss.serialize_field("xhp_children_semicolon", &self.with(semicolon))?; ss.end() } SyntaxVariant::XHPChildrenParenthesizedList (XHPChildrenParenthesizedListChildren{left_paren,xhp_children,right_paren} ) => { let mut ss = s.serialize_struct("", 4)?; ss.serialize_field("kind", "xhp_children_parenthesized_list")?; ss.serialize_field("xhp_children_list_left_paren", &self.with(left_paren))?; ss.serialize_field("xhp_children_list_xhp_children", &self.with(xhp_children))?; ss.serialize_field("xhp_children_list_right_paren", &self.with(right_paren))?; ss.end() } SyntaxVariant::XHPCategoryDeclaration (XHPCategoryDeclarationChildren{keyword,categories,semicolon} ) => { let mut ss = s.serialize_struct("", 4)?; ss.serialize_field("kind", "xhp_category_declaration")?; ss.serialize_field("xhp_category_keyword", &self.with(keyword))?; ss.serialize_field("xhp_category_categories", &self.with(categories))?; ss.serialize_field("xhp_category_semicolon", &self.with(semicolon))?; ss.end() } SyntaxVariant::XHPEnumType (XHPEnumTypeChildren{like,keyword,left_brace,values,right_brace} ) => { let mut ss = s.serialize_struct("", 6)?; ss.serialize_field("kind", "xhp_enum_type")?; ss.serialize_field("xhp_enum_like", &self.with(like))?; ss.serialize_field("xhp_enum_keyword", &self.with(keyword))?; ss.serialize_field("xhp_enum_left_brace", &self.with(left_brace))?; ss.serialize_field("xhp_enum_values", &self.with(values))?; ss.serialize_field("xhp_enum_right_brace", &self.with(right_brace))?; ss.end() } SyntaxVariant::XHPLateinit (XHPLateinitChildren{at,keyword} ) => { let mut ss = s.serialize_struct("", 3)?; ss.serialize_field("kind", "xhp_lateinit")?; ss.serialize_field("xhp_lateinit_at", &self.with(at))?; ss.serialize_field("xhp_lateinit_keyword", &self.with(keyword))?; ss.end() } SyntaxVariant::XHPRequired (XHPRequiredChildren{at,keyword} ) => { let mut ss = s.serialize_struct("", 3)?; ss.serialize_field("kind", "xhp_required")?; ss.serialize_field("xhp_required_at", &self.with(at))?; ss.serialize_field("xhp_required_keyword", &self.with(keyword))?; ss.end() } SyntaxVariant::XHPClassAttributeDeclaration (XHPClassAttributeDeclarationChildren{keyword,attributes,semicolon} ) => { let mut ss = s.serialize_struct("", 4)?; ss.serialize_field("kind", "xhp_class_attribute_declaration")?; ss.serialize_field("xhp_attribute_keyword", &self.with(keyword))?; ss.serialize_field("xhp_attribute_attributes", &self.with(attributes))?; ss.serialize_field("xhp_attribute_semicolon", &self.with(semicolon))?; ss.end() } SyntaxVariant::XHPClassAttribute (XHPClassAttributeChildren{type_,name,initializer,required} ) => { let mut ss = s.serialize_struct("", 5)?; ss.serialize_field("kind", "xhp_class_attribute")?; ss.serialize_field("xhp_attribute_decl_type", &self.with(type_))?; ss.serialize_field("xhp_attribute_decl_name", &self.with(name))?; ss.serialize_field("xhp_attribute_decl_initializer", &self.with(initializer))?; ss.serialize_field("xhp_attribute_decl_required", &self.with(required))?; ss.end() } SyntaxVariant::XHPSimpleClassAttribute (XHPSimpleClassAttributeChildren{type_} ) => { let mut ss = s.serialize_struct("", 2)?; ss.serialize_field("kind", 
"xhp_simple_class_attribute")?; ss.serialize_field("xhp_simple_class_attribute_type", &self.with(type_))?; ss.end() } SyntaxVariant::XHPSimpleAttribute (XHPSimpleAttributeChildren{name,equal,expression} ) => { let mut ss = s.serialize_struct("", 4)?; ss.serialize_field("kind", "xhp_simple_attribute")?; ss.serialize_field("xhp_simple_attribute_name", &self.with(name))?; ss.serialize_field("xhp_simple_attribute_equal", &self.with(equal))?; ss.serialize_field("xhp_simple_attribute_expression", &self.with(expression))?; ss.end() } SyntaxVariant::XHPSpreadAttribute (XHPSpreadAttributeChildren{left_brace,spread_operator,expression,right_brace} ) => { let mut ss = s.serialize_struct("", 5)?; ss.serialize_field("kind", "xhp_spread_attribute")?; ss.serialize_field("xhp_spread_attribute_left_brace", &self.with(left_brace))?; ss.serialize_field("xhp_spread_attribute_spread_operator", &self.with(spread_operator))?; ss.serialize_field("xhp_spread_attribute_expression", &self.with(expression))?; ss.serialize_field("xhp_spread_attribute_right_brace", &self.with(right_brace))?; ss.end() } SyntaxVariant::XHPOpen (XHPOpenChildren{left_angle,name,attributes,right_angle} ) => { let mut ss = s.serialize_struct("", 5)?; ss.serialize_field("kind", "xhp_open")?; ss.serialize_field("xhp_open_left_angle", &self.with(left_angle))?; ss.serialize_field("xhp_open_name", &self.with(name))?; ss.serialize_field("xhp_open_attributes", &self.with(attributes))?; ss.serialize_field("xhp_open_right_angle", &self.with(right_angle))?; ss.end() } SyntaxVariant::XHPExpression (XHPExpressionChildren{open,body,close} ) => { let mut ss = s.serialize_struct("", 4)?; ss.serialize_field("kind", "xhp_expression")?; ss.serialize_field("xhp_open", &self.with(open))?; ss.serialize_field("xhp_body", &self.with(body))?; ss.serialize_field("xhp_close", &self.with(close))?; ss.end() } SyntaxVariant::XHPClose (XHPCloseChildren{left_angle,name,right_angle} ) => { let mut ss = s.serialize_struct("", 4)?; ss.serialize_field("kind", "xhp_close")?; ss.serialize_field("xhp_close_left_angle", &self.with(left_angle))?; ss.serialize_field("xhp_close_name", &self.with(name))?; ss.serialize_field("xhp_close_right_angle", &self.with(right_angle))?; ss.end() } SyntaxVariant::TypeConstant (TypeConstantChildren{left_type,separator,right_type} ) => { let mut ss = s.serialize_struct("", 4)?; ss.serialize_field("kind", "type_constant")?; ss.serialize_field("type_constant_left_type", &self.with(left_type))?; ss.serialize_field("type_constant_separator", &self.with(separator))?; ss.serialize_field("type_constant_right_type", &self.with(right_type))?; ss.end() } SyntaxVariant::VectorTypeSpecifier (VectorTypeSpecifierChildren{keyword,left_angle,type_,trailing_comma,right_angle} ) => { let mut ss = s.serialize_struct("", 6)?; ss.serialize_field("kind", "vector_type_specifier")?; ss.serialize_field("vector_type_keyword", &self.with(keyword))?; ss.serialize_field("vector_type_left_angle", &self.with(left_angle))?; ss.serialize_field("vector_type_type", &self.with(type_))?; ss.serialize_field("vector_type_trailing_comma", &self.with(trailing_comma))?; ss.serialize_field("vector_type_right_angle", &self.with(right_angle))?; ss.end() } SyntaxVariant::KeysetTypeSpecifier (KeysetTypeSpecifierChildren{keyword,left_angle,type_,trailing_comma,right_angle} ) => { let mut ss = s.serialize_struct("", 6)?; ss.serialize_field("kind", "keyset_type_specifier")?; ss.serialize_field("keyset_type_keyword", &self.with(keyword))?; ss.serialize_field("keyset_type_left_angle", 
&self.with(left_angle))?; ss.serialize_field("keyset_type_type", &self.with(type_))?; ss.serialize_field("keyset_type_trailing_comma", &self.with(trailing_comma))?; ss.serialize_field("keyset_type_right_angle", &self.with(right_angle))?; ss.end() } SyntaxVariant::TupleTypeExplicitSpecifier (TupleTypeExplicitSpecifierChildren{keyword,left_angle,types,right_angle} ) => { let mut ss = s.serialize_struct("", 5)?; ss.serialize_field("kind", "tuple_type_explicit_specifier")?; ss.serialize_field("tuple_type_keyword", &self.with(keyword))?; ss.serialize_field("tuple_type_left_angle", &self.with(left_angle))?; ss.serialize_field("tuple_type_types", &self.with(types))?; ss.serialize_field("tuple_type_right_angle", &self.with(right_angle))?; ss.end() } SyntaxVariant::VarrayTypeSpecifier (VarrayTypeSpecifierChildren{keyword,left_angle,type_,trailing_comma,right_angle} ) => { let mut ss = s.serialize_struct("", 6)?; ss.serialize_field("kind", "varray_type_specifier")?; ss.serialize_field("varray_keyword", &self.with(keyword))?; ss.serialize_field("varray_left_angle", &self.with(left_angle))?; ss.serialize_field("varray_type", &self.with(type_))?; ss.serialize_field("varray_trailing_comma", &self.with(trailing_comma))?; ss.serialize_field("varray_right_angle", &self.with(right_angle))?; ss.end() } SyntaxVariant::FunctionCtxTypeSpecifier (FunctionCtxTypeSpecifierChildren{keyword,variable} ) => { let mut ss = s.serialize_struct("", 3)?; ss.serialize_field("kind", "function_ctx_type_specifier")?; ss.serialize_field("function_ctx_type_keyword", &self.with(keyword))?; ss.serialize_field("function_ctx_type_variable", &self.with(variable))?; ss.end() } SyntaxVariant::TypeParameter (TypeParameterChildren{attribute_spec,reified,variance,name,param_params,constraints} ) => { let mut ss = s.serialize_struct("", 7)?; ss.serialize_field("kind", "type_parameter")?; ss.serialize_field("type_attribute_spec", &self.with(attribute_spec))?; ss.serialize_field("type_reified", &self.with(reified))?; ss.serialize_field("type_variance", &self.with(variance))?; ss.serialize_field("type_name", &self.with(name))?; ss.serialize_field("type_param_params", &self.with(param_params))?; ss.serialize_field("type_constraints", &self.with(constraints))?; ss.end() } SyntaxVariant::TypeConstraint (TypeConstraintChildren{keyword,type_} ) => { let mut ss = s.serialize_struct("", 3)?; ss.serialize_field("kind", "type_constraint")?; ss.serialize_field("constraint_keyword", &self.with(keyword))?; ss.serialize_field("constraint_type", &self.with(type_))?; ss.end() } SyntaxVariant::ContextConstraint (ContextConstraintChildren{keyword,ctx_list} ) => { let mut ss = s.serialize_struct("", 3)?; ss.serialize_field("kind", "context_constraint")?; ss.serialize_field("ctx_constraint_keyword", &self.with(keyword))?; ss.serialize_field("ctx_constraint_ctx_list", &self.with(ctx_list))?; ss.end() } SyntaxVariant::DarrayTypeSpecifier (DarrayTypeSpecifierChildren{keyword,left_angle,key,comma,value,trailing_comma,right_angle} ) => { let mut ss = s.serialize_struct("", 8)?; ss.serialize_field("kind", "darray_type_specifier")?; ss.serialize_field("darray_keyword", &self.with(keyword))?; ss.serialize_field("darray_left_angle", &self.with(left_angle))?; ss.serialize_field("darray_key", &self.with(key))?; ss.serialize_field("darray_comma", &self.with(comma))?; ss.serialize_field("darray_value", &self.with(value))?; ss.serialize_field("darray_trailing_comma", &self.with(trailing_comma))?; ss.serialize_field("darray_right_angle", &self.with(right_angle))?; ss.end() } 
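// Editorial note (not part of the generated source): every arm in this match
// follows the same shape. `serialize_struct("", N)` is called with N equal to
// the number of children plus one, the extra slot carrying the "kind" tag, and
// each child is emitted through `self.with(..)`, which wraps the child in the
// same serializing adapter as its parent so serde can recurse down the tree.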
SyntaxVariant::DictionaryTypeSpecifier (DictionaryTypeSpecifierChildren{keyword,left_angle,members,right_angle} ) => { let mut ss = s.serialize_struct("", 5)?; ss.serialize_field("kind", "dictionary_type_specifier")?; ss.serialize_field("dictionary_type_keyword", &self.with(keyword))?; ss.serialize_field("dictionary_type_left_angle", &self.with(left_angle))?; ss.serialize_field("dictionary_type_members", &self.with(members))?; ss.serialize_field("dictionary_type_right_angle", &self.with(right_angle))?; ss.end() } SyntaxVariant::ClosureTypeSpecifier (ClosureTypeSpecifierChildren{outer_left_paren,readonly_keyword,function_keyword,inner_left_paren,parameter_list,inner_right_paren,contexts,colon,readonly_return,return_type,outer_right_paren} ) => { let mut ss = s.serialize_struct("", 12)?; ss.serialize_field("kind", "closure_type_specifier")?; ss.serialize_field("closure_outer_left_paren", &self.with(outer_left_paren))?; ss.serialize_field("closure_readonly_keyword", &self.with(readonly_keyword))?; ss.serialize_field("closure_function_keyword", &self.with(function_keyword))?; ss.serialize_field("closure_inner_left_paren", &self.with(inner_left_paren))?; ss.serialize_field("closure_parameter_list", &self.with(parameter_list))?; ss.serialize_field("closure_inner_right_paren", &self.with(inner_right_paren))?; ss.serialize_field("closure_contexts", &self.with(contexts))?; ss.serialize_field("closure_colon", &self.with(colon))?; ss.serialize_field("closure_readonly_return", &self.with(readonly_return))?; ss.serialize_field("closure_return_type", &self.with(return_type))?; ss.serialize_field("closure_outer_right_paren", &self.with(outer_right_paren))?; ss.end() } SyntaxVariant::ClosureParameterTypeSpecifier (ClosureParameterTypeSpecifierChildren{call_convention,readonly,type_} ) => { let mut ss = s.serialize_struct("", 4)?; ss.serialize_field("kind", "closure_parameter_type_specifier")?; ss.serialize_field("closure_parameter_call_convention", &self.with(call_convention))?; ss.serialize_field("closure_parameter_readonly", &self.with(readonly))?; ss.serialize_field("closure_parameter_type", &self.with(type_))?; ss.end() } SyntaxVariant::TypeRefinement (TypeRefinementChildren{type_,keyword,left_brace,members,right_brace} ) => { let mut ss = s.serialize_struct("", 6)?; ss.serialize_field("kind", "type_refinement")?; ss.serialize_field("type_refinement_type", &self.with(type_))?; ss.serialize_field("type_refinement_keyword", &self.with(keyword))?; ss.serialize_field("type_refinement_left_brace", &self.with(left_brace))?; ss.serialize_field("type_refinement_members", &self.with(members))?; ss.serialize_field("type_refinement_right_brace", &self.with(right_brace))?; ss.end() } SyntaxVariant::TypeInRefinement (TypeInRefinementChildren{keyword,name,type_parameters,constraints,equal,type_} ) => { let mut ss = s.serialize_struct("", 7)?; ss.serialize_field("kind", "type_in_refinement")?; ss.serialize_field("type_in_refinement_keyword", &self.with(keyword))?; ss.serialize_field("type_in_refinement_name", &self.with(name))?; ss.serialize_field("type_in_refinement_type_parameters", &self.with(type_parameters))?; ss.serialize_field("type_in_refinement_constraints", &self.with(constraints))?; ss.serialize_field("type_in_refinement_equal", &self.with(equal))?; ss.serialize_field("type_in_refinement_type", &self.with(type_))?; ss.end() } SyntaxVariant::CtxInRefinement (CtxInRefinementChildren{keyword,name,type_parameters,constraints,equal,ctx_list} ) => { let mut ss = s.serialize_struct("", 7)?; 
ss.serialize_field("kind", "ctx_in_refinement")?; ss.serialize_field("ctx_in_refinement_keyword", &self.with(keyword))?; ss.serialize_field("ctx_in_refinement_name", &self.with(name))?; ss.serialize_field("ctx_in_refinement_type_parameters", &self.with(type_parameters))?; ss.serialize_field("ctx_in_refinement_constraints", &self.with(constraints))?; ss.serialize_field("ctx_in_refinement_equal", &self.with(equal))?; ss.serialize_field("ctx_in_refinement_ctx_list", &self.with(ctx_list))?; ss.end() } SyntaxVariant::ClassnameTypeSpecifier (ClassnameTypeSpecifierChildren{keyword,left_angle,type_,trailing_comma,right_angle} ) => { let mut ss = s.serialize_struct("", 6)?; ss.serialize_field("kind", "classname_type_specifier")?; ss.serialize_field("classname_keyword", &self.with(keyword))?; ss.serialize_field("classname_left_angle", &self.with(left_angle))?; ss.serialize_field("classname_type", &self.with(type_))?; ss.serialize_field("classname_trailing_comma", &self.with(trailing_comma))?; ss.serialize_field("classname_right_angle", &self.with(right_angle))?; ss.end() } SyntaxVariant::FieldSpecifier (FieldSpecifierChildren{question,name,arrow,type_} ) => { let mut ss = s.serialize_struct("", 5)?; ss.serialize_field("kind", "field_specifier")?; ss.serialize_field("field_question", &self.with(question))?; ss.serialize_field("field_name", &self.with(name))?; ss.serialize_field("field_arrow", &self.with(arrow))?; ss.serialize_field("field_type", &self.with(type_))?; ss.end() } SyntaxVariant::FieldInitializer (FieldInitializerChildren{name,arrow,value} ) => { let mut ss = s.serialize_struct("", 4)?; ss.serialize_field("kind", "field_initializer")?; ss.serialize_field("field_initializer_name", &self.with(name))?; ss.serialize_field("field_initializer_arrow", &self.with(arrow))?; ss.serialize_field("field_initializer_value", &self.with(value))?; ss.end() } SyntaxVariant::ShapeTypeSpecifier (ShapeTypeSpecifierChildren{keyword,left_paren,fields,ellipsis,right_paren} ) => { let mut ss = s.serialize_struct("", 6)?; ss.serialize_field("kind", "shape_type_specifier")?; ss.serialize_field("shape_type_keyword", &self.with(keyword))?; ss.serialize_field("shape_type_left_paren", &self.with(left_paren))?; ss.serialize_field("shape_type_fields", &self.with(fields))?; ss.serialize_field("shape_type_ellipsis", &self.with(ellipsis))?; ss.serialize_field("shape_type_right_paren", &self.with(right_paren))?; ss.end() } SyntaxVariant::ShapeExpression (ShapeExpressionChildren{keyword,left_paren,fields,right_paren} ) => { let mut ss = s.serialize_struct("", 5)?; ss.serialize_field("kind", "shape_expression")?; ss.serialize_field("shape_expression_keyword", &self.with(keyword))?; ss.serialize_field("shape_expression_left_paren", &self.with(left_paren))?; ss.serialize_field("shape_expression_fields", &self.with(fields))?; ss.serialize_field("shape_expression_right_paren", &self.with(right_paren))?; ss.end() } SyntaxVariant::TupleExpression (TupleExpressionChildren{keyword,left_paren,items,right_paren} ) => { let mut ss = s.serialize_struct("", 5)?; ss.serialize_field("kind", "tuple_expression")?; ss.serialize_field("tuple_expression_keyword", &self.with(keyword))?; ss.serialize_field("tuple_expression_left_paren", &self.with(left_paren))?; ss.serialize_field("tuple_expression_items", &self.with(items))?; ss.serialize_field("tuple_expression_right_paren", &self.with(right_paren))?; ss.end() } SyntaxVariant::GenericTypeSpecifier (GenericTypeSpecifierChildren{class_type,argument_list} ) => { let mut ss = s.serialize_struct("", 
3)?; ss.serialize_field("kind", "generic_type_specifier")?; ss.serialize_field("generic_class_type", &self.with(class_type))?; ss.serialize_field("generic_argument_list", &self.with(argument_list))?; ss.end() } SyntaxVariant::NullableTypeSpecifier (NullableTypeSpecifierChildren{question,type_} ) => { let mut ss = s.serialize_struct("", 3)?; ss.serialize_field("kind", "nullable_type_specifier")?; ss.serialize_field("nullable_question", &self.with(question))?; ss.serialize_field("nullable_type", &self.with(type_))?; ss.end() } SyntaxVariant::LikeTypeSpecifier (LikeTypeSpecifierChildren{tilde,type_} ) => { let mut ss = s.serialize_struct("", 3)?; ss.serialize_field("kind", "like_type_specifier")?; ss.serialize_field("like_tilde", &self.with(tilde))?; ss.serialize_field("like_type", &self.with(type_))?; ss.end() } SyntaxVariant::SoftTypeSpecifier (SoftTypeSpecifierChildren{at,type_} ) => { let mut ss = s.serialize_struct("", 3)?; ss.serialize_field("kind", "soft_type_specifier")?; ss.serialize_field("soft_at", &self.with(at))?; ss.serialize_field("soft_type", &self.with(type_))?; ss.end() } SyntaxVariant::AttributizedSpecifier (AttributizedSpecifierChildren{attribute_spec,type_} ) => { let mut ss = s.serialize_struct("", 3)?; ss.serialize_field("kind", "attributized_specifier")?; ss.serialize_field("attributized_specifier_attribute_spec", &self.with(attribute_spec))?; ss.serialize_field("attributized_specifier_type", &self.with(type_))?; ss.end() } SyntaxVariant::ReifiedTypeArgument (ReifiedTypeArgumentChildren{reified,type_} ) => { let mut ss = s.serialize_struct("", 3)?; ss.serialize_field("kind", "reified_type_argument")?; ss.serialize_field("reified_type_argument_reified", &self.with(reified))?; ss.serialize_field("reified_type_argument_type", &self.with(type_))?; ss.end() } SyntaxVariant::TypeArguments (TypeArgumentsChildren{left_angle,types,right_angle} ) => { let mut ss = s.serialize_struct("", 4)?; ss.serialize_field("kind", "type_arguments")?; ss.serialize_field("type_arguments_left_angle", &self.with(left_angle))?; ss.serialize_field("type_arguments_types", &self.with(types))?; ss.serialize_field("type_arguments_right_angle", &self.with(right_angle))?; ss.end() } SyntaxVariant::TypeParameters (TypeParametersChildren{left_angle,parameters,right_angle} ) => { let mut ss = s.serialize_struct("", 4)?; ss.serialize_field("kind", "type_parameters")?; ss.serialize_field("type_parameters_left_angle", &self.with(left_angle))?; ss.serialize_field("type_parameters_parameters", &self.with(parameters))?; ss.serialize_field("type_parameters_right_angle", &self.with(right_angle))?; ss.end() } SyntaxVariant::TupleTypeSpecifier (TupleTypeSpecifierChildren{left_paren,types,right_paren} ) => { let mut ss = s.serialize_struct("", 4)?; ss.serialize_field("kind", "tuple_type_specifier")?; ss.serialize_field("tuple_left_paren", &self.with(left_paren))?; ss.serialize_field("tuple_types", &self.with(types))?; ss.serialize_field("tuple_right_paren", &self.with(right_paren))?; ss.end() } SyntaxVariant::UnionTypeSpecifier (UnionTypeSpecifierChildren{left_paren,types,right_paren} ) => { let mut ss = s.serialize_struct("", 4)?; ss.serialize_field("kind", "union_type_specifier")?; ss.serialize_field("union_left_paren", &self.with(left_paren))?; ss.serialize_field("union_types", &self.with(types))?; ss.serialize_field("union_right_paren", &self.with(right_paren))?; ss.end() } SyntaxVariant::IntersectionTypeSpecifier (IntersectionTypeSpecifierChildren{left_paren,types,right_paren} ) => { let mut ss = 
s.serialize_struct("", 4)?; ss.serialize_field("kind", "intersection_type_specifier")?; ss.serialize_field("intersection_left_paren", &self.with(left_paren))?; ss.serialize_field("intersection_types", &self.with(types))?; ss.serialize_field("intersection_right_paren", &self.with(right_paren))?; ss.end() } SyntaxVariant::ErrorSyntax (ErrorSyntaxChildren{error} ) => { let mut ss = s.serialize_struct("", 2)?; ss.serialize_field("kind", "error")?; ss.serialize_field("error_error", &self.with(error))?; ss.end() } SyntaxVariant::ListItem (ListItemChildren{item,separator} ) => { let mut ss = s.serialize_struct("", 3)?; ss.serialize_field("kind", "list_item")?; ss.serialize_field("list_item", &self.with(item))?; ss.serialize_field("list_separator", &self.with(separator))?; ss.end() } SyntaxVariant::EnumClassLabelExpression (EnumClassLabelExpressionChildren{qualifier,hash,expression} ) => { let mut ss = s.serialize_struct("", 4)?; ss.serialize_field("kind", "enum_class_label")?; ss.serialize_field("enum_class_label_qualifier", &self.with(qualifier))?; ss.serialize_field("enum_class_label_hash", &self.with(hash))?; ss.serialize_field("enum_class_label_expression", &self.with(expression))?; ss.end() } SyntaxVariant::ModuleDeclaration (ModuleDeclarationChildren{attribute_spec,new_keyword,module_keyword,name,left_brace,exports,imports,right_brace} ) => { let mut ss = s.serialize_struct("", 9)?; ss.serialize_field("kind", "module_declaration")?; ss.serialize_field("module_declaration_attribute_spec", &self.with(attribute_spec))?; ss.serialize_field("module_declaration_new_keyword", &self.with(new_keyword))?; ss.serialize_field("module_declaration_module_keyword", &self.with(module_keyword))?; ss.serialize_field("module_declaration_name", &self.with(name))?; ss.serialize_field("module_declaration_left_brace", &self.with(left_brace))?; ss.serialize_field("module_declaration_exports", &self.with(exports))?; ss.serialize_field("module_declaration_imports", &self.with(imports))?; ss.serialize_field("module_declaration_right_brace", &self.with(right_brace))?; ss.end() } SyntaxVariant::ModuleExports (ModuleExportsChildren{exports_keyword,left_brace,exports,right_brace} ) => { let mut ss = s.serialize_struct("", 5)?; ss.serialize_field("kind", "module_exports")?; ss.serialize_field("module_exports_exports_keyword", &self.with(exports_keyword))?; ss.serialize_field("module_exports_left_brace", &self.with(left_brace))?; ss.serialize_field("module_exports_exports", &self.with(exports))?; ss.serialize_field("module_exports_right_brace", &self.with(right_brace))?; ss.end() } SyntaxVariant::ModuleImports (ModuleImportsChildren{imports_keyword,left_brace,imports,right_brace} ) => { let mut ss = s.serialize_struct("", 5)?; ss.serialize_field("kind", "module_imports")?; ss.serialize_field("module_imports_imports_keyword", &self.with(imports_keyword))?; ss.serialize_field("module_imports_left_brace", &self.with(left_brace))?; ss.serialize_field("module_imports_imports", &self.with(imports))?; ss.serialize_field("module_imports_right_brace", &self.with(right_brace))?; ss.end() } SyntaxVariant::ModuleMembershipDeclaration (ModuleMembershipDeclarationChildren{module_keyword,name,semicolon} ) => { let mut ss = s.serialize_struct("", 4)?; ss.serialize_field("kind", "module_membership_declaration")?; ss.serialize_field("module_membership_declaration_module_keyword", &self.with(module_keyword))?; ss.serialize_field("module_membership_declaration_name", &self.with(name))?; 
ss.serialize_field("module_membership_declaration_semicolon", &self.with(semicolon))?; ss.end() } SyntaxVariant::PackageExpression (PackageExpressionChildren{keyword,name} ) => { let mut ss = s.serialize_struct("", 3)?; ss.serialize_field("kind", "package_expression")?; ss.serialize_field("package_expression_keyword", &self.with(keyword))?; ss.serialize_field("package_expression_name", &self.with(name))?; ss.end() } } } }
Rust
hhvm/hphp/hack/src/parser/syntax_by_ref/syntax_type_impl_generated.rs
/**
 * Copyright (c) 2016, Facebook, Inc.
 * All rights reserved.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the "hack" directory of this source tree. An additional
 * grant of patent rights can be found in the PATENTS file in the same
 * directory.
 *
 **
 *
 * THIS FILE IS @generated; DO NOT EDIT IT
 * To regenerate this file, run
 *
 *   buck run //hphp/hack/src:generate_full_fidelity
 *
 **
 *
 */
use super::{ has_arena::HasArena, syntax::*, syntax_variant_generated::*, }; use crate::{ lexable_token::LexableToken, syntax::{SyntaxType, SyntaxValueType}, }; impl<'a, C, T, V> SyntaxType<C> for Syntax<'a, T, V> where T: LexableToken + Copy, V: SyntaxValueType<T>, C: HasArena<'a>, { fn make_end_of_file(ctx: &C, token: Self) -> Self { let syntax = SyntaxVariant::EndOfFile(ctx.get_arena().alloc(EndOfFileChildren { token, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_script(ctx: &C, declarations: Self) -> Self { let syntax = SyntaxVariant::Script(ctx.get_arena().alloc(ScriptChildren { declarations, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_qualified_name(ctx: &C, parts: Self) -> Self { let syntax = SyntaxVariant::QualifiedName(ctx.get_arena().alloc(QualifiedNameChildren { parts, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_module_name(ctx: &C, parts: Self) -> Self { let syntax = SyntaxVariant::ModuleName(ctx.get_arena().alloc(ModuleNameChildren { parts, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_simple_type_specifier(ctx: &C, specifier: Self) -> Self { let syntax = SyntaxVariant::SimpleTypeSpecifier(ctx.get_arena().alloc(SimpleTypeSpecifierChildren { specifier, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_literal_expression(ctx: &C, expression: Self) -> Self { let syntax = SyntaxVariant::LiteralExpression(ctx.get_arena().alloc(LiteralExpressionChildren { expression, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_prefixed_string_expression(ctx: &C, name: Self, str: Self) -> Self { let syntax = SyntaxVariant::PrefixedStringExpression(ctx.get_arena().alloc(PrefixedStringExpressionChildren { name, str, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_prefixed_code_expression(ctx: &C, prefix: Self, left_backtick: Self, body: Self, right_backtick: Self) -> Self { let syntax = SyntaxVariant::PrefixedCodeExpression(ctx.get_arena().alloc(PrefixedCodeExpressionChildren { prefix, left_backtick, body, right_backtick, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_variable_expression(ctx: &C, expression: Self) -> Self { let syntax = SyntaxVariant::VariableExpression(ctx.get_arena().alloc(VariableExpressionChildren { expression, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_pipe_variable_expression(ctx: &C, expression: Self) -> Self { let syntax = SyntaxVariant::PipeVariableExpression(ctx.get_arena().alloc(PipeVariableExpressionChildren { expression, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_file_attribute_specification(ctx:
&C, left_double_angle: Self, keyword: Self, colon: Self, attributes: Self, right_double_angle: Self) -> Self { let syntax = SyntaxVariant::FileAttributeSpecification(ctx.get_arena().alloc(FileAttributeSpecificationChildren { left_double_angle, keyword, colon, attributes, right_double_angle, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_enum_declaration(ctx: &C, attribute_spec: Self, modifiers: Self, keyword: Self, name: Self, colon: Self, base: Self, type_: Self, left_brace: Self, use_clauses: Self, enumerators: Self, right_brace: Self) -> Self { let syntax = SyntaxVariant::EnumDeclaration(ctx.get_arena().alloc(EnumDeclarationChildren { attribute_spec, modifiers, keyword, name, colon, base, type_, left_brace, use_clauses, enumerators, right_brace, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_enum_use(ctx: &C, keyword: Self, names: Self, semicolon: Self) -> Self { let syntax = SyntaxVariant::EnumUse(ctx.get_arena().alloc(EnumUseChildren { keyword, names, semicolon, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_enumerator(ctx: &C, name: Self, equal: Self, value: Self, semicolon: Self) -> Self { let syntax = SyntaxVariant::Enumerator(ctx.get_arena().alloc(EnumeratorChildren { name, equal, value, semicolon, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_enum_class_declaration(ctx: &C, attribute_spec: Self, modifiers: Self, enum_keyword: Self, class_keyword: Self, name: Self, colon: Self, base: Self, extends: Self, extends_list: Self, left_brace: Self, elements: Self, right_brace: Self) -> Self { let syntax = SyntaxVariant::EnumClassDeclaration(ctx.get_arena().alloc(EnumClassDeclarationChildren { attribute_spec, modifiers, enum_keyword, class_keyword, name, colon, base, extends, extends_list, left_brace, elements, right_brace, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_enum_class_enumerator(ctx: &C, modifiers: Self, type_: Self, name: Self, initializer: Self, semicolon: Self) -> Self { let syntax = SyntaxVariant::EnumClassEnumerator(ctx.get_arena().alloc(EnumClassEnumeratorChildren { modifiers, type_, name, initializer, semicolon, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_alias_declaration(ctx: &C, attribute_spec: Self, modifiers: Self, module_kw_opt: Self, keyword: Self, name: Self, generic_parameter: Self, constraint: Self, equal: Self, type_: Self, semicolon: Self) -> Self { let syntax = SyntaxVariant::AliasDeclaration(ctx.get_arena().alloc(AliasDeclarationChildren { attribute_spec, modifiers, module_kw_opt, keyword, name, generic_parameter, constraint, equal, type_, semicolon, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_context_alias_declaration(ctx: &C, attribute_spec: Self, keyword: Self, name: Self, generic_parameter: Self, as_constraint: Self, equal: Self, context: Self, semicolon: Self) -> Self { let syntax = SyntaxVariant::ContextAliasDeclaration(ctx.get_arena().alloc(ContextAliasDeclarationChildren { attribute_spec, keyword, name, generic_parameter, as_constraint, equal, context, semicolon, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); 
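// Editorial note (not part of the generated source): each `make_*` constructor
// in this impl follows the same three steps, visible above: allocate the
// children struct in the context's arena via `ctx.get_arena().alloc(..)`,
// aggregate the children's values with `V::from_values(..)`, then package the
// variant and value with `Self::make`.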
Self::make(syntax, value) } fn make_case_type_declaration(ctx: &C, attribute_spec: Self, modifiers: Self, case_keyword: Self, type_keyword: Self, name: Self, generic_parameter: Self, as_: Self, bounds: Self, equal: Self, variants: Self, semicolon: Self) -> Self { let syntax = SyntaxVariant::CaseTypeDeclaration(ctx.get_arena().alloc(CaseTypeDeclarationChildren { attribute_spec, modifiers, case_keyword, type_keyword, name, generic_parameter, as_, bounds, equal, variants, semicolon, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_case_type_variant(ctx: &C, bar: Self, type_: Self) -> Self { let syntax = SyntaxVariant::CaseTypeVariant(ctx.get_arena().alloc(CaseTypeVariantChildren { bar, type_, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_property_declaration(ctx: &C, attribute_spec: Self, modifiers: Self, type_: Self, declarators: Self, semicolon: Self) -> Self { let syntax = SyntaxVariant::PropertyDeclaration(ctx.get_arena().alloc(PropertyDeclarationChildren { attribute_spec, modifiers, type_, declarators, semicolon, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_property_declarator(ctx: &C, name: Self, initializer: Self) -> Self { let syntax = SyntaxVariant::PropertyDeclarator(ctx.get_arena().alloc(PropertyDeclaratorChildren { name, initializer, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_namespace_declaration(ctx: &C, header: Self, body: Self) -> Self { let syntax = SyntaxVariant::NamespaceDeclaration(ctx.get_arena().alloc(NamespaceDeclarationChildren { header, body, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_namespace_declaration_header(ctx: &C, keyword: Self, name: Self) -> Self { let syntax = SyntaxVariant::NamespaceDeclarationHeader(ctx.get_arena().alloc(NamespaceDeclarationHeaderChildren { keyword, name, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_namespace_body(ctx: &C, left_brace: Self, declarations: Self, right_brace: Self) -> Self { let syntax = SyntaxVariant::NamespaceBody(ctx.get_arena().alloc(NamespaceBodyChildren { left_brace, declarations, right_brace, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_namespace_empty_body(ctx: &C, semicolon: Self) -> Self { let syntax = SyntaxVariant::NamespaceEmptyBody(ctx.get_arena().alloc(NamespaceEmptyBodyChildren { semicolon, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_namespace_use_declaration(ctx: &C, keyword: Self, kind: Self, clauses: Self, semicolon: Self) -> Self { let syntax = SyntaxVariant::NamespaceUseDeclaration(ctx.get_arena().alloc(NamespaceUseDeclarationChildren { keyword, kind, clauses, semicolon, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_namespace_group_use_declaration(ctx: &C, keyword: Self, kind: Self, prefix: Self, left_brace: Self, clauses: Self, right_brace: Self, semicolon: Self) -> Self { let syntax = SyntaxVariant::NamespaceGroupUseDeclaration(ctx.get_arena().alloc(NamespaceGroupUseDeclarationChildren { keyword, kind, prefix, left_brace, clauses, right_brace, 
semicolon, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_namespace_use_clause(ctx: &C, clause_kind: Self, name: Self, as_: Self, alias: Self) -> Self { let syntax = SyntaxVariant::NamespaceUseClause(ctx.get_arena().alloc(NamespaceUseClauseChildren { clause_kind, name, as_, alias, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_function_declaration(ctx: &C, attribute_spec: Self, declaration_header: Self, body: Self) -> Self { let syntax = SyntaxVariant::FunctionDeclaration(ctx.get_arena().alloc(FunctionDeclarationChildren { attribute_spec, declaration_header, body, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_function_declaration_header(ctx: &C, modifiers: Self, keyword: Self, name: Self, type_parameter_list: Self, left_paren: Self, parameter_list: Self, right_paren: Self, contexts: Self, colon: Self, readonly_return: Self, type_: Self, where_clause: Self) -> Self { let syntax = SyntaxVariant::FunctionDeclarationHeader(ctx.get_arena().alloc(FunctionDeclarationHeaderChildren { modifiers, keyword, name, type_parameter_list, left_paren, parameter_list, right_paren, contexts, colon, readonly_return, type_, where_clause, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_contexts(ctx: &C, left_bracket: Self, types: Self, right_bracket: Self) -> Self { let syntax = SyntaxVariant::Contexts(ctx.get_arena().alloc(ContextsChildren { left_bracket, types, right_bracket, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_where_clause(ctx: &C, keyword: Self, constraints: Self) -> Self { let syntax = SyntaxVariant::WhereClause(ctx.get_arena().alloc(WhereClauseChildren { keyword, constraints, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_where_constraint(ctx: &C, left_type: Self, operator: Self, right_type: Self) -> Self { let syntax = SyntaxVariant::WhereConstraint(ctx.get_arena().alloc(WhereConstraintChildren { left_type, operator, right_type, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_methodish_declaration(ctx: &C, attribute: Self, function_decl_header: Self, function_body: Self, semicolon: Self) -> Self { let syntax = SyntaxVariant::MethodishDeclaration(ctx.get_arena().alloc(MethodishDeclarationChildren { attribute, function_decl_header, function_body, semicolon, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_methodish_trait_resolution(ctx: &C, attribute: Self, function_decl_header: Self, equal: Self, name: Self, semicolon: Self) -> Self { let syntax = SyntaxVariant::MethodishTraitResolution(ctx.get_arena().alloc(MethodishTraitResolutionChildren { attribute, function_decl_header, equal, name, semicolon, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_classish_declaration(ctx: &C, attribute: Self, modifiers: Self, xhp: Self, keyword: Self, name: Self, type_parameters: Self, extends_keyword: Self, extends_list: Self, implements_keyword: Self, implements_list: Self, where_clause: Self, body: Self) -> Self { let syntax = 
SyntaxVariant::ClassishDeclaration(ctx.get_arena().alloc(ClassishDeclarationChildren { attribute, modifiers, xhp, keyword, name, type_parameters, extends_keyword, extends_list, implements_keyword, implements_list, where_clause, body, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_classish_body(ctx: &C, left_brace: Self, elements: Self, right_brace: Self) -> Self { let syntax = SyntaxVariant::ClassishBody(ctx.get_arena().alloc(ClassishBodyChildren { left_brace, elements, right_brace, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_trait_use(ctx: &C, keyword: Self, names: Self, semicolon: Self) -> Self { let syntax = SyntaxVariant::TraitUse(ctx.get_arena().alloc(TraitUseChildren { keyword, names, semicolon, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_require_clause(ctx: &C, keyword: Self, kind: Self, name: Self, semicolon: Self) -> Self { let syntax = SyntaxVariant::RequireClause(ctx.get_arena().alloc(RequireClauseChildren { keyword, kind, name, semicolon, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_const_declaration(ctx: &C, attribute_spec: Self, modifiers: Self, keyword: Self, type_specifier: Self, declarators: Self, semicolon: Self) -> Self { let syntax = SyntaxVariant::ConstDeclaration(ctx.get_arena().alloc(ConstDeclarationChildren { attribute_spec, modifiers, keyword, type_specifier, declarators, semicolon, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_constant_declarator(ctx: &C, name: Self, initializer: Self) -> Self { let syntax = SyntaxVariant::ConstantDeclarator(ctx.get_arena().alloc(ConstantDeclaratorChildren { name, initializer, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_type_const_declaration(ctx: &C, attribute_spec: Self, modifiers: Self, keyword: Self, type_keyword: Self, name: Self, type_parameters: Self, type_constraints: Self, equal: Self, type_specifier: Self, semicolon: Self) -> Self { let syntax = SyntaxVariant::TypeConstDeclaration(ctx.get_arena().alloc(TypeConstDeclarationChildren { attribute_spec, modifiers, keyword, type_keyword, name, type_parameters, type_constraints, equal, type_specifier, semicolon, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_context_const_declaration(ctx: &C, modifiers: Self, const_keyword: Self, ctx_keyword: Self, name: Self, type_parameters: Self, constraint: Self, equal: Self, ctx_list: Self, semicolon: Self) -> Self { let syntax = SyntaxVariant::ContextConstDeclaration(ctx.get_arena().alloc(ContextConstDeclarationChildren { modifiers, const_keyword, ctx_keyword, name, type_parameters, constraint, equal, ctx_list, semicolon, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_decorated_expression(ctx: &C, decorator: Self, expression: Self) -> Self { let syntax = SyntaxVariant::DecoratedExpression(ctx.get_arena().alloc(DecoratedExpressionChildren { decorator, expression, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_parameter_declaration(ctx: &C, attribute: Self, visibility: Self, call_convention: 
Self, readonly: Self, type_: Self, name: Self, default_value: Self) -> Self { let syntax = SyntaxVariant::ParameterDeclaration(ctx.get_arena().alloc(ParameterDeclarationChildren { attribute, visibility, call_convention, readonly, type_, name, default_value, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_variadic_parameter(ctx: &C, call_convention: Self, type_: Self, ellipsis: Self) -> Self { let syntax = SyntaxVariant::VariadicParameter(ctx.get_arena().alloc(VariadicParameterChildren { call_convention, type_, ellipsis, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_old_attribute_specification(ctx: &C, left_double_angle: Self, attributes: Self, right_double_angle: Self) -> Self { let syntax = SyntaxVariant::OldAttributeSpecification(ctx.get_arena().alloc(OldAttributeSpecificationChildren { left_double_angle, attributes, right_double_angle, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_attribute_specification(ctx: &C, attributes: Self) -> Self { let syntax = SyntaxVariant::AttributeSpecification(ctx.get_arena().alloc(AttributeSpecificationChildren { attributes, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_attribute(ctx: &C, at: Self, attribute_name: Self) -> Self { let syntax = SyntaxVariant::Attribute(ctx.get_arena().alloc(AttributeChildren { at, attribute_name, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_inclusion_expression(ctx: &C, require: Self, filename: Self) -> Self { let syntax = SyntaxVariant::InclusionExpression(ctx.get_arena().alloc(InclusionExpressionChildren { require, filename, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_inclusion_directive(ctx: &C, expression: Self, semicolon: Self) -> Self { let syntax = SyntaxVariant::InclusionDirective(ctx.get_arena().alloc(InclusionDirectiveChildren { expression, semicolon, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_compound_statement(ctx: &C, left_brace: Self, statements: Self, right_brace: Self) -> Self { let syntax = SyntaxVariant::CompoundStatement(ctx.get_arena().alloc(CompoundStatementChildren { left_brace, statements, right_brace, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_expression_statement(ctx: &C, expression: Self, semicolon: Self) -> Self { let syntax = SyntaxVariant::ExpressionStatement(ctx.get_arena().alloc(ExpressionStatementChildren { expression, semicolon, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_markup_section(ctx: &C, hashbang: Self, suffix: Self) -> Self { let syntax = SyntaxVariant::MarkupSection(ctx.get_arena().alloc(MarkupSectionChildren { hashbang, suffix, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_markup_suffix(ctx: &C, less_than_question: Self, name: Self) -> Self { let syntax = SyntaxVariant::MarkupSuffix(ctx.get_arena().alloc(MarkupSuffixChildren { less_than_question, name, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, 
value) } fn make_unset_statement(ctx: &C, keyword: Self, left_paren: Self, variables: Self, right_paren: Self, semicolon: Self) -> Self { let syntax = SyntaxVariant::UnsetStatement(ctx.get_arena().alloc(UnsetStatementChildren { keyword, left_paren, variables, right_paren, semicolon, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_declare_local_statement(ctx: &C, keyword: Self, variable: Self, colon: Self, type_: Self, initializer: Self, semicolon: Self) -> Self { let syntax = SyntaxVariant::DeclareLocalStatement(ctx.get_arena().alloc(DeclareLocalStatementChildren { keyword, variable, colon, type_, initializer, semicolon, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_using_statement_block_scoped(ctx: &C, await_keyword: Self, using_keyword: Self, left_paren: Self, expressions: Self, right_paren: Self, body: Self) -> Self { let syntax = SyntaxVariant::UsingStatementBlockScoped(ctx.get_arena().alloc(UsingStatementBlockScopedChildren { await_keyword, using_keyword, left_paren, expressions, right_paren, body, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_using_statement_function_scoped(ctx: &C, await_keyword: Self, using_keyword: Self, expression: Self, semicolon: Self) -> Self { let syntax = SyntaxVariant::UsingStatementFunctionScoped(ctx.get_arena().alloc(UsingStatementFunctionScopedChildren { await_keyword, using_keyword, expression, semicolon, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_while_statement(ctx: &C, keyword: Self, left_paren: Self, condition: Self, right_paren: Self, body: Self) -> Self { let syntax = SyntaxVariant::WhileStatement(ctx.get_arena().alloc(WhileStatementChildren { keyword, left_paren, condition, right_paren, body, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_if_statement(ctx: &C, keyword: Self, left_paren: Self, condition: Self, right_paren: Self, statement: Self, else_clause: Self) -> Self { let syntax = SyntaxVariant::IfStatement(ctx.get_arena().alloc(IfStatementChildren { keyword, left_paren, condition, right_paren, statement, else_clause, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_else_clause(ctx: &C, keyword: Self, statement: Self) -> Self { let syntax = SyntaxVariant::ElseClause(ctx.get_arena().alloc(ElseClauseChildren { keyword, statement, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_try_statement(ctx: &C, keyword: Self, compound_statement: Self, catch_clauses: Self, finally_clause: Self) -> Self { let syntax = SyntaxVariant::TryStatement(ctx.get_arena().alloc(TryStatementChildren { keyword, compound_statement, catch_clauses, finally_clause, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_catch_clause(ctx: &C, keyword: Self, left_paren: Self, type_: Self, variable: Self, right_paren: Self, body: Self) -> Self { let syntax = SyntaxVariant::CatchClause(ctx.get_arena().alloc(CatchClauseChildren { keyword, left_paren, type_, variable, right_paren, body, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn 
make_finally_clause(ctx: &C, keyword: Self, body: Self) -> Self { let syntax = SyntaxVariant::FinallyClause(ctx.get_arena().alloc(FinallyClauseChildren { keyword, body, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_do_statement(ctx: &C, keyword: Self, body: Self, while_keyword: Self, left_paren: Self, condition: Self, right_paren: Self, semicolon: Self) -> Self { let syntax = SyntaxVariant::DoStatement(ctx.get_arena().alloc(DoStatementChildren { keyword, body, while_keyword, left_paren, condition, right_paren, semicolon, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_for_statement(ctx: &C, keyword: Self, left_paren: Self, initializer: Self, first_semicolon: Self, control: Self, second_semicolon: Self, end_of_loop: Self, right_paren: Self, body: Self) -> Self { let syntax = SyntaxVariant::ForStatement(ctx.get_arena().alloc(ForStatementChildren { keyword, left_paren, initializer, first_semicolon, control, second_semicolon, end_of_loop, right_paren, body, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_foreach_statement(ctx: &C, keyword: Self, left_paren: Self, collection: Self, await_keyword: Self, as_: Self, key: Self, arrow: Self, value: Self, right_paren: Self, body: Self) -> Self { let syntax = SyntaxVariant::ForeachStatement(ctx.get_arena().alloc(ForeachStatementChildren { keyword, left_paren, collection, await_keyword, as_, key, arrow, value, right_paren, body, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_switch_statement(ctx: &C, keyword: Self, left_paren: Self, expression: Self, right_paren: Self, left_brace: Self, sections: Self, right_brace: Self) -> Self { let syntax = SyntaxVariant::SwitchStatement(ctx.get_arena().alloc(SwitchStatementChildren { keyword, left_paren, expression, right_paren, left_brace, sections, right_brace, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_switch_section(ctx: &C, labels: Self, statements: Self, fallthrough: Self) -> Self { let syntax = SyntaxVariant::SwitchSection(ctx.get_arena().alloc(SwitchSectionChildren { labels, statements, fallthrough, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_switch_fallthrough(ctx: &C, keyword: Self, semicolon: Self) -> Self { let syntax = SyntaxVariant::SwitchFallthrough(ctx.get_arena().alloc(SwitchFallthroughChildren { keyword, semicolon, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_case_label(ctx: &C, keyword: Self, expression: Self, colon: Self) -> Self { let syntax = SyntaxVariant::CaseLabel(ctx.get_arena().alloc(CaseLabelChildren { keyword, expression, colon, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_default_label(ctx: &C, keyword: Self, colon: Self) -> Self { let syntax = SyntaxVariant::DefaultLabel(ctx.get_arena().alloc(DefaultLabelChildren { keyword, colon, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_match_statement(ctx: &C, keyword: Self, left_paren: Self, expression: Self, right_paren: Self, left_brace: Self, arms: Self, right_brace: Self) -> Self { let syntax 
= SyntaxVariant::MatchStatement(ctx.get_arena().alloc(MatchStatementChildren { keyword, left_paren, expression, right_paren, left_brace, arms, right_brace, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_match_statement_arm(ctx: &C, pattern: Self, arrow: Self, body: Self) -> Self { let syntax = SyntaxVariant::MatchStatementArm(ctx.get_arena().alloc(MatchStatementArmChildren { pattern, arrow, body, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_return_statement(ctx: &C, keyword: Self, expression: Self, semicolon: Self) -> Self { let syntax = SyntaxVariant::ReturnStatement(ctx.get_arena().alloc(ReturnStatementChildren { keyword, expression, semicolon, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_yield_break_statement(ctx: &C, keyword: Self, break_: Self, semicolon: Self) -> Self { let syntax = SyntaxVariant::YieldBreakStatement(ctx.get_arena().alloc(YieldBreakStatementChildren { keyword, break_, semicolon, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_throw_statement(ctx: &C, keyword: Self, expression: Self, semicolon: Self) -> Self { let syntax = SyntaxVariant::ThrowStatement(ctx.get_arena().alloc(ThrowStatementChildren { keyword, expression, semicolon, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_break_statement(ctx: &C, keyword: Self, semicolon: Self) -> Self { let syntax = SyntaxVariant::BreakStatement(ctx.get_arena().alloc(BreakStatementChildren { keyword, semicolon, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_continue_statement(ctx: &C, keyword: Self, semicolon: Self) -> Self { let syntax = SyntaxVariant::ContinueStatement(ctx.get_arena().alloc(ContinueStatementChildren { keyword, semicolon, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_echo_statement(ctx: &C, keyword: Self, expressions: Self, semicolon: Self) -> Self { let syntax = SyntaxVariant::EchoStatement(ctx.get_arena().alloc(EchoStatementChildren { keyword, expressions, semicolon, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_concurrent_statement(ctx: &C, keyword: Self, statement: Self) -> Self { let syntax = SyntaxVariant::ConcurrentStatement(ctx.get_arena().alloc(ConcurrentStatementChildren { keyword, statement, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_simple_initializer(ctx: &C, equal: Self, value: Self) -> Self { let syntax = SyntaxVariant::SimpleInitializer(ctx.get_arena().alloc(SimpleInitializerChildren { equal, value, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_anonymous_class(ctx: &C, class_keyword: Self, left_paren: Self, argument_list: Self, right_paren: Self, extends_keyword: Self, extends_list: Self, implements_keyword: Self, implements_list: Self, body: Self) -> Self { let syntax = SyntaxVariant::AnonymousClass(ctx.get_arena().alloc(AnonymousClassChildren { class_keyword, left_paren, argument_list, right_paren, extends_keyword, extends_list, implements_keyword, implements_list, body, })); 
let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_anonymous_function(ctx: &C, attribute_spec: Self, async_keyword: Self, function_keyword: Self, left_paren: Self, parameters: Self, right_paren: Self, ctx_list: Self, colon: Self, readonly_return: Self, type_: Self, use_: Self, body: Self) -> Self { let syntax = SyntaxVariant::AnonymousFunction(ctx.get_arena().alloc(AnonymousFunctionChildren { attribute_spec, async_keyword, function_keyword, left_paren, parameters, right_paren, ctx_list, colon, readonly_return, type_, use_, body, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_anonymous_function_use_clause(ctx: &C, keyword: Self, left_paren: Self, variables: Self, right_paren: Self) -> Self { let syntax = SyntaxVariant::AnonymousFunctionUseClause(ctx.get_arena().alloc(AnonymousFunctionUseClauseChildren { keyword, left_paren, variables, right_paren, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_variable_pattern(ctx: &C, variable: Self) -> Self { let syntax = SyntaxVariant::VariablePattern(ctx.get_arena().alloc(VariablePatternChildren { variable, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_constructor_pattern(ctx: &C, constructor: Self, left_paren: Self, members: Self, right_paren: Self) -> Self { let syntax = SyntaxVariant::ConstructorPattern(ctx.get_arena().alloc(ConstructorPatternChildren { constructor, left_paren, members, right_paren, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_refinement_pattern(ctx: &C, variable: Self, colon: Self, specifier: Self) -> Self { let syntax = SyntaxVariant::RefinementPattern(ctx.get_arena().alloc(RefinementPatternChildren { variable, colon, specifier, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_lambda_expression(ctx: &C, attribute_spec: Self, async_: Self, signature: Self, arrow: Self, body: Self) -> Self { let syntax = SyntaxVariant::LambdaExpression(ctx.get_arena().alloc(LambdaExpressionChildren { attribute_spec, async_, signature, arrow, body, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_lambda_signature(ctx: &C, left_paren: Self, parameters: Self, right_paren: Self, contexts: Self, colon: Self, readonly_return: Self, type_: Self) -> Self { let syntax = SyntaxVariant::LambdaSignature(ctx.get_arena().alloc(LambdaSignatureChildren { left_paren, parameters, right_paren, contexts, colon, readonly_return, type_, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_cast_expression(ctx: &C, left_paren: Self, type_: Self, right_paren: Self, operand: Self) -> Self { let syntax = SyntaxVariant::CastExpression(ctx.get_arena().alloc(CastExpressionChildren { left_paren, type_, right_paren, operand, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_scope_resolution_expression(ctx: &C, qualifier: Self, operator: Self, name: Self) -> Self { let syntax = SyntaxVariant::ScopeResolutionExpression(ctx.get_arena().alloc(ScopeResolutionExpressionChildren { qualifier, operator, name, })); let value = 
V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_member_selection_expression(ctx: &C, object: Self, operator: Self, name: Self) -> Self { let syntax = SyntaxVariant::MemberSelectionExpression(ctx.get_arena().alloc(MemberSelectionExpressionChildren { object, operator, name, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_safe_member_selection_expression(ctx: &C, object: Self, operator: Self, name: Self) -> Self { let syntax = SyntaxVariant::SafeMemberSelectionExpression(ctx.get_arena().alloc(SafeMemberSelectionExpressionChildren { object, operator, name, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_embedded_member_selection_expression(ctx: &C, object: Self, operator: Self, name: Self) -> Self { let syntax = SyntaxVariant::EmbeddedMemberSelectionExpression(ctx.get_arena().alloc(EmbeddedMemberSelectionExpressionChildren { object, operator, name, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_yield_expression(ctx: &C, keyword: Self, operand: Self) -> Self { let syntax = SyntaxVariant::YieldExpression(ctx.get_arena().alloc(YieldExpressionChildren { keyword, operand, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_prefix_unary_expression(ctx: &C, operator: Self, operand: Self) -> Self { let syntax = SyntaxVariant::PrefixUnaryExpression(ctx.get_arena().alloc(PrefixUnaryExpressionChildren { operator, operand, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_postfix_unary_expression(ctx: &C, operand: Self, operator: Self) -> Self { let syntax = SyntaxVariant::PostfixUnaryExpression(ctx.get_arena().alloc(PostfixUnaryExpressionChildren { operand, operator, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_binary_expression(ctx: &C, left_operand: Self, operator: Self, right_operand: Self) -> Self { let syntax = SyntaxVariant::BinaryExpression(ctx.get_arena().alloc(BinaryExpressionChildren { left_operand, operator, right_operand, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_is_expression(ctx: &C, left_operand: Self, operator: Self, right_operand: Self) -> Self { let syntax = SyntaxVariant::IsExpression(ctx.get_arena().alloc(IsExpressionChildren { left_operand, operator, right_operand, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_as_expression(ctx: &C, left_operand: Self, operator: Self, right_operand: Self) -> Self { let syntax = SyntaxVariant::AsExpression(ctx.get_arena().alloc(AsExpressionChildren { left_operand, operator, right_operand, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_nullable_as_expression(ctx: &C, left_operand: Self, operator: Self, right_operand: Self) -> Self { let syntax = SyntaxVariant::NullableAsExpression(ctx.get_arena().alloc(NullableAsExpressionChildren { left_operand, operator, right_operand, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_upcast_expression(ctx: &C, left_operand: Self, operator: Self, right_operand: 
Self) -> Self { let syntax = SyntaxVariant::UpcastExpression(ctx.get_arena().alloc(UpcastExpressionChildren { left_operand, operator, right_operand, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_conditional_expression(ctx: &C, test: Self, question: Self, consequence: Self, colon: Self, alternative: Self) -> Self { let syntax = SyntaxVariant::ConditionalExpression(ctx.get_arena().alloc(ConditionalExpressionChildren { test, question, consequence, colon, alternative, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_eval_expression(ctx: &C, keyword: Self, left_paren: Self, argument: Self, right_paren: Self) -> Self { let syntax = SyntaxVariant::EvalExpression(ctx.get_arena().alloc(EvalExpressionChildren { keyword, left_paren, argument, right_paren, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_isset_expression(ctx: &C, keyword: Self, left_paren: Self, argument_list: Self, right_paren: Self) -> Self { let syntax = SyntaxVariant::IssetExpression(ctx.get_arena().alloc(IssetExpressionChildren { keyword, left_paren, argument_list, right_paren, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_function_call_expression(ctx: &C, receiver: Self, type_args: Self, left_paren: Self, argument_list: Self, right_paren: Self) -> Self { let syntax = SyntaxVariant::FunctionCallExpression(ctx.get_arena().alloc(FunctionCallExpressionChildren { receiver, type_args, left_paren, argument_list, right_paren, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_function_pointer_expression(ctx: &C, receiver: Self, type_args: Self) -> Self { let syntax = SyntaxVariant::FunctionPointerExpression(ctx.get_arena().alloc(FunctionPointerExpressionChildren { receiver, type_args, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_parenthesized_expression(ctx: &C, left_paren: Self, expression: Self, right_paren: Self) -> Self { let syntax = SyntaxVariant::ParenthesizedExpression(ctx.get_arena().alloc(ParenthesizedExpressionChildren { left_paren, expression, right_paren, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_braced_expression(ctx: &C, left_brace: Self, expression: Self, right_brace: Self) -> Self { let syntax = SyntaxVariant::BracedExpression(ctx.get_arena().alloc(BracedExpressionChildren { left_brace, expression, right_brace, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_et_splice_expression(ctx: &C, dollar: Self, left_brace: Self, expression: Self, right_brace: Self) -> Self { let syntax = SyntaxVariant::ETSpliceExpression(ctx.get_arena().alloc(ETSpliceExpressionChildren { dollar, left_brace, expression, right_brace, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_embedded_braced_expression(ctx: &C, left_brace: Self, expression: Self, right_brace: Self) -> Self { let syntax = SyntaxVariant::EmbeddedBracedExpression(ctx.get_arena().alloc(EmbeddedBracedExpressionChildren { left_brace, expression, right_brace, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); 
Self::make(syntax, value) } fn make_list_expression(ctx: &C, keyword: Self, left_paren: Self, members: Self, right_paren: Self) -> Self { let syntax = SyntaxVariant::ListExpression(ctx.get_arena().alloc(ListExpressionChildren { keyword, left_paren, members, right_paren, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_collection_literal_expression(ctx: &C, name: Self, left_brace: Self, initializers: Self, right_brace: Self) -> Self { let syntax = SyntaxVariant::CollectionLiteralExpression(ctx.get_arena().alloc(CollectionLiteralExpressionChildren { name, left_brace, initializers, right_brace, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_object_creation_expression(ctx: &C, new_keyword: Self, object: Self) -> Self { let syntax = SyntaxVariant::ObjectCreationExpression(ctx.get_arena().alloc(ObjectCreationExpressionChildren { new_keyword, object, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_constructor_call(ctx: &C, type_: Self, left_paren: Self, argument_list: Self, right_paren: Self) -> Self { let syntax = SyntaxVariant::ConstructorCall(ctx.get_arena().alloc(ConstructorCallChildren { type_, left_paren, argument_list, right_paren, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_darray_intrinsic_expression(ctx: &C, keyword: Self, explicit_type: Self, left_bracket: Self, members: Self, right_bracket: Self) -> Self { let syntax = SyntaxVariant::DarrayIntrinsicExpression(ctx.get_arena().alloc(DarrayIntrinsicExpressionChildren { keyword, explicit_type, left_bracket, members, right_bracket, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_dictionary_intrinsic_expression(ctx: &C, keyword: Self, explicit_type: Self, left_bracket: Self, members: Self, right_bracket: Self) -> Self { let syntax = SyntaxVariant::DictionaryIntrinsicExpression(ctx.get_arena().alloc(DictionaryIntrinsicExpressionChildren { keyword, explicit_type, left_bracket, members, right_bracket, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_keyset_intrinsic_expression(ctx: &C, keyword: Self, explicit_type: Self, left_bracket: Self, members: Self, right_bracket: Self) -> Self { let syntax = SyntaxVariant::KeysetIntrinsicExpression(ctx.get_arena().alloc(KeysetIntrinsicExpressionChildren { keyword, explicit_type, left_bracket, members, right_bracket, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_varray_intrinsic_expression(ctx: &C, keyword: Self, explicit_type: Self, left_bracket: Self, members: Self, right_bracket: Self) -> Self { let syntax = SyntaxVariant::VarrayIntrinsicExpression(ctx.get_arena().alloc(VarrayIntrinsicExpressionChildren { keyword, explicit_type, left_bracket, members, right_bracket, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_vector_intrinsic_expression(ctx: &C, keyword: Self, explicit_type: Self, left_bracket: Self, members: Self, right_bracket: Self) -> Self { let syntax = SyntaxVariant::VectorIntrinsicExpression(ctx.get_arena().alloc(VectorIntrinsicExpressionChildren { keyword, explicit_type, left_bracket, members, right_bracket, })); let value = 
V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_element_initializer(ctx: &C, key: Self, arrow: Self, value: Self) -> Self { let syntax = SyntaxVariant::ElementInitializer(ctx.get_arena().alloc(ElementInitializerChildren { key, arrow, value, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_subscript_expression(ctx: &C, receiver: Self, left_bracket: Self, index: Self, right_bracket: Self) -> Self { let syntax = SyntaxVariant::SubscriptExpression(ctx.get_arena().alloc(SubscriptExpressionChildren { receiver, left_bracket, index, right_bracket, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_embedded_subscript_expression(ctx: &C, receiver: Self, left_bracket: Self, index: Self, right_bracket: Self) -> Self { let syntax = SyntaxVariant::EmbeddedSubscriptExpression(ctx.get_arena().alloc(EmbeddedSubscriptExpressionChildren { receiver, left_bracket, index, right_bracket, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_awaitable_creation_expression(ctx: &C, attribute_spec: Self, async_: Self, compound_statement: Self) -> Self { let syntax = SyntaxVariant::AwaitableCreationExpression(ctx.get_arena().alloc(AwaitableCreationExpressionChildren { attribute_spec, async_, compound_statement, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_xhp_children_declaration(ctx: &C, keyword: Self, expression: Self, semicolon: Self) -> Self { let syntax = SyntaxVariant::XHPChildrenDeclaration(ctx.get_arena().alloc(XHPChildrenDeclarationChildren { keyword, expression, semicolon, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_xhp_children_parenthesized_list(ctx: &C, left_paren: Self, xhp_children: Self, right_paren: Self) -> Self { let syntax = SyntaxVariant::XHPChildrenParenthesizedList(ctx.get_arena().alloc(XHPChildrenParenthesizedListChildren { left_paren, xhp_children, right_paren, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_xhp_category_declaration(ctx: &C, keyword: Self, categories: Self, semicolon: Self) -> Self { let syntax = SyntaxVariant::XHPCategoryDeclaration(ctx.get_arena().alloc(XHPCategoryDeclarationChildren { keyword, categories, semicolon, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_xhp_enum_type(ctx: &C, like: Self, keyword: Self, left_brace: Self, values: Self, right_brace: Self) -> Self { let syntax = SyntaxVariant::XHPEnumType(ctx.get_arena().alloc(XHPEnumTypeChildren { like, keyword, left_brace, values, right_brace, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_xhp_lateinit(ctx: &C, at: Self, keyword: Self) -> Self { let syntax = SyntaxVariant::XHPLateinit(ctx.get_arena().alloc(XHPLateinitChildren { at, keyword, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_xhp_required(ctx: &C, at: Self, keyword: Self) -> Self { let syntax = SyntaxVariant::XHPRequired(ctx.get_arena().alloc(XHPRequiredChildren { at, keyword, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); 
Self::make(syntax, value) } fn make_xhp_class_attribute_declaration(ctx: &C, keyword: Self, attributes: Self, semicolon: Self) -> Self { let syntax = SyntaxVariant::XHPClassAttributeDeclaration(ctx.get_arena().alloc(XHPClassAttributeDeclarationChildren { keyword, attributes, semicolon, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_xhp_class_attribute(ctx: &C, type_: Self, name: Self, initializer: Self, required: Self) -> Self { let syntax = SyntaxVariant::XHPClassAttribute(ctx.get_arena().alloc(XHPClassAttributeChildren { type_, name, initializer, required, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_xhp_simple_class_attribute(ctx: &C, type_: Self) -> Self { let syntax = SyntaxVariant::XHPSimpleClassAttribute(ctx.get_arena().alloc(XHPSimpleClassAttributeChildren { type_, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_xhp_simple_attribute(ctx: &C, name: Self, equal: Self, expression: Self) -> Self { let syntax = SyntaxVariant::XHPSimpleAttribute(ctx.get_arena().alloc(XHPSimpleAttributeChildren { name, equal, expression, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_xhp_spread_attribute(ctx: &C, left_brace: Self, spread_operator: Self, expression: Self, right_brace: Self) -> Self { let syntax = SyntaxVariant::XHPSpreadAttribute(ctx.get_arena().alloc(XHPSpreadAttributeChildren { left_brace, spread_operator, expression, right_brace, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_xhp_open(ctx: &C, left_angle: Self, name: Self, attributes: Self, right_angle: Self) -> Self { let syntax = SyntaxVariant::XHPOpen(ctx.get_arena().alloc(XHPOpenChildren { left_angle, name, attributes, right_angle, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_xhp_expression(ctx: &C, open: Self, body: Self, close: Self) -> Self { let syntax = SyntaxVariant::XHPExpression(ctx.get_arena().alloc(XHPExpressionChildren { open, body, close, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_xhp_close(ctx: &C, left_angle: Self, name: Self, right_angle: Self) -> Self { let syntax = SyntaxVariant::XHPClose(ctx.get_arena().alloc(XHPCloseChildren { left_angle, name, right_angle, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_type_constant(ctx: &C, left_type: Self, separator: Self, right_type: Self) -> Self { let syntax = SyntaxVariant::TypeConstant(ctx.get_arena().alloc(TypeConstantChildren { left_type, separator, right_type, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_vector_type_specifier(ctx: &C, keyword: Self, left_angle: Self, type_: Self, trailing_comma: Self, right_angle: Self) -> Self { let syntax = SyntaxVariant::VectorTypeSpecifier(ctx.get_arena().alloc(VectorTypeSpecifierChildren { keyword, left_angle, type_, trailing_comma, right_angle, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_keyset_type_specifier(ctx: &C, keyword: Self, left_angle: Self, type_: Self, trailing_comma: Self, right_angle: Self) -> Self { 
let syntax = SyntaxVariant::KeysetTypeSpecifier(ctx.get_arena().alloc(KeysetTypeSpecifierChildren { keyword, left_angle, type_, trailing_comma, right_angle, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_tuple_type_explicit_specifier(ctx: &C, keyword: Self, left_angle: Self, types: Self, right_angle: Self) -> Self { let syntax = SyntaxVariant::TupleTypeExplicitSpecifier(ctx.get_arena().alloc(TupleTypeExplicitSpecifierChildren { keyword, left_angle, types, right_angle, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_varray_type_specifier(ctx: &C, keyword: Self, left_angle: Self, type_: Self, trailing_comma: Self, right_angle: Self) -> Self { let syntax = SyntaxVariant::VarrayTypeSpecifier(ctx.get_arena().alloc(VarrayTypeSpecifierChildren { keyword, left_angle, type_, trailing_comma, right_angle, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_function_ctx_type_specifier(ctx: &C, keyword: Self, variable: Self) -> Self { let syntax = SyntaxVariant::FunctionCtxTypeSpecifier(ctx.get_arena().alloc(FunctionCtxTypeSpecifierChildren { keyword, variable, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_type_parameter(ctx: &C, attribute_spec: Self, reified: Self, variance: Self, name: Self, param_params: Self, constraints: Self) -> Self { let syntax = SyntaxVariant::TypeParameter(ctx.get_arena().alloc(TypeParameterChildren { attribute_spec, reified, variance, name, param_params, constraints, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_type_constraint(ctx: &C, keyword: Self, type_: Self) -> Self { let syntax = SyntaxVariant::TypeConstraint(ctx.get_arena().alloc(TypeConstraintChildren { keyword, type_, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_context_constraint(ctx: &C, keyword: Self, ctx_list: Self) -> Self { let syntax = SyntaxVariant::ContextConstraint(ctx.get_arena().alloc(ContextConstraintChildren { keyword, ctx_list, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_darray_type_specifier(ctx: &C, keyword: Self, left_angle: Self, key: Self, comma: Self, value: Self, trailing_comma: Self, right_angle: Self) -> Self { let syntax = SyntaxVariant::DarrayTypeSpecifier(ctx.get_arena().alloc(DarrayTypeSpecifierChildren { keyword, left_angle, key, comma, value, trailing_comma, right_angle, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_dictionary_type_specifier(ctx: &C, keyword: Self, left_angle: Self, members: Self, right_angle: Self) -> Self { let syntax = SyntaxVariant::DictionaryTypeSpecifier(ctx.get_arena().alloc(DictionaryTypeSpecifierChildren { keyword, left_angle, members, right_angle, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_closure_type_specifier(ctx: &C, outer_left_paren: Self, readonly_keyword: Self, function_keyword: Self, inner_left_paren: Self, parameter_list: Self, inner_right_paren: Self, contexts: Self, colon: Self, readonly_return: Self, return_type: Self, outer_right_paren: Self) -> Self { let syntax = 
SyntaxVariant::ClosureTypeSpecifier(ctx.get_arena().alloc(ClosureTypeSpecifierChildren { outer_left_paren, readonly_keyword, function_keyword, inner_left_paren, parameter_list, inner_right_paren, contexts, colon, readonly_return, return_type, outer_right_paren, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_closure_parameter_type_specifier(ctx: &C, call_convention: Self, readonly: Self, type_: Self) -> Self { let syntax = SyntaxVariant::ClosureParameterTypeSpecifier(ctx.get_arena().alloc(ClosureParameterTypeSpecifierChildren { call_convention, readonly, type_, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_type_refinement(ctx: &C, type_: Self, keyword: Self, left_brace: Self, members: Self, right_brace: Self) -> Self { let syntax = SyntaxVariant::TypeRefinement(ctx.get_arena().alloc(TypeRefinementChildren { type_, keyword, left_brace, members, right_brace, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_type_in_refinement(ctx: &C, keyword: Self, name: Self, type_parameters: Self, constraints: Self, equal: Self, type_: Self) -> Self { let syntax = SyntaxVariant::TypeInRefinement(ctx.get_arena().alloc(TypeInRefinementChildren { keyword, name, type_parameters, constraints, equal, type_, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_ctx_in_refinement(ctx: &C, keyword: Self, name: Self, type_parameters: Self, constraints: Self, equal: Self, ctx_list: Self) -> Self { let syntax = SyntaxVariant::CtxInRefinement(ctx.get_arena().alloc(CtxInRefinementChildren { keyword, name, type_parameters, constraints, equal, ctx_list, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_classname_type_specifier(ctx: &C, keyword: Self, left_angle: Self, type_: Self, trailing_comma: Self, right_angle: Self) -> Self { let syntax = SyntaxVariant::ClassnameTypeSpecifier(ctx.get_arena().alloc(ClassnameTypeSpecifierChildren { keyword, left_angle, type_, trailing_comma, right_angle, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_field_specifier(ctx: &C, question: Self, name: Self, arrow: Self, type_: Self) -> Self { let syntax = SyntaxVariant::FieldSpecifier(ctx.get_arena().alloc(FieldSpecifierChildren { question, name, arrow, type_, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_field_initializer(ctx: &C, name: Self, arrow: Self, value: Self) -> Self { let syntax = SyntaxVariant::FieldInitializer(ctx.get_arena().alloc(FieldInitializerChildren { name, arrow, value, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_shape_type_specifier(ctx: &C, keyword: Self, left_paren: Self, fields: Self, ellipsis: Self, right_paren: Self) -> Self { let syntax = SyntaxVariant::ShapeTypeSpecifier(ctx.get_arena().alloc(ShapeTypeSpecifierChildren { keyword, left_paren, fields, ellipsis, right_paren, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_shape_expression(ctx: &C, keyword: Self, left_paren: Self, fields: Self, right_paren: Self) -> Self { let syntax = 
SyntaxVariant::ShapeExpression(ctx.get_arena().alloc(ShapeExpressionChildren { keyword, left_paren, fields, right_paren, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_tuple_expression(ctx: &C, keyword: Self, left_paren: Self, items: Self, right_paren: Self) -> Self { let syntax = SyntaxVariant::TupleExpression(ctx.get_arena().alloc(TupleExpressionChildren { keyword, left_paren, items, right_paren, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_generic_type_specifier(ctx: &C, class_type: Self, argument_list: Self) -> Self { let syntax = SyntaxVariant::GenericTypeSpecifier(ctx.get_arena().alloc(GenericTypeSpecifierChildren { class_type, argument_list, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_nullable_type_specifier(ctx: &C, question: Self, type_: Self) -> Self { let syntax = SyntaxVariant::NullableTypeSpecifier(ctx.get_arena().alloc(NullableTypeSpecifierChildren { question, type_, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_like_type_specifier(ctx: &C, tilde: Self, type_: Self) -> Self { let syntax = SyntaxVariant::LikeTypeSpecifier(ctx.get_arena().alloc(LikeTypeSpecifierChildren { tilde, type_, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_soft_type_specifier(ctx: &C, at: Self, type_: Self) -> Self { let syntax = SyntaxVariant::SoftTypeSpecifier(ctx.get_arena().alloc(SoftTypeSpecifierChildren { at, type_, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_attributized_specifier(ctx: &C, attribute_spec: Self, type_: Self) -> Self { let syntax = SyntaxVariant::AttributizedSpecifier(ctx.get_arena().alloc(AttributizedSpecifierChildren { attribute_spec, type_, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_reified_type_argument(ctx: &C, reified: Self, type_: Self) -> Self { let syntax = SyntaxVariant::ReifiedTypeArgument(ctx.get_arena().alloc(ReifiedTypeArgumentChildren { reified, type_, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_type_arguments(ctx: &C, left_angle: Self, types: Self, right_angle: Self) -> Self { let syntax = SyntaxVariant::TypeArguments(ctx.get_arena().alloc(TypeArgumentsChildren { left_angle, types, right_angle, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_type_parameters(ctx: &C, left_angle: Self, parameters: Self, right_angle: Self) -> Self { let syntax = SyntaxVariant::TypeParameters(ctx.get_arena().alloc(TypeParametersChildren { left_angle, parameters, right_angle, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_tuple_type_specifier(ctx: &C, left_paren: Self, types: Self, right_paren: Self) -> Self { let syntax = SyntaxVariant::TupleTypeSpecifier(ctx.get_arena().alloc(TupleTypeSpecifierChildren { left_paren, types, right_paren, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_union_type_specifier(ctx: &C, left_paren: Self, types: Self, right_paren: Self) -> Self { let syntax = 
SyntaxVariant::UnionTypeSpecifier(ctx.get_arena().alloc(UnionTypeSpecifierChildren { left_paren, types, right_paren, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_intersection_type_specifier(ctx: &C, left_paren: Self, types: Self, right_paren: Self) -> Self { let syntax = SyntaxVariant::IntersectionTypeSpecifier(ctx.get_arena().alloc(IntersectionTypeSpecifierChildren { left_paren, types, right_paren, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_error(ctx: &C, error: Self) -> Self { let syntax = SyntaxVariant::ErrorSyntax(ctx.get_arena().alloc(ErrorSyntaxChildren { error, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_list_item(ctx: &C, item: Self, separator: Self) -> Self { let syntax = SyntaxVariant::ListItem(ctx.get_arena().alloc(ListItemChildren { item, separator, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_enum_class_label_expression(ctx: &C, qualifier: Self, hash: Self, expression: Self) -> Self { let syntax = SyntaxVariant::EnumClassLabelExpression(ctx.get_arena().alloc(EnumClassLabelExpressionChildren { qualifier, hash, expression, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_module_declaration(ctx: &C, attribute_spec: Self, new_keyword: Self, module_keyword: Self, name: Self, left_brace: Self, exports: Self, imports: Self, right_brace: Self) -> Self { let syntax = SyntaxVariant::ModuleDeclaration(ctx.get_arena().alloc(ModuleDeclarationChildren { attribute_spec, new_keyword, module_keyword, name, left_brace, exports, imports, right_brace, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_module_exports(ctx: &C, exports_keyword: Self, left_brace: Self, exports: Self, right_brace: Self) -> Self { let syntax = SyntaxVariant::ModuleExports(ctx.get_arena().alloc(ModuleExportsChildren { exports_keyword, left_brace, exports, right_brace, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_module_imports(ctx: &C, imports_keyword: Self, left_brace: Self, imports: Self, right_brace: Self) -> Self { let syntax = SyntaxVariant::ModuleImports(ctx.get_arena().alloc(ModuleImportsChildren { imports_keyword, left_brace, imports, right_brace, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_module_membership_declaration(ctx: &C, module_keyword: Self, name: Self, semicolon: Self) -> Self { let syntax = SyntaxVariant::ModuleMembershipDeclaration(ctx.get_arena().alloc(ModuleMembershipDeclarationChildren { module_keyword, name, semicolon, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } fn make_package_expression(ctx: &C, keyword: Self, name: Self) -> Self { let syntax = SyntaxVariant::PackageExpression(ctx.get_arena().alloc(PackageExpressionChildren { keyword, name, })); let value = V::from_values(syntax.iter_children().map(|child| &child.value)); Self::make(syntax, value) } }
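The make_* constructors above all follow a single arena pattern: move the child nodes into a *Children struct allocated on the context's arena, then fold the children's values into the parent node's value via V::from_values. Below is a minimal standalone sketch of that pattern. It assumes the bumpalo crate for the bump arena (the kind of allocator ctx.get_arena() returns in this module); Node, make_list_item, and the usize value are simplified stand-ins for Syntax<'a, T, V>, the generated methods, and V, not this crate's real API.

use bumpalo::Bump; // assumed dependency for this sketch

// Toy stand-in for Syntax<'a, T, V>: an aggregated value plus
// arena-allocated children (playing the role of a *Children struct).
struct Node<'a> {
    value: usize,
    children: &'a [Node<'a>],
}

// Mirrors the shape of the generated make_list_item above: allocate the
// children in the arena, then derive the parent's value from theirs.
fn make_list_item<'a>(arena: &'a Bump, item: Node<'a>, separator: Node<'a>) -> Node<'a> {
    // Analogue of ctx.get_arena().alloc(ListItemChildren { item, separator }):
    let children: &'a [Node<'a>] = arena.alloc([item, separator]);
    // Analogue of V::from_values(syntax.iter_children().map(|child| &child.value)):
    let value = children.iter().map(|c| c.value).sum();
    Node { value, children }
}

fn main() {
    let arena = Bump::new();
    let leaf = |value| Node { value, children: &[] };
    let item = make_list_item(&arena, leaf(1), leaf(2));
    assert_eq!(item.value, 3); // parent value folded from both children
    assert_eq!(item.children.len(), 2);
}

The real constructors differ only in arity and in delegating value aggregation to the V type parameter; the ownership story is the same, with children moved into the arena and the parent holding a shared reference.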
Rust
hhvm/hphp/hack/src/parser/syntax_by_ref/syntax_variant_generated.rs
/**
 * Copyright (c) 2016, Facebook, Inc.
 * All rights reserved.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the "hack" directory of this source tree. An additional
 * grant of patent rights can be found in the PATENTS file in the same
 * directory.
 *
 **
 *
 * THIS FILE IS @generated; DO NOT EDIT IT
 * To regenerate this file, run
 *
 *   buck run //hphp/hack/src:generate_full_fidelity
 *
 **
 *
 */
use super::{ syntax::Syntax, syntax_children_iterator::SyntaxChildrenIterator, };
#[derive(Debug, Clone)] pub enum SyntaxVariant<'a, T, V> { Token(T), Missing, SyntaxList(&'a [Syntax<'a, T, V>]), EndOfFile(&'a EndOfFileChildren<'a, T, V>), Script(&'a ScriptChildren<'a, T, V>), QualifiedName(&'a QualifiedNameChildren<'a, T, V>), ModuleName(&'a ModuleNameChildren<'a, T, V>), SimpleTypeSpecifier(&'a SimpleTypeSpecifierChildren<'a, T, V>), LiteralExpression(&'a LiteralExpressionChildren<'a, T, V>), PrefixedStringExpression(&'a PrefixedStringExpressionChildren<'a, T, V>), PrefixedCodeExpression(&'a PrefixedCodeExpressionChildren<'a, T, V>), VariableExpression(&'a VariableExpressionChildren<'a, T, V>), PipeVariableExpression(&'a PipeVariableExpressionChildren<'a, T, V>), FileAttributeSpecification(&'a FileAttributeSpecificationChildren<'a, T, V>), EnumDeclaration(&'a EnumDeclarationChildren<'a, T, V>), EnumUse(&'a EnumUseChildren<'a, T, V>), Enumerator(&'a EnumeratorChildren<'a, T, V>), EnumClassDeclaration(&'a EnumClassDeclarationChildren<'a, T, V>), EnumClassEnumerator(&'a EnumClassEnumeratorChildren<'a, T, V>), AliasDeclaration(&'a AliasDeclarationChildren<'a, T, V>), ContextAliasDeclaration(&'a ContextAliasDeclarationChildren<'a, T, V>), CaseTypeDeclaration(&'a CaseTypeDeclarationChildren<'a, T, V>), CaseTypeVariant(&'a CaseTypeVariantChildren<'a, T, V>), PropertyDeclaration(&'a PropertyDeclarationChildren<'a, T, V>), PropertyDeclarator(&'a PropertyDeclaratorChildren<'a, T, V>), NamespaceDeclaration(&'a NamespaceDeclarationChildren<'a, T, V>), NamespaceDeclarationHeader(&'a NamespaceDeclarationHeaderChildren<'a, T, V>), NamespaceBody(&'a NamespaceBodyChildren<'a, T, V>), NamespaceEmptyBody(&'a NamespaceEmptyBodyChildren<'a, T, V>), NamespaceUseDeclaration(&'a NamespaceUseDeclarationChildren<'a, T, V>), NamespaceGroupUseDeclaration(&'a NamespaceGroupUseDeclarationChildren<'a, T, V>), NamespaceUseClause(&'a NamespaceUseClauseChildren<'a, T, V>), FunctionDeclaration(&'a FunctionDeclarationChildren<'a, T, V>), FunctionDeclarationHeader(&'a FunctionDeclarationHeaderChildren<'a, T, V>), Contexts(&'a ContextsChildren<'a, T, V>), WhereClause(&'a WhereClauseChildren<'a, T, V>), WhereConstraint(&'a WhereConstraintChildren<'a, T, V>), MethodishDeclaration(&'a MethodishDeclarationChildren<'a, T, V>), MethodishTraitResolution(&'a MethodishTraitResolutionChildren<'a, T, V>), ClassishDeclaration(&'a ClassishDeclarationChildren<'a, T, V>), ClassishBody(&'a ClassishBodyChildren<'a, T, V>), TraitUse(&'a TraitUseChildren<'a, T, V>), RequireClause(&'a RequireClauseChildren<'a, T, V>), ConstDeclaration(&'a ConstDeclarationChildren<'a, T, V>), ConstantDeclarator(&'a ConstantDeclaratorChildren<'a, T, V>), TypeConstDeclaration(&'a TypeConstDeclarationChildren<'a, T, V>), ContextConstDeclaration(&'a ContextConstDeclarationChildren<'a, T, V>), DecoratedExpression(&'a DecoratedExpressionChildren<'a, T, V>), ParameterDeclaration(&'a ParameterDeclarationChildren<'a, T, V>), VariadicParameter(&'a VariadicParameterChildren<'a, T, V>), OldAttributeSpecification(&'a OldAttributeSpecificationChildren<'a, T, V>), AttributeSpecification(&'a
AttributeSpecificationChildren<'a, T, V>), Attribute(&'a AttributeChildren<'a, T, V>), InclusionExpression(&'a InclusionExpressionChildren<'a, T, V>), InclusionDirective(&'a InclusionDirectiveChildren<'a, T, V>), CompoundStatement(&'a CompoundStatementChildren<'a, T, V>), ExpressionStatement(&'a ExpressionStatementChildren<'a, T, V>), MarkupSection(&'a MarkupSectionChildren<'a, T, V>), MarkupSuffix(&'a MarkupSuffixChildren<'a, T, V>), UnsetStatement(&'a UnsetStatementChildren<'a, T, V>), DeclareLocalStatement(&'a DeclareLocalStatementChildren<'a, T, V>), UsingStatementBlockScoped(&'a UsingStatementBlockScopedChildren<'a, T, V>), UsingStatementFunctionScoped(&'a UsingStatementFunctionScopedChildren<'a, T, V>), WhileStatement(&'a WhileStatementChildren<'a, T, V>), IfStatement(&'a IfStatementChildren<'a, T, V>), ElseClause(&'a ElseClauseChildren<'a, T, V>), TryStatement(&'a TryStatementChildren<'a, T, V>), CatchClause(&'a CatchClauseChildren<'a, T, V>), FinallyClause(&'a FinallyClauseChildren<'a, T, V>), DoStatement(&'a DoStatementChildren<'a, T, V>), ForStatement(&'a ForStatementChildren<'a, T, V>), ForeachStatement(&'a ForeachStatementChildren<'a, T, V>), SwitchStatement(&'a SwitchStatementChildren<'a, T, V>), SwitchSection(&'a SwitchSectionChildren<'a, T, V>), SwitchFallthrough(&'a SwitchFallthroughChildren<'a, T, V>), CaseLabel(&'a CaseLabelChildren<'a, T, V>), DefaultLabel(&'a DefaultLabelChildren<'a, T, V>), MatchStatement(&'a MatchStatementChildren<'a, T, V>), MatchStatementArm(&'a MatchStatementArmChildren<'a, T, V>), ReturnStatement(&'a ReturnStatementChildren<'a, T, V>), YieldBreakStatement(&'a YieldBreakStatementChildren<'a, T, V>), ThrowStatement(&'a ThrowStatementChildren<'a, T, V>), BreakStatement(&'a BreakStatementChildren<'a, T, V>), ContinueStatement(&'a ContinueStatementChildren<'a, T, V>), EchoStatement(&'a EchoStatementChildren<'a, T, V>), ConcurrentStatement(&'a ConcurrentStatementChildren<'a, T, V>), SimpleInitializer(&'a SimpleInitializerChildren<'a, T, V>), AnonymousClass(&'a AnonymousClassChildren<'a, T, V>), AnonymousFunction(&'a AnonymousFunctionChildren<'a, T, V>), AnonymousFunctionUseClause(&'a AnonymousFunctionUseClauseChildren<'a, T, V>), VariablePattern(&'a VariablePatternChildren<'a, T, V>), ConstructorPattern(&'a ConstructorPatternChildren<'a, T, V>), RefinementPattern(&'a RefinementPatternChildren<'a, T, V>), LambdaExpression(&'a LambdaExpressionChildren<'a, T, V>), LambdaSignature(&'a LambdaSignatureChildren<'a, T, V>), CastExpression(&'a CastExpressionChildren<'a, T, V>), ScopeResolutionExpression(&'a ScopeResolutionExpressionChildren<'a, T, V>), MemberSelectionExpression(&'a MemberSelectionExpressionChildren<'a, T, V>), SafeMemberSelectionExpression(&'a SafeMemberSelectionExpressionChildren<'a, T, V>), EmbeddedMemberSelectionExpression(&'a EmbeddedMemberSelectionExpressionChildren<'a, T, V>), YieldExpression(&'a YieldExpressionChildren<'a, T, V>), PrefixUnaryExpression(&'a PrefixUnaryExpressionChildren<'a, T, V>), PostfixUnaryExpression(&'a PostfixUnaryExpressionChildren<'a, T, V>), BinaryExpression(&'a BinaryExpressionChildren<'a, T, V>), IsExpression(&'a IsExpressionChildren<'a, T, V>), AsExpression(&'a AsExpressionChildren<'a, T, V>), NullableAsExpression(&'a NullableAsExpressionChildren<'a, T, V>), UpcastExpression(&'a UpcastExpressionChildren<'a, T, V>), ConditionalExpression(&'a ConditionalExpressionChildren<'a, T, V>), EvalExpression(&'a EvalExpressionChildren<'a, T, V>), IssetExpression(&'a IssetExpressionChildren<'a, T, V>), 
FunctionCallExpression(&'a FunctionCallExpressionChildren<'a, T, V>), FunctionPointerExpression(&'a FunctionPointerExpressionChildren<'a, T, V>), ParenthesizedExpression(&'a ParenthesizedExpressionChildren<'a, T, V>), BracedExpression(&'a BracedExpressionChildren<'a, T, V>), ETSpliceExpression(&'a ETSpliceExpressionChildren<'a, T, V>), EmbeddedBracedExpression(&'a EmbeddedBracedExpressionChildren<'a, T, V>), ListExpression(&'a ListExpressionChildren<'a, T, V>), CollectionLiteralExpression(&'a CollectionLiteralExpressionChildren<'a, T, V>), ObjectCreationExpression(&'a ObjectCreationExpressionChildren<'a, T, V>), ConstructorCall(&'a ConstructorCallChildren<'a, T, V>), DarrayIntrinsicExpression(&'a DarrayIntrinsicExpressionChildren<'a, T, V>), DictionaryIntrinsicExpression(&'a DictionaryIntrinsicExpressionChildren<'a, T, V>), KeysetIntrinsicExpression(&'a KeysetIntrinsicExpressionChildren<'a, T, V>), VarrayIntrinsicExpression(&'a VarrayIntrinsicExpressionChildren<'a, T, V>), VectorIntrinsicExpression(&'a VectorIntrinsicExpressionChildren<'a, T, V>), ElementInitializer(&'a ElementInitializerChildren<'a, T, V>), SubscriptExpression(&'a SubscriptExpressionChildren<'a, T, V>), EmbeddedSubscriptExpression(&'a EmbeddedSubscriptExpressionChildren<'a, T, V>), AwaitableCreationExpression(&'a AwaitableCreationExpressionChildren<'a, T, V>), XHPChildrenDeclaration(&'a XHPChildrenDeclarationChildren<'a, T, V>), XHPChildrenParenthesizedList(&'a XHPChildrenParenthesizedListChildren<'a, T, V>), XHPCategoryDeclaration(&'a XHPCategoryDeclarationChildren<'a, T, V>), XHPEnumType(&'a XHPEnumTypeChildren<'a, T, V>), XHPLateinit(&'a XHPLateinitChildren<'a, T, V>), XHPRequired(&'a XHPRequiredChildren<'a, T, V>), XHPClassAttributeDeclaration(&'a XHPClassAttributeDeclarationChildren<'a, T, V>), XHPClassAttribute(&'a XHPClassAttributeChildren<'a, T, V>), XHPSimpleClassAttribute(&'a XHPSimpleClassAttributeChildren<'a, T, V>), XHPSimpleAttribute(&'a XHPSimpleAttributeChildren<'a, T, V>), XHPSpreadAttribute(&'a XHPSpreadAttributeChildren<'a, T, V>), XHPOpen(&'a XHPOpenChildren<'a, T, V>), XHPExpression(&'a XHPExpressionChildren<'a, T, V>), XHPClose(&'a XHPCloseChildren<'a, T, V>), TypeConstant(&'a TypeConstantChildren<'a, T, V>), VectorTypeSpecifier(&'a VectorTypeSpecifierChildren<'a, T, V>), KeysetTypeSpecifier(&'a KeysetTypeSpecifierChildren<'a, T, V>), TupleTypeExplicitSpecifier(&'a TupleTypeExplicitSpecifierChildren<'a, T, V>), VarrayTypeSpecifier(&'a VarrayTypeSpecifierChildren<'a, T, V>), FunctionCtxTypeSpecifier(&'a FunctionCtxTypeSpecifierChildren<'a, T, V>), TypeParameter(&'a TypeParameterChildren<'a, T, V>), TypeConstraint(&'a TypeConstraintChildren<'a, T, V>), ContextConstraint(&'a ContextConstraintChildren<'a, T, V>), DarrayTypeSpecifier(&'a DarrayTypeSpecifierChildren<'a, T, V>), DictionaryTypeSpecifier(&'a DictionaryTypeSpecifierChildren<'a, T, V>), ClosureTypeSpecifier(&'a ClosureTypeSpecifierChildren<'a, T, V>), ClosureParameterTypeSpecifier(&'a ClosureParameterTypeSpecifierChildren<'a, T, V>), TypeRefinement(&'a TypeRefinementChildren<'a, T, V>), TypeInRefinement(&'a TypeInRefinementChildren<'a, T, V>), CtxInRefinement(&'a CtxInRefinementChildren<'a, T, V>), ClassnameTypeSpecifier(&'a ClassnameTypeSpecifierChildren<'a, T, V>), FieldSpecifier(&'a FieldSpecifierChildren<'a, T, V>), FieldInitializer(&'a FieldInitializerChildren<'a, T, V>), ShapeTypeSpecifier(&'a ShapeTypeSpecifierChildren<'a, T, V>), ShapeExpression(&'a ShapeExpressionChildren<'a, T, V>), TupleExpression(&'a TupleExpressionChildren<'a, T, 
V>), GenericTypeSpecifier(&'a GenericTypeSpecifierChildren<'a, T, V>), NullableTypeSpecifier(&'a NullableTypeSpecifierChildren<'a, T, V>), LikeTypeSpecifier(&'a LikeTypeSpecifierChildren<'a, T, V>), SoftTypeSpecifier(&'a SoftTypeSpecifierChildren<'a, T, V>), AttributizedSpecifier(&'a AttributizedSpecifierChildren<'a, T, V>), ReifiedTypeArgument(&'a ReifiedTypeArgumentChildren<'a, T, V>), TypeArguments(&'a TypeArgumentsChildren<'a, T, V>), TypeParameters(&'a TypeParametersChildren<'a, T, V>), TupleTypeSpecifier(&'a TupleTypeSpecifierChildren<'a, T, V>), UnionTypeSpecifier(&'a UnionTypeSpecifierChildren<'a, T, V>), IntersectionTypeSpecifier(&'a IntersectionTypeSpecifierChildren<'a, T, V>), ErrorSyntax(&'a ErrorSyntaxChildren<'a, T, V>), ListItem(&'a ListItemChildren<'a, T, V>), EnumClassLabelExpression(&'a EnumClassLabelExpressionChildren<'a, T, V>), ModuleDeclaration(&'a ModuleDeclarationChildren<'a, T, V>), ModuleExports(&'a ModuleExportsChildren<'a, T, V>), ModuleImports(&'a ModuleImportsChildren<'a, T, V>), ModuleMembershipDeclaration(&'a ModuleMembershipDeclarationChildren<'a, T, V>), PackageExpression(&'a PackageExpressionChildren<'a, T, V>), } #[derive(Debug, Clone)] pub struct EndOfFileChildren<'a, T, V> { pub token: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct ScriptChildren<'a, T, V> { pub declarations: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct QualifiedNameChildren<'a, T, V> { pub parts: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct ModuleNameChildren<'a, T, V> { pub parts: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct SimpleTypeSpecifierChildren<'a, T, V> { pub specifier: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct LiteralExpressionChildren<'a, T, V> { pub expression: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct PrefixedStringExpressionChildren<'a, T, V> { pub name: Syntax<'a, T, V>, pub str: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct PrefixedCodeExpressionChildren<'a, T, V> { pub prefix: Syntax<'a, T, V>, pub left_backtick: Syntax<'a, T, V>, pub body: Syntax<'a, T, V>, pub right_backtick: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct VariableExpressionChildren<'a, T, V> { pub expression: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct PipeVariableExpressionChildren<'a, T, V> { pub expression: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct FileAttributeSpecificationChildren<'a, T, V> { pub left_double_angle: Syntax<'a, T, V>, pub keyword: Syntax<'a, T, V>, pub colon: Syntax<'a, T, V>, pub attributes: Syntax<'a, T, V>, pub right_double_angle: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct EnumDeclarationChildren<'a, T, V> { pub attribute_spec: Syntax<'a, T, V>, pub modifiers: Syntax<'a, T, V>, pub keyword: Syntax<'a, T, V>, pub name: Syntax<'a, T, V>, pub colon: Syntax<'a, T, V>, pub base: Syntax<'a, T, V>, pub type_: Syntax<'a, T, V>, pub left_brace: Syntax<'a, T, V>, pub use_clauses: Syntax<'a, T, V>, pub enumerators: Syntax<'a, T, V>, pub right_brace: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct EnumUseChildren<'a, T, V> { pub keyword: Syntax<'a, T, V>, pub names: Syntax<'a, T, V>, pub semicolon: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct EnumeratorChildren<'a, T, V> { pub name: Syntax<'a, T, V>, pub equal: Syntax<'a, T, V>, pub value: Syntax<'a, T, V>, pub semicolon: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct EnumClassDeclarationChildren<'a, T, V> { pub attribute_spec: Syntax<'a, T, V>, pub modifiers: Syntax<'a, T, V>, pub enum_keyword: 
Syntax<'a, T, V>, pub class_keyword: Syntax<'a, T, V>, pub name: Syntax<'a, T, V>, pub colon: Syntax<'a, T, V>, pub base: Syntax<'a, T, V>, pub extends: Syntax<'a, T, V>, pub extends_list: Syntax<'a, T, V>, pub left_brace: Syntax<'a, T, V>, pub elements: Syntax<'a, T, V>, pub right_brace: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct EnumClassEnumeratorChildren<'a, T, V> { pub modifiers: Syntax<'a, T, V>, pub type_: Syntax<'a, T, V>, pub name: Syntax<'a, T, V>, pub initializer: Syntax<'a, T, V>, pub semicolon: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct AliasDeclarationChildren<'a, T, V> { pub attribute_spec: Syntax<'a, T, V>, pub modifiers: Syntax<'a, T, V>, pub module_kw_opt: Syntax<'a, T, V>, pub keyword: Syntax<'a, T, V>, pub name: Syntax<'a, T, V>, pub generic_parameter: Syntax<'a, T, V>, pub constraint: Syntax<'a, T, V>, pub equal: Syntax<'a, T, V>, pub type_: Syntax<'a, T, V>, pub semicolon: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct ContextAliasDeclarationChildren<'a, T, V> { pub attribute_spec: Syntax<'a, T, V>, pub keyword: Syntax<'a, T, V>, pub name: Syntax<'a, T, V>, pub generic_parameter: Syntax<'a, T, V>, pub as_constraint: Syntax<'a, T, V>, pub equal: Syntax<'a, T, V>, pub context: Syntax<'a, T, V>, pub semicolon: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct CaseTypeDeclarationChildren<'a, T, V> { pub attribute_spec: Syntax<'a, T, V>, pub modifiers: Syntax<'a, T, V>, pub case_keyword: Syntax<'a, T, V>, pub type_keyword: Syntax<'a, T, V>, pub name: Syntax<'a, T, V>, pub generic_parameter: Syntax<'a, T, V>, pub as_: Syntax<'a, T, V>, pub bounds: Syntax<'a, T, V>, pub equal: Syntax<'a, T, V>, pub variants: Syntax<'a, T, V>, pub semicolon: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct CaseTypeVariantChildren<'a, T, V> { pub bar: Syntax<'a, T, V>, pub type_: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct PropertyDeclarationChildren<'a, T, V> { pub attribute_spec: Syntax<'a, T, V>, pub modifiers: Syntax<'a, T, V>, pub type_: Syntax<'a, T, V>, pub declarators: Syntax<'a, T, V>, pub semicolon: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct PropertyDeclaratorChildren<'a, T, V> { pub name: Syntax<'a, T, V>, pub initializer: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct NamespaceDeclarationChildren<'a, T, V> { pub header: Syntax<'a, T, V>, pub body: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct NamespaceDeclarationHeaderChildren<'a, T, V> { pub keyword: Syntax<'a, T, V>, pub name: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct NamespaceBodyChildren<'a, T, V> { pub left_brace: Syntax<'a, T, V>, pub declarations: Syntax<'a, T, V>, pub right_brace: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct NamespaceEmptyBodyChildren<'a, T, V> { pub semicolon: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct NamespaceUseDeclarationChildren<'a, T, V> { pub keyword: Syntax<'a, T, V>, pub kind: Syntax<'a, T, V>, pub clauses: Syntax<'a, T, V>, pub semicolon: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct NamespaceGroupUseDeclarationChildren<'a, T, V> { pub keyword: Syntax<'a, T, V>, pub kind: Syntax<'a, T, V>, pub prefix: Syntax<'a, T, V>, pub left_brace: Syntax<'a, T, V>, pub clauses: Syntax<'a, T, V>, pub right_brace: Syntax<'a, T, V>, pub semicolon: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct NamespaceUseClauseChildren<'a, T, V> { pub clause_kind: Syntax<'a, T, V>, pub name: Syntax<'a, T, V>, pub as_: Syntax<'a, T, V>, pub alias: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub 
struct FunctionDeclarationChildren<'a, T, V> { pub attribute_spec: Syntax<'a, T, V>, pub declaration_header: Syntax<'a, T, V>, pub body: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct FunctionDeclarationHeaderChildren<'a, T, V> { pub modifiers: Syntax<'a, T, V>, pub keyword: Syntax<'a, T, V>, pub name: Syntax<'a, T, V>, pub type_parameter_list: Syntax<'a, T, V>, pub left_paren: Syntax<'a, T, V>, pub parameter_list: Syntax<'a, T, V>, pub right_paren: Syntax<'a, T, V>, pub contexts: Syntax<'a, T, V>, pub colon: Syntax<'a, T, V>, pub readonly_return: Syntax<'a, T, V>, pub type_: Syntax<'a, T, V>, pub where_clause: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct ContextsChildren<'a, T, V> { pub left_bracket: Syntax<'a, T, V>, pub types: Syntax<'a, T, V>, pub right_bracket: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct WhereClauseChildren<'a, T, V> { pub keyword: Syntax<'a, T, V>, pub constraints: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct WhereConstraintChildren<'a, T, V> { pub left_type: Syntax<'a, T, V>, pub operator: Syntax<'a, T, V>, pub right_type: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct MethodishDeclarationChildren<'a, T, V> { pub attribute: Syntax<'a, T, V>, pub function_decl_header: Syntax<'a, T, V>, pub function_body: Syntax<'a, T, V>, pub semicolon: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct MethodishTraitResolutionChildren<'a, T, V> { pub attribute: Syntax<'a, T, V>, pub function_decl_header: Syntax<'a, T, V>, pub equal: Syntax<'a, T, V>, pub name: Syntax<'a, T, V>, pub semicolon: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct ClassishDeclarationChildren<'a, T, V> { pub attribute: Syntax<'a, T, V>, pub modifiers: Syntax<'a, T, V>, pub xhp: Syntax<'a, T, V>, pub keyword: Syntax<'a, T, V>, pub name: Syntax<'a, T, V>, pub type_parameters: Syntax<'a, T, V>, pub extends_keyword: Syntax<'a, T, V>, pub extends_list: Syntax<'a, T, V>, pub implements_keyword: Syntax<'a, T, V>, pub implements_list: Syntax<'a, T, V>, pub where_clause: Syntax<'a, T, V>, pub body: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct ClassishBodyChildren<'a, T, V> { pub left_brace: Syntax<'a, T, V>, pub elements: Syntax<'a, T, V>, pub right_brace: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct TraitUseChildren<'a, T, V> { pub keyword: Syntax<'a, T, V>, pub names: Syntax<'a, T, V>, pub semicolon: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct RequireClauseChildren<'a, T, V> { pub keyword: Syntax<'a, T, V>, pub kind: Syntax<'a, T, V>, pub name: Syntax<'a, T, V>, pub semicolon: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct ConstDeclarationChildren<'a, T, V> { pub attribute_spec: Syntax<'a, T, V>, pub modifiers: Syntax<'a, T, V>, pub keyword: Syntax<'a, T, V>, pub type_specifier: Syntax<'a, T, V>, pub declarators: Syntax<'a, T, V>, pub semicolon: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct ConstantDeclaratorChildren<'a, T, V> { pub name: Syntax<'a, T, V>, pub initializer: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct TypeConstDeclarationChildren<'a, T, V> { pub attribute_spec: Syntax<'a, T, V>, pub modifiers: Syntax<'a, T, V>, pub keyword: Syntax<'a, T, V>, pub type_keyword: Syntax<'a, T, V>, pub name: Syntax<'a, T, V>, pub type_parameters: Syntax<'a, T, V>, pub type_constraints: Syntax<'a, T, V>, pub equal: Syntax<'a, T, V>, pub type_specifier: Syntax<'a, T, V>, pub semicolon: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct ContextConstDeclarationChildren<'a, T, V> { pub modifiers: Syntax<'a, 
T, V>, pub const_keyword: Syntax<'a, T, V>, pub ctx_keyword: Syntax<'a, T, V>, pub name: Syntax<'a, T, V>, pub type_parameters: Syntax<'a, T, V>, pub constraint: Syntax<'a, T, V>, pub equal: Syntax<'a, T, V>, pub ctx_list: Syntax<'a, T, V>, pub semicolon: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct DecoratedExpressionChildren<'a, T, V> { pub decorator: Syntax<'a, T, V>, pub expression: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct ParameterDeclarationChildren<'a, T, V> { pub attribute: Syntax<'a, T, V>, pub visibility: Syntax<'a, T, V>, pub call_convention: Syntax<'a, T, V>, pub readonly: Syntax<'a, T, V>, pub type_: Syntax<'a, T, V>, pub name: Syntax<'a, T, V>, pub default_value: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct VariadicParameterChildren<'a, T, V> { pub call_convention: Syntax<'a, T, V>, pub type_: Syntax<'a, T, V>, pub ellipsis: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct OldAttributeSpecificationChildren<'a, T, V> { pub left_double_angle: Syntax<'a, T, V>, pub attributes: Syntax<'a, T, V>, pub right_double_angle: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct AttributeSpecificationChildren<'a, T, V> { pub attributes: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct AttributeChildren<'a, T, V> { pub at: Syntax<'a, T, V>, pub attribute_name: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct InclusionExpressionChildren<'a, T, V> { pub require: Syntax<'a, T, V>, pub filename: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct InclusionDirectiveChildren<'a, T, V> { pub expression: Syntax<'a, T, V>, pub semicolon: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct CompoundStatementChildren<'a, T, V> { pub left_brace: Syntax<'a, T, V>, pub statements: Syntax<'a, T, V>, pub right_brace: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct ExpressionStatementChildren<'a, T, V> { pub expression: Syntax<'a, T, V>, pub semicolon: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct MarkupSectionChildren<'a, T, V> { pub hashbang: Syntax<'a, T, V>, pub suffix: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct MarkupSuffixChildren<'a, T, V> { pub less_than_question: Syntax<'a, T, V>, pub name: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct UnsetStatementChildren<'a, T, V> { pub keyword: Syntax<'a, T, V>, pub left_paren: Syntax<'a, T, V>, pub variables: Syntax<'a, T, V>, pub right_paren: Syntax<'a, T, V>, pub semicolon: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct DeclareLocalStatementChildren<'a, T, V> { pub keyword: Syntax<'a, T, V>, pub variable: Syntax<'a, T, V>, pub colon: Syntax<'a, T, V>, pub type_: Syntax<'a, T, V>, pub initializer: Syntax<'a, T, V>, pub semicolon: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct UsingStatementBlockScopedChildren<'a, T, V> { pub await_keyword: Syntax<'a, T, V>, pub using_keyword: Syntax<'a, T, V>, pub left_paren: Syntax<'a, T, V>, pub expressions: Syntax<'a, T, V>, pub right_paren: Syntax<'a, T, V>, pub body: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct UsingStatementFunctionScopedChildren<'a, T, V> { pub await_keyword: Syntax<'a, T, V>, pub using_keyword: Syntax<'a, T, V>, pub expression: Syntax<'a, T, V>, pub semicolon: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct WhileStatementChildren<'a, T, V> { pub keyword: Syntax<'a, T, V>, pub left_paren: Syntax<'a, T, V>, pub condition: Syntax<'a, T, V>, pub right_paren: Syntax<'a, T, V>, pub body: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct IfStatementChildren<'a, T, V> { pub 
keyword: Syntax<'a, T, V>, pub left_paren: Syntax<'a, T, V>, pub condition: Syntax<'a, T, V>, pub right_paren: Syntax<'a, T, V>, pub statement: Syntax<'a, T, V>, pub else_clause: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct ElseClauseChildren<'a, T, V> { pub keyword: Syntax<'a, T, V>, pub statement: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct TryStatementChildren<'a, T, V> { pub keyword: Syntax<'a, T, V>, pub compound_statement: Syntax<'a, T, V>, pub catch_clauses: Syntax<'a, T, V>, pub finally_clause: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct CatchClauseChildren<'a, T, V> { pub keyword: Syntax<'a, T, V>, pub left_paren: Syntax<'a, T, V>, pub type_: Syntax<'a, T, V>, pub variable: Syntax<'a, T, V>, pub right_paren: Syntax<'a, T, V>, pub body: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct FinallyClauseChildren<'a, T, V> { pub keyword: Syntax<'a, T, V>, pub body: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct DoStatementChildren<'a, T, V> { pub keyword: Syntax<'a, T, V>, pub body: Syntax<'a, T, V>, pub while_keyword: Syntax<'a, T, V>, pub left_paren: Syntax<'a, T, V>, pub condition: Syntax<'a, T, V>, pub right_paren: Syntax<'a, T, V>, pub semicolon: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct ForStatementChildren<'a, T, V> { pub keyword: Syntax<'a, T, V>, pub left_paren: Syntax<'a, T, V>, pub initializer: Syntax<'a, T, V>, pub first_semicolon: Syntax<'a, T, V>, pub control: Syntax<'a, T, V>, pub second_semicolon: Syntax<'a, T, V>, pub end_of_loop: Syntax<'a, T, V>, pub right_paren: Syntax<'a, T, V>, pub body: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct ForeachStatementChildren<'a, T, V> { pub keyword: Syntax<'a, T, V>, pub left_paren: Syntax<'a, T, V>, pub collection: Syntax<'a, T, V>, pub await_keyword: Syntax<'a, T, V>, pub as_: Syntax<'a, T, V>, pub key: Syntax<'a, T, V>, pub arrow: Syntax<'a, T, V>, pub value: Syntax<'a, T, V>, pub right_paren: Syntax<'a, T, V>, pub body: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct SwitchStatementChildren<'a, T, V> { pub keyword: Syntax<'a, T, V>, pub left_paren: Syntax<'a, T, V>, pub expression: Syntax<'a, T, V>, pub right_paren: Syntax<'a, T, V>, pub left_brace: Syntax<'a, T, V>, pub sections: Syntax<'a, T, V>, pub right_brace: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct SwitchSectionChildren<'a, T, V> { pub labels: Syntax<'a, T, V>, pub statements: Syntax<'a, T, V>, pub fallthrough: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct SwitchFallthroughChildren<'a, T, V> { pub keyword: Syntax<'a, T, V>, pub semicolon: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct CaseLabelChildren<'a, T, V> { pub keyword: Syntax<'a, T, V>, pub expression: Syntax<'a, T, V>, pub colon: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct DefaultLabelChildren<'a, T, V> { pub keyword: Syntax<'a, T, V>, pub colon: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct MatchStatementChildren<'a, T, V> { pub keyword: Syntax<'a, T, V>, pub left_paren: Syntax<'a, T, V>, pub expression: Syntax<'a, T, V>, pub right_paren: Syntax<'a, T, V>, pub left_brace: Syntax<'a, T, V>, pub arms: Syntax<'a, T, V>, pub right_brace: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct MatchStatementArmChildren<'a, T, V> { pub pattern: Syntax<'a, T, V>, pub arrow: Syntax<'a, T, V>, pub body: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct ReturnStatementChildren<'a, T, V> { pub keyword: Syntax<'a, T, V>, pub expression: Syntax<'a, T, V>, pub semicolon: Syntax<'a, T, V>, } 
#[derive(Debug, Clone)] pub struct YieldBreakStatementChildren<'a, T, V> { pub keyword: Syntax<'a, T, V>, pub break_: Syntax<'a, T, V>, pub semicolon: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct ThrowStatementChildren<'a, T, V> { pub keyword: Syntax<'a, T, V>, pub expression: Syntax<'a, T, V>, pub semicolon: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct BreakStatementChildren<'a, T, V> { pub keyword: Syntax<'a, T, V>, pub semicolon: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct ContinueStatementChildren<'a, T, V> { pub keyword: Syntax<'a, T, V>, pub semicolon: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct EchoStatementChildren<'a, T, V> { pub keyword: Syntax<'a, T, V>, pub expressions: Syntax<'a, T, V>, pub semicolon: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct ConcurrentStatementChildren<'a, T, V> { pub keyword: Syntax<'a, T, V>, pub statement: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct SimpleInitializerChildren<'a, T, V> { pub equal: Syntax<'a, T, V>, pub value: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct AnonymousClassChildren<'a, T, V> { pub class_keyword: Syntax<'a, T, V>, pub left_paren: Syntax<'a, T, V>, pub argument_list: Syntax<'a, T, V>, pub right_paren: Syntax<'a, T, V>, pub extends_keyword: Syntax<'a, T, V>, pub extends_list: Syntax<'a, T, V>, pub implements_keyword: Syntax<'a, T, V>, pub implements_list: Syntax<'a, T, V>, pub body: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct AnonymousFunctionChildren<'a, T, V> { pub attribute_spec: Syntax<'a, T, V>, pub async_keyword: Syntax<'a, T, V>, pub function_keyword: Syntax<'a, T, V>, pub left_paren: Syntax<'a, T, V>, pub parameters: Syntax<'a, T, V>, pub right_paren: Syntax<'a, T, V>, pub ctx_list: Syntax<'a, T, V>, pub colon: Syntax<'a, T, V>, pub readonly_return: Syntax<'a, T, V>, pub type_: Syntax<'a, T, V>, pub use_: Syntax<'a, T, V>, pub body: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct AnonymousFunctionUseClauseChildren<'a, T, V> { pub keyword: Syntax<'a, T, V>, pub left_paren: Syntax<'a, T, V>, pub variables: Syntax<'a, T, V>, pub right_paren: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct VariablePatternChildren<'a, T, V> { pub variable: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct ConstructorPatternChildren<'a, T, V> { pub constructor: Syntax<'a, T, V>, pub left_paren: Syntax<'a, T, V>, pub members: Syntax<'a, T, V>, pub right_paren: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct RefinementPatternChildren<'a, T, V> { pub variable: Syntax<'a, T, V>, pub colon: Syntax<'a, T, V>, pub specifier: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct LambdaExpressionChildren<'a, T, V> { pub attribute_spec: Syntax<'a, T, V>, pub async_: Syntax<'a, T, V>, pub signature: Syntax<'a, T, V>, pub arrow: Syntax<'a, T, V>, pub body: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct LambdaSignatureChildren<'a, T, V> { pub left_paren: Syntax<'a, T, V>, pub parameters: Syntax<'a, T, V>, pub right_paren: Syntax<'a, T, V>, pub contexts: Syntax<'a, T, V>, pub colon: Syntax<'a, T, V>, pub readonly_return: Syntax<'a, T, V>, pub type_: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct CastExpressionChildren<'a, T, V> { pub left_paren: Syntax<'a, T, V>, pub type_: Syntax<'a, T, V>, pub right_paren: Syntax<'a, T, V>, pub operand: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct ScopeResolutionExpressionChildren<'a, T, V> { pub qualifier: Syntax<'a, T, V>, pub operator: Syntax<'a, T, V>, pub name: Syntax<'a, T, V>, } 
#[derive(Debug, Clone)] pub struct MemberSelectionExpressionChildren<'a, T, V> { pub object: Syntax<'a, T, V>, pub operator: Syntax<'a, T, V>, pub name: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct SafeMemberSelectionExpressionChildren<'a, T, V> { pub object: Syntax<'a, T, V>, pub operator: Syntax<'a, T, V>, pub name: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct EmbeddedMemberSelectionExpressionChildren<'a, T, V> { pub object: Syntax<'a, T, V>, pub operator: Syntax<'a, T, V>, pub name: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct YieldExpressionChildren<'a, T, V> { pub keyword: Syntax<'a, T, V>, pub operand: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct PrefixUnaryExpressionChildren<'a, T, V> { pub operator: Syntax<'a, T, V>, pub operand: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct PostfixUnaryExpressionChildren<'a, T, V> { pub operand: Syntax<'a, T, V>, pub operator: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct BinaryExpressionChildren<'a, T, V> { pub left_operand: Syntax<'a, T, V>, pub operator: Syntax<'a, T, V>, pub right_operand: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct IsExpressionChildren<'a, T, V> { pub left_operand: Syntax<'a, T, V>, pub operator: Syntax<'a, T, V>, pub right_operand: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct AsExpressionChildren<'a, T, V> { pub left_operand: Syntax<'a, T, V>, pub operator: Syntax<'a, T, V>, pub right_operand: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct NullableAsExpressionChildren<'a, T, V> { pub left_operand: Syntax<'a, T, V>, pub operator: Syntax<'a, T, V>, pub right_operand: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct UpcastExpressionChildren<'a, T, V> { pub left_operand: Syntax<'a, T, V>, pub operator: Syntax<'a, T, V>, pub right_operand: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct ConditionalExpressionChildren<'a, T, V> { pub test: Syntax<'a, T, V>, pub question: Syntax<'a, T, V>, pub consequence: Syntax<'a, T, V>, pub colon: Syntax<'a, T, V>, pub alternative: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct EvalExpressionChildren<'a, T, V> { pub keyword: Syntax<'a, T, V>, pub left_paren: Syntax<'a, T, V>, pub argument: Syntax<'a, T, V>, pub right_paren: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct IssetExpressionChildren<'a, T, V> { pub keyword: Syntax<'a, T, V>, pub left_paren: Syntax<'a, T, V>, pub argument_list: Syntax<'a, T, V>, pub right_paren: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct FunctionCallExpressionChildren<'a, T, V> { pub receiver: Syntax<'a, T, V>, pub type_args: Syntax<'a, T, V>, pub left_paren: Syntax<'a, T, V>, pub argument_list: Syntax<'a, T, V>, pub right_paren: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct FunctionPointerExpressionChildren<'a, T, V> { pub receiver: Syntax<'a, T, V>, pub type_args: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct ParenthesizedExpressionChildren<'a, T, V> { pub left_paren: Syntax<'a, T, V>, pub expression: Syntax<'a, T, V>, pub right_paren: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct BracedExpressionChildren<'a, T, V> { pub left_brace: Syntax<'a, T, V>, pub expression: Syntax<'a, T, V>, pub right_brace: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct ETSpliceExpressionChildren<'a, T, V> { pub dollar: Syntax<'a, T, V>, pub left_brace: Syntax<'a, T, V>, pub expression: Syntax<'a, T, V>, pub right_brace: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct EmbeddedBracedExpressionChildren<'a, T, V> { pub 
left_brace: Syntax<'a, T, V>, pub expression: Syntax<'a, T, V>, pub right_brace: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct ListExpressionChildren<'a, T, V> { pub keyword: Syntax<'a, T, V>, pub left_paren: Syntax<'a, T, V>, pub members: Syntax<'a, T, V>, pub right_paren: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct CollectionLiteralExpressionChildren<'a, T, V> { pub name: Syntax<'a, T, V>, pub left_brace: Syntax<'a, T, V>, pub initializers: Syntax<'a, T, V>, pub right_brace: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct ObjectCreationExpressionChildren<'a, T, V> { pub new_keyword: Syntax<'a, T, V>, pub object: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct ConstructorCallChildren<'a, T, V> { pub type_: Syntax<'a, T, V>, pub left_paren: Syntax<'a, T, V>, pub argument_list: Syntax<'a, T, V>, pub right_paren: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct DarrayIntrinsicExpressionChildren<'a, T, V> { pub keyword: Syntax<'a, T, V>, pub explicit_type: Syntax<'a, T, V>, pub left_bracket: Syntax<'a, T, V>, pub members: Syntax<'a, T, V>, pub right_bracket: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct DictionaryIntrinsicExpressionChildren<'a, T, V> { pub keyword: Syntax<'a, T, V>, pub explicit_type: Syntax<'a, T, V>, pub left_bracket: Syntax<'a, T, V>, pub members: Syntax<'a, T, V>, pub right_bracket: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct KeysetIntrinsicExpressionChildren<'a, T, V> { pub keyword: Syntax<'a, T, V>, pub explicit_type: Syntax<'a, T, V>, pub left_bracket: Syntax<'a, T, V>, pub members: Syntax<'a, T, V>, pub right_bracket: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct VarrayIntrinsicExpressionChildren<'a, T, V> { pub keyword: Syntax<'a, T, V>, pub explicit_type: Syntax<'a, T, V>, pub left_bracket: Syntax<'a, T, V>, pub members: Syntax<'a, T, V>, pub right_bracket: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct VectorIntrinsicExpressionChildren<'a, T, V> { pub keyword: Syntax<'a, T, V>, pub explicit_type: Syntax<'a, T, V>, pub left_bracket: Syntax<'a, T, V>, pub members: Syntax<'a, T, V>, pub right_bracket: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct ElementInitializerChildren<'a, T, V> { pub key: Syntax<'a, T, V>, pub arrow: Syntax<'a, T, V>, pub value: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct SubscriptExpressionChildren<'a, T, V> { pub receiver: Syntax<'a, T, V>, pub left_bracket: Syntax<'a, T, V>, pub index: Syntax<'a, T, V>, pub right_bracket: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct EmbeddedSubscriptExpressionChildren<'a, T, V> { pub receiver: Syntax<'a, T, V>, pub left_bracket: Syntax<'a, T, V>, pub index: Syntax<'a, T, V>, pub right_bracket: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct AwaitableCreationExpressionChildren<'a, T, V> { pub attribute_spec: Syntax<'a, T, V>, pub async_: Syntax<'a, T, V>, pub compound_statement: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct XHPChildrenDeclarationChildren<'a, T, V> { pub keyword: Syntax<'a, T, V>, pub expression: Syntax<'a, T, V>, pub semicolon: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct XHPChildrenParenthesizedListChildren<'a, T, V> { pub left_paren: Syntax<'a, T, V>, pub xhp_children: Syntax<'a, T, V>, pub right_paren: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct XHPCategoryDeclarationChildren<'a, T, V> { pub keyword: Syntax<'a, T, V>, pub categories: Syntax<'a, T, V>, pub semicolon: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct XHPEnumTypeChildren<'a, T, V> 
{ pub like: Syntax<'a, T, V>, pub keyword: Syntax<'a, T, V>, pub left_brace: Syntax<'a, T, V>, pub values: Syntax<'a, T, V>, pub right_brace: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct XHPLateinitChildren<'a, T, V> { pub at: Syntax<'a, T, V>, pub keyword: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct XHPRequiredChildren<'a, T, V> { pub at: Syntax<'a, T, V>, pub keyword: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct XHPClassAttributeDeclarationChildren<'a, T, V> { pub keyword: Syntax<'a, T, V>, pub attributes: Syntax<'a, T, V>, pub semicolon: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct XHPClassAttributeChildren<'a, T, V> { pub type_: Syntax<'a, T, V>, pub name: Syntax<'a, T, V>, pub initializer: Syntax<'a, T, V>, pub required: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct XHPSimpleClassAttributeChildren<'a, T, V> { pub type_: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct XHPSimpleAttributeChildren<'a, T, V> { pub name: Syntax<'a, T, V>, pub equal: Syntax<'a, T, V>, pub expression: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct XHPSpreadAttributeChildren<'a, T, V> { pub left_brace: Syntax<'a, T, V>, pub spread_operator: Syntax<'a, T, V>, pub expression: Syntax<'a, T, V>, pub right_brace: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct XHPOpenChildren<'a, T, V> { pub left_angle: Syntax<'a, T, V>, pub name: Syntax<'a, T, V>, pub attributes: Syntax<'a, T, V>, pub right_angle: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct XHPExpressionChildren<'a, T, V> { pub open: Syntax<'a, T, V>, pub body: Syntax<'a, T, V>, pub close: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct XHPCloseChildren<'a, T, V> { pub left_angle: Syntax<'a, T, V>, pub name: Syntax<'a, T, V>, pub right_angle: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct TypeConstantChildren<'a, T, V> { pub left_type: Syntax<'a, T, V>, pub separator: Syntax<'a, T, V>, pub right_type: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct VectorTypeSpecifierChildren<'a, T, V> { pub keyword: Syntax<'a, T, V>, pub left_angle: Syntax<'a, T, V>, pub type_: Syntax<'a, T, V>, pub trailing_comma: Syntax<'a, T, V>, pub right_angle: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct KeysetTypeSpecifierChildren<'a, T, V> { pub keyword: Syntax<'a, T, V>, pub left_angle: Syntax<'a, T, V>, pub type_: Syntax<'a, T, V>, pub trailing_comma: Syntax<'a, T, V>, pub right_angle: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct TupleTypeExplicitSpecifierChildren<'a, T, V> { pub keyword: Syntax<'a, T, V>, pub left_angle: Syntax<'a, T, V>, pub types: Syntax<'a, T, V>, pub right_angle: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct VarrayTypeSpecifierChildren<'a, T, V> { pub keyword: Syntax<'a, T, V>, pub left_angle: Syntax<'a, T, V>, pub type_: Syntax<'a, T, V>, pub trailing_comma: Syntax<'a, T, V>, pub right_angle: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct FunctionCtxTypeSpecifierChildren<'a, T, V> { pub keyword: Syntax<'a, T, V>, pub variable: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct TypeParameterChildren<'a, T, V> { pub attribute_spec: Syntax<'a, T, V>, pub reified: Syntax<'a, T, V>, pub variance: Syntax<'a, T, V>, pub name: Syntax<'a, T, V>, pub param_params: Syntax<'a, T, V>, pub constraints: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct TypeConstraintChildren<'a, T, V> { pub keyword: Syntax<'a, T, V>, pub type_: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct ContextConstraintChildren<'a, T, V> { pub 
keyword: Syntax<'a, T, V>, pub ctx_list: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct DarrayTypeSpecifierChildren<'a, T, V> { pub keyword: Syntax<'a, T, V>, pub left_angle: Syntax<'a, T, V>, pub key: Syntax<'a, T, V>, pub comma: Syntax<'a, T, V>, pub value: Syntax<'a, T, V>, pub trailing_comma: Syntax<'a, T, V>, pub right_angle: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct DictionaryTypeSpecifierChildren<'a, T, V> { pub keyword: Syntax<'a, T, V>, pub left_angle: Syntax<'a, T, V>, pub members: Syntax<'a, T, V>, pub right_angle: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct ClosureTypeSpecifierChildren<'a, T, V> { pub outer_left_paren: Syntax<'a, T, V>, pub readonly_keyword: Syntax<'a, T, V>, pub function_keyword: Syntax<'a, T, V>, pub inner_left_paren: Syntax<'a, T, V>, pub parameter_list: Syntax<'a, T, V>, pub inner_right_paren: Syntax<'a, T, V>, pub contexts: Syntax<'a, T, V>, pub colon: Syntax<'a, T, V>, pub readonly_return: Syntax<'a, T, V>, pub return_type: Syntax<'a, T, V>, pub outer_right_paren: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct ClosureParameterTypeSpecifierChildren<'a, T, V> { pub call_convention: Syntax<'a, T, V>, pub readonly: Syntax<'a, T, V>, pub type_: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct TypeRefinementChildren<'a, T, V> { pub type_: Syntax<'a, T, V>, pub keyword: Syntax<'a, T, V>, pub left_brace: Syntax<'a, T, V>, pub members: Syntax<'a, T, V>, pub right_brace: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct TypeInRefinementChildren<'a, T, V> { pub keyword: Syntax<'a, T, V>, pub name: Syntax<'a, T, V>, pub type_parameters: Syntax<'a, T, V>, pub constraints: Syntax<'a, T, V>, pub equal: Syntax<'a, T, V>, pub type_: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct CtxInRefinementChildren<'a, T, V> { pub keyword: Syntax<'a, T, V>, pub name: Syntax<'a, T, V>, pub type_parameters: Syntax<'a, T, V>, pub constraints: Syntax<'a, T, V>, pub equal: Syntax<'a, T, V>, pub ctx_list: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct ClassnameTypeSpecifierChildren<'a, T, V> { pub keyword: Syntax<'a, T, V>, pub left_angle: Syntax<'a, T, V>, pub type_: Syntax<'a, T, V>, pub trailing_comma: Syntax<'a, T, V>, pub right_angle: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct FieldSpecifierChildren<'a, T, V> { pub question: Syntax<'a, T, V>, pub name: Syntax<'a, T, V>, pub arrow: Syntax<'a, T, V>, pub type_: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct FieldInitializerChildren<'a, T, V> { pub name: Syntax<'a, T, V>, pub arrow: Syntax<'a, T, V>, pub value: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct ShapeTypeSpecifierChildren<'a, T, V> { pub keyword: Syntax<'a, T, V>, pub left_paren: Syntax<'a, T, V>, pub fields: Syntax<'a, T, V>, pub ellipsis: Syntax<'a, T, V>, pub right_paren: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct ShapeExpressionChildren<'a, T, V> { pub keyword: Syntax<'a, T, V>, pub left_paren: Syntax<'a, T, V>, pub fields: Syntax<'a, T, V>, pub right_paren: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct TupleExpressionChildren<'a, T, V> { pub keyword: Syntax<'a, T, V>, pub left_paren: Syntax<'a, T, V>, pub items: Syntax<'a, T, V>, pub right_paren: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct GenericTypeSpecifierChildren<'a, T, V> { pub class_type: Syntax<'a, T, V>, pub argument_list: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct NullableTypeSpecifierChildren<'a, T, V> { pub question: Syntax<'a, T, V>, pub type_: Syntax<'a, T, V>, } 
#[derive(Debug, Clone)] pub struct LikeTypeSpecifierChildren<'a, T, V> { pub tilde: Syntax<'a, T, V>, pub type_: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct SoftTypeSpecifierChildren<'a, T, V> { pub at: Syntax<'a, T, V>, pub type_: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct AttributizedSpecifierChildren<'a, T, V> { pub attribute_spec: Syntax<'a, T, V>, pub type_: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct ReifiedTypeArgumentChildren<'a, T, V> { pub reified: Syntax<'a, T, V>, pub type_: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct TypeArgumentsChildren<'a, T, V> { pub left_angle: Syntax<'a, T, V>, pub types: Syntax<'a, T, V>, pub right_angle: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct TypeParametersChildren<'a, T, V> { pub left_angle: Syntax<'a, T, V>, pub parameters: Syntax<'a, T, V>, pub right_angle: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct TupleTypeSpecifierChildren<'a, T, V> { pub left_paren: Syntax<'a, T, V>, pub types: Syntax<'a, T, V>, pub right_paren: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct UnionTypeSpecifierChildren<'a, T, V> { pub left_paren: Syntax<'a, T, V>, pub types: Syntax<'a, T, V>, pub right_paren: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct IntersectionTypeSpecifierChildren<'a, T, V> { pub left_paren: Syntax<'a, T, V>, pub types: Syntax<'a, T, V>, pub right_paren: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct ErrorSyntaxChildren<'a, T, V> { pub error: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct ListItemChildren<'a, T, V> { pub item: Syntax<'a, T, V>, pub separator: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct EnumClassLabelExpressionChildren<'a, T, V> { pub qualifier: Syntax<'a, T, V>, pub hash: Syntax<'a, T, V>, pub expression: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct ModuleDeclarationChildren<'a, T, V> { pub attribute_spec: Syntax<'a, T, V>, pub new_keyword: Syntax<'a, T, V>, pub module_keyword: Syntax<'a, T, V>, pub name: Syntax<'a, T, V>, pub left_brace: Syntax<'a, T, V>, pub exports: Syntax<'a, T, V>, pub imports: Syntax<'a, T, V>, pub right_brace: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct ModuleExportsChildren<'a, T, V> { pub exports_keyword: Syntax<'a, T, V>, pub left_brace: Syntax<'a, T, V>, pub exports: Syntax<'a, T, V>, pub right_brace: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct ModuleImportsChildren<'a, T, V> { pub imports_keyword: Syntax<'a, T, V>, pub left_brace: Syntax<'a, T, V>, pub imports: Syntax<'a, T, V>, pub right_brace: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct ModuleMembershipDeclarationChildren<'a, T, V> { pub module_keyword: Syntax<'a, T, V>, pub name: Syntax<'a, T, V>, pub semicolon: Syntax<'a, T, V>, } #[derive(Debug, Clone)] pub struct PackageExpressionChildren<'a, T, V> { pub keyword: Syntax<'a, T, V>, pub name: Syntax<'a, T, V>, } impl<'a, T, V> SyntaxVariant<'a, T, V> { pub fn iter_children(&'a self) -> SyntaxChildrenIterator<'a, T, V> { SyntaxChildrenIterator { syntax: &self, index: 0, index_back: 0, } } }
hhvm/hphp/hack/src/ppx/dune
(library
 (name ppx_gen_hhi)
 (wrapped false)
 (modules ppx_gen_hhi)
 (kind ppx_rewriter)
 (libraries hhi_get ocaml-compiler-libs.common ppxlib))
OCaml
hhvm/hphp/hack/src/ppx/ppx_gen_hhi.ml
(*
 * Copyright (c) 2013-present, Facebook, Inc.
 * All rights reserved.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the "flow" directory of this source tree.
 *
 *)

(* Ppxlib based PPX, used by both dune and BUCK.
 * This file is the dune entry point. *)
open Ppxlib

(** Read hhi and hsl files from the file system into memory, as AST nodes, to
    be used by the ppx rewriter to replace [%hhi_contents] with this. *)
let get_hhi_contents ~loc hhi_dir hsl_dir =
  let open Ast_builder in
  Hhi_get.get_hhis hhi_dir hsl_dir
  |> List.map (fun (name, contents) ->
         Default.pexp_tuple
           ~loc
           [Default.estring ~loc name; Default.estring ~loc contents])
  |> Default.pexp_array ~loc

let hhi_dir : string option ref = ref None

let hsl_dir : string option ref = ref None

let hhi_cache = ref None

(* Whenever we see [%hhi_contents], replace it with all of the hhis *)
let expand_function ~loc ~path:_ =
  match !hhi_cache with
  | Some result -> result
  | None ->
    let hhi_dir =
      match !hhi_dir with
      | None -> raise (Arg.Bad "-hhi-dir is mandatory")
      | Some dir -> dir
    in
    let hsl_dir =
      match !hsl_dir with
      | None -> raise (Arg.Bad "-hsl-dir is mandatory")
      | Some dir -> dir
    in
    let result = get_hhi_contents ~loc hhi_dir hsl_dir in
    hhi_cache := Some result;
    result

let rule =
  Context_free.Rule.extension
    (Extension.declare
       "hhi_contents"
       Extension.Context.expression
       Ast_pattern.(pstr nil)
       expand_function)

let set_hhi_dir dir = hhi_dir := Some dir

let set_hsl_dir dir = hsl_dir := Some dir

let () =
  Driver.add_arg
    "-hhi-dir"
    (Arg.String set_hhi_dir)
    ~doc:"<dir> directory of the hhis sources";
  Driver.add_arg
    "-hsl-dir"
    (Arg.String set_hsl_dir)
    ~doc:"<dir> directory of the generated HHIs for the HSL";
  Driver.register_transformation ~rules:[rule] "ppx_gen_hhi"
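For orientation, here is a minimal sketch (not part of this repo; the module layout and value names are hypothetical) of what a consumer of this extension point looks like. Since get_hhi_contents expands [%hhi_contents] to an array literal of (name, contents) tuples, the spliced value has type (string * string) array, and the driver must have received -hhi-dir/-hsl-dir, e.g. via a dune stanza such as (preprocess (pps ppx_gen_hhi -- -hhi-dir <dir> -hsl-dir <dir>)):

(* Hypothetical consumer: the rewriter replaces [%hhi_contents] with a literal
   array of (basename, file contents) pairs read at preprocessing time. *)
let hhi_contents : (string * string) array = [%hhi_contents]

(* Print a summary of the embedded files. *)
let () =
  Array.iter
    (fun (name, contents) ->
      Printf.printf "%s: %d bytes\n" name (String.length contents))
    hhi_contents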
OCaml
hhvm/hphp/hack/src/ppx/ppx_gen_hhi_direct.ml
(*
 * Copyright (c) 2013-present, Facebook, Inc.
 * All rights reserved.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the "flow" directory of this source tree.
 *
 *)

(* Ppxlib based PPX, BUCK entry point *)
let () = Ppxlib.Driver.standalone ()
hhvm/hphp/hack/src/ppx-transform/dune
(library
 (name ppx_transform)
 (wrapped false)
 (modules ppx_transform)
 (kind ppx_rewriter)
 (libraries ppxlib)
 (preprocess
  (pps ppxlib.metaquot)))
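Before the implementation, a minimal sketch of how the two attributes interpreted by this rewriter are meant to appear on a type. This is illustrative only: the example type is invented, and it assumes the rewriter is exposed as a deriver named `transform` (its registration lies outside this excerpt):

(* [@transform.opaque] stops the generated traversal from descending into the
   annotated type, constructor, or field; [@transform.explicit] requests a
   dedicated transform function for a single constructor or record field. *)
type ty =
  | Tmixed
  | Tfun of fun_ty [@transform.explicit]
  | Tapply of (string[@transform.opaque]) * ty list

and fun_ty = {
  params: ty list;
  ret: ty;
}
[@@deriving transform]

Following the Ident.field_name scheme in the source below, the generated Pass.t record would then carry optional callbacks named along the lines of on_ty_ty, on_ctor_ty_Tfun, and on_ty_fun_ty, each taking a node and a ~ctx argument and returning the updated context plus a continuation (Stop/Continue/Restart, or a result, depending on the Restart encoding).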
OCaml
hhvm/hphp/hack/src/ppx-transform/ppx_transform.ml
open Ppxlib
open Ast_builder.Default
module IMap = Map.Make (Int)
module SMap = Map.Make (String)
module SSet = Set.Make (String)

module Help = struct
  let unzip xys =
    List.fold_right (fun (x, y) (xs, ys) -> (x :: xs, y :: ys)) xys ([], [])

  let smap_of_list xs =
    List.fold_left (fun acc (k, v) -> SMap.add k v acc) SMap.empty xs

  let value_exn = function
    | Some x -> x
    | _ -> failwith "Expected `Some ...` but got `None`"

  (** Generate nice type variable names: `'a,'b,...,'z,'a1,...` *)
  let tyvar_name i =
    let (c, n) = (i mod 26, i / 26) in
    let a = String.make 1 @@ char_of_int (c + 97)
    and v =
      if n = 0 then
        ""
      else
        string_of_int n
    in
    a ^ v
end

module Names = struct
  let pass_module_name = "Pass"

  let combine_fn_name = "combine"

  let identity_name = "identity"

  let pass_field_pfx = "on"

  let transform_pfx = "transform"

  let traverse_pfx = "traverse"

  let bottom_up_arg = "bottom_up"

  let top_down_arg = "top_down"

  let ctx_arg = "ctx"

  let stop_variant_label = "Stop"

  let continue_variant_label = "Continue"

  let restart_variant_label = "Restart"

  let opaque_attr = "transform.opaque"

  let explicit_attr = "transform.explicit"
end

module Err = struct
  let raise_unsupported_ty loc =
    Location.raise_errorf
      ~loc
      "The `ppx_transform` preprocessor does not support this type"

  let unsupported_ty loc =
    Location.error_extensionf
      ~loc
      "The `ppx_transform` preprocessor does not support this type"

  let unsupported_open loc =
    Location.error_extensionf
      ~loc
      "Unsupported use of transform (you can only use it on closed types)"

  let unsupported_abstract loc =
    Location.error_extensionf
      ~loc
      "Unsupported use of transform (you can only use it on non-abstract types)"

  let unsupported_ctor_args loc =
    Location.error_extensionf
      ~loc
      "Explicit transforms of inline records are not supported by the `ppx_transform` preprocessor"

  let unsupported_ctor_args_empty loc =
    Location.error_extensionf
      ~loc
      "Explicit transforms of nullary data constructors are not supported by the `ppx_transform` preprocessor"
end

module Restart = struct
  type encoding =
    | Encode_as_variant
    | Encode_as_result

  type t =
    | Allow
    | Disallow of encoding

  let allowed = function
    | Allow -> true
    | Disallow _ -> false

  let encoding = function
    | Allow -> Encode_as_variant
    | Disallow encoding -> encoding
end

module Annot = struct
  (** Interpreted annotations for the ppx:
      - [Opaque] means we will not transform a type;
      - [Explicit] means we will generate an explicit [transform_...] function
        for something other than a top-level declaration. It can be attached
        to either a record field or a variant constructor.
      It makes no sense to have both on a given element so we represent them
      as a sum. *)
  type t =
    | Opaque
    | Explicit

  let of_attributes attrs =
    let rec aux = function
      | [] -> None
      | { attr_name = { txt; _ }; _ } :: rest ->
        if String.equal txt Names.explicit_attr then
          Some Explicit
        else if String.equal txt Names.opaque_attr then
          Some Opaque
        else
          aux rest
    in
    aux attrs

  let has_opaque_attr attrs =
    List.exists
      (fun { attr_name = { txt; _ }; _ } -> String.equal Names.opaque_attr txt)
      attrs

  let has_explicit_attr attrs =
    List.exists
      (fun { attr_name = { txt; _ }; _ } ->
        String.equal Names.explicit_attr txt)
      attrs
end

module Core_ty = struct
  (** Get the name of a type variable, failing when the [core_type] is not a
      type variable *)
  let tyvar_exn { ptyp_desc; _ } =
    match ptyp_desc with
    | Ptyp_var nm -> nm
    | Ptyp_any -> gen_symbol ()
    | _ -> failwith "Expected a type variable"

  let ctor_longident_exn { ptyp_desc; _ } =
    match ptyp_desc with
    | Ptyp_constr (ident, _) -> ident
    | _ -> failwith "Expected a type constructor"

  (** Replace each named type variable in a [core_type] using the provided
      substitution. This will fail if the substitution does not contain the
      names of all tyvars encountered *)
  let rename_tyvars t ~subst =
    let visitor =
      object
        inherit Ast_traverse.map as super

        method! core_type core_type =
          let core_type = super#core_type core_type in
          match core_type.ptyp_desc with
          | Ptyp_var nm ->
            ptyp_var ~loc:core_type.ptyp_loc
            @@ Option.value ~default:nm
            @@ SMap.find_opt nm subst
          | _ -> core_type
      end
    in
    visitor#core_type t

  (** Replace tyvars with type constructors corresponding to newtype
      declarations for locally abstract types *)
  let newtypes =
    let visitor =
      object
        inherit Ast_traverse.map as super

        method! core_type core_type =
          let core_type = super#core_type core_type in
          match core_type.ptyp_desc with
          | Ptyp_var nm ->
            let loc = core_type.ptyp_loc in
            ptyp_constr ~loc { loc; txt = lident nm } []
          | _ -> core_type
      end
    in
    (fun ty -> visitor#core_type ty)

  (** Collect the unique tyvar names within a [core_type] *)
  let tyvars =
    let visitor =
      object
        inherit [label list] Ast_traverse.fold as super

        method! core_type core_type acc =
          let acc = super#core_type core_type acc in
          match core_type.ptyp_desc with
          | Ptyp_var v when not @@ List.exists (( = ) v) acc -> v :: acc
          | _ -> acc
      end
    in
    (fun ?(acc = []) ty -> visitor#core_type ty acc)

  let builtin_prims =
    SSet.of_list
      [
        "int";
        "Int.t";
        "int32";
        "Int32.t";
        "int64";
        "Int64.t";
        "nativeint";
        "Nativeint.t";
        "float";
        "Float.t";
        "bool";
        "Bool.t";
        "char";
        "Char.t";
        "string";
        "String.t";
        "bytes";
        "Bytes.t";
        "unit";
        "Unit.t";
      ]

  let builtin_tycons =
    SSet.of_list
      [
        "ref";
        "result";
        "Result.result";
        "Result.t";
        "option";
        "Option.t";
        "list";
        "List.t";
        "array";
        "Array.t";
        "lazy_t";
        "Lazy.t";
        "Either.t";
      ]

  let builtin = SSet.union builtin_prims builtin_tycons

  (** Collect all type constructors referenced in a [core_type] _excluding_
      those in the provided set - note that the excluded ctors are always a
      set of type declarations local to the module we are deriving for *)
  let tycons =
    let visitor excluded =
      object
        inherit [label list SMap.t] Ast_traverse.fold as super

        method! core_type core_type acc =
          if Annot.has_opaque_attr core_type.ptyp_attributes then
            acc
          else
            super#core_type core_type
            @@
            match core_type.ptyp_desc with
            | Ptyp_constr ({ txt; _ }, _) ->
              let (tyname, key, path) =
                match List.rev @@ Longident.flatten_exn txt with
                | ty :: path ->
                  ( String.concat "." @@ List.rev (ty :: path),
                    String.concat "" path,
                    path )
                | _ -> failwith "Bad `Longident`"
              in
              if SSet.mem tyname excluded then
                acc
              else
                SMap.add key path acc
            | _ -> acc
      end
    in
    fun acc ty ~excluded ->
      let visitor = visitor @@ SSet.union builtin excluded in
      visitor#core_type ty acc
end

module Ident = struct
  (** Each field in our [pass] record type corresponds to either some element
      of the types for which we are deriving or some externally defined type
      (see [pass_field] for more information).
      For fields corresponding to types for which we are deriving, a field is
      generated for each of:
      - a type declaration;
      - a constructor of a variant type marked [[@transform.explicit]]; or
      - a field of a record type marked [[@transform.explicit]].
      The [transform_field_ident] type tracks which of these we have and is
      used to generate the name of the field *)
  type t =
    | Type of string
    | Ctor of string * string
    | Field of string * string

  let flatten = function
    | Type ty_name -> [ty_name]
    | Ctor (ty_name, ctor_name) -> [ty_name; ctor_name]
    | Field (ty_name, fld_name) -> [ty_name; fld_name]

  let to_string t = String.concat "_" @@ flatten t

  let kind_string t =
    match t with
    | Type _ -> "ty"
    | Ctor _ -> "ctor"
    | Field _ -> "fld"

  let prefixed pfx t =
    match (t, flatten t) with
    | (Type _, ["t"]) -> [pfx]
    | (_, sfx) ->
      let kind = kind_string t in
      pfx :: kind :: sfx

  let field_name t =
    let sfx = flatten t
    and kind = kind_string t in
    String.concat "_" (Names.pass_field_pfx :: kind :: sfx)

  let transform_fn_name t = String.concat "_" @@ prefixed Names.transform_pfx t

  let traverse_fn_name t = String.concat "_" @@ prefixed Names.traverse_pfx t
end

module Record_field = struct
  (** Simplified [label_declaration] representation *)
  type t = {
    label: label;
    ty: core_type;
    loc: location;
  }

  let of_lbl_decl
      { pld_name = { txt; _ }; pld_type; pld_loc; pld_attributes; _ } ~subst =
    ( { label = txt; ty = Core_ty.rename_tyvars pld_type ~subst; loc = pld_loc },
      Annot.of_attributes pld_attributes )
end

module Variant_ctor = struct
  (** Simplified [constructor_declaration] representation *)
  type t =
    | Constant_ctor of string * location
    | Single_ctor of string * location * core_type
    | Tuple_ctor of string * location * core_type list
    | Record_ctor of string * location * (Record_field.t * Annot.t option) list

  let of_ctor_decl
      { pcd_name = { txt; _ }; pcd_args; pcd_loc; pcd_attributes; _ } ~subst =
    let annot_opt = Annot.of_attributes pcd_attributes in
    match pcd_args with
    | Pcstr_tuple [] -> (Constant_ctor (txt, pcd_loc), annot_opt)
    | Pcstr_tuple [ty] ->
      (Single_ctor (txt, pcd_loc, Core_ty.rename_tyvars ty ~subst), annot_opt)
    | Pcstr_tuple tys ->
      ( Tuple_ctor (txt, pcd_loc, List.map (Core_ty.rename_tyvars ~subst) tys),
        annot_opt )
    | Pcstr_record lbl_decls ->
      ( Record_ctor
          (txt, pcd_loc, List.map (Record_field.of_lbl_decl ~subst) lbl_decls),
        annot_opt )
end

module Variant = struct
  type t = {
    name: string;
    loc: location;
    tyvars: label list;
    ctors: (Variant_ctor.t * Annot.t option) list;
    opaque: bool;
  }
end

module Gadt = struct
  type t = {
    name: string;
    loc: location;
    tyvars: label list;
    ctors: ((Variant_ctor.t * Annot.t option) * core_type) list;
    opaque: bool;
  }
end

module Record = struct
  type t = {
    name: string;
    loc: location;
    tyvars: label list;
    fields: (Record_field.t * Annot.t option) list;
    opaque: bool;
  }
end

module Alias = struct
  type t = {
    name: string;
    loc: location;
    tyvars: label list;
    ty: core_type;
    opaque: bool;
  }
end

module Unsupported = struct
  type kind =
    | Abstract
    | Open

  type t = {
    loc: location;
    ident: Ident.t;
    kind: kind;
  }
end

module Decl = struct
  (** Simplified [type_declaration] representation *)
  type t =
    | Variant of Variant.t
    | Gadt of Gadt.t
    | Record of Record.t
    | Alias of Alias.t
    | Unsupported of Unsupported.t

  let is_gadt = function
    | [] -> false
    | { pcd_res; _ } :: _ ->
      (match pcd_res with
      | Some _ -> true
      | _ -> false)

  let of_ty_decl
      {
        ptype_name = { txt = name; _ };
        ptype_loc = loc;
        ptype_params;
        ptype_kind;
        ptype_manifest;
        ptype_attributes;
        _;
      } =
    let opaque = Annot.has_opaque_attr ptype_attributes
    and (subst, tyvars) =
      let (subst, tyvars) =
        Help.unzip
        @@ List.mapi
             (fun i (ty, _) ->
               let new_nm = Help.tyvar_name i in
               let old_nm = Core_ty.tyvar_exn ty in
               ((old_nm, new_nm), new_nm))
             ptype_params
      in
      let subst =
        List.fold_left (fun acc (k, v) -> SMap.add k v acc) SMap.empty subst
      in
      (subst, tyvars)
    in
    match ptype_kind with
    | Ptype_variant ctor_decls when is_gadt ctor_decls ->
      let ctors =
        List.map
          (fun ctor_decl ->
            ( Variant_ctor.of_ctor_decl ctor_decl ~subst,
              Core_ty.rename_tyvars ~subst @@ Help.value_exn ctor_decl.pcd_res
            ))
          ctor_decls
      in
      Gadt Gadt.{ name; loc; tyvars; ctors; opaque }
    | Ptype_variant ctor_decls ->
      let ctors = List.map (Variant_ctor.of_ctor_decl ~subst) ctor_decls in
      Variant Variant.{ name; loc; tyvars; ctors; opaque }
    | Ptype_record lbl_decls ->
      let fields = List.map (Record_field.of_lbl_decl ~subst) lbl_decls in
      Record Record.{ name; loc; tyvars; fields; opaque }
    | Ptype_abstract ->
      (match ptype_manifest with
      | Some ty ->
        let ty = Core_ty.rename_tyvars ty ~subst in
        Alias Alias.{ name; loc; tyvars; ty; opaque }
      | None ->
        Unsupported
          Unsupported.{ ident = Ident.Type name; loc; kind = Abstract })
    | Ptype_open ->
      Unsupported Unsupported.{ ident = Ident.Type name; loc; kind = Open }

  let opaque_info t =
    match t with
    | Gadt Gadt.{ name; opaque; _ }
    | Variant Variant.{ name; opaque; _ }
    | Record Record.{ name; opaque; _ }
    | Alias Alias.{ name; opaque; _ } ->
      (name, opaque)
    | Unsupported Unsupported.{ ident; _ } ->
      (* Use [to_string] since this is always a type *)
      (Ident.to_string ident, false)

  let name t =
    match t with
    | Gadt Gadt.{ name; _ }
    | Variant Variant.{ name; _ }
    | Record Record.{ name; _ }
    | Alias Alias.{ name; _ } ->
      name
    | Unsupported Unsupported.{ ident; _ } -> Ident.to_string ident

  let is_nonregular_in_core_ty tyname tyvars ty =
    let rec aux { ptyp_desc; _ } =
      match ptyp_desc with
      | Ptyp_constr ({ txt; _ }, tys) ->
        (match Longident.flatten_exn txt with
        | [nm] when String.equal tyname nm -> auxs tys tyvars
        | _ -> false)
      | Ptyp_any
      | Ptyp_var _ ->
        false
      | Ptyp_alias (ty, _) -> aux ty
      | Ptyp_arrow (_, t1, t2) -> aux t1 || aux t2
      | Ptyp_tuple tys
      | Ptyp_class (_, tys) ->
        List.exists aux tys
      | Ptyp_object (flds, _) -> List.exists aux_obj_fld flds
      | Ptyp_variant (row_flds, _, _) -> List.exists aux_row_fld row_flds
      | Ptyp_poly (_, ty) -> aux ty
      | Ptyp_package _
      | Ptyp_extension _ ->
        Err.raise_unsupported_ty ty.ptyp_loc
    and aux_obj_fld fld =
      match fld.pof_desc with
      | Otag (_, ty)
      | Oinherit ty ->
        aux ty
    and aux_row_fld fld =
      match fld.prf_desc with
      | Rtag (_, _, tys) -> List.exists aux tys
      | Rinherit ty -> aux ty
    and auxs tys tyvars =
      match (tys, tyvars) with
      | ([], []) -> false
      | ({ ptyp_desc; _ } :: tys, tv :: tyvars) ->
        (match ptyp_desc with
        | Ptyp_var nm when not @@ String.equal nm tv -> true
        | _ -> auxs tys tyvars)
      | _ -> failwith "Type constructors have different arities"
    in
    aux ty

  let is_nonregular_in_fld tyname tyvars Record_field.{ ty; _ } =
    is_nonregular_in_core_ty tyname tyvars ty

  let is_nonregular_in_ctor tyname tyvars ctor =
    match ctor with
    | Variant_ctor.Constant_ctor _ -> false
    | Variant_ctor.Single_ctor (_, _, ty) ->
      is_nonregular_in_core_ty tyname tyvars ty
    | Variant_ctor.Tuple_ctor (_, _, tys) ->
      List.exists (is_nonregular_in_core_ty tyname tyvars) tys
    | Variant_ctor.Record_ctor (_, _, flds) ->
      List.exists (fun (fld, _) -> is_nonregular_in_fld tyname tyvars fld) flds

  let is_nonregular_in tyname tyvars decl =
    match decl with
    | Gadt Gadt.{ ctors; _ } ->
      List.exists
        (fun ((ctor, _), ret_ty) ->
          is_nonregular_in_ctor tyname tyvars ctor
          || is_nonregular_in_core_ty tyname tyvars ret_ty)
        ctors
    | Variant Variant.{ ctors; _ } ->
      List.exists
        (fun (ctor, _) -> is_nonregular_in_ctor tyname tyvars ctor)
        ctors
    | Record Record.{ fields; _ } ->
      List.exists
        (fun (fld, _) -> is_nonregular_in_fld tyname tyvars fld)
        fields
    | Alias Alias.{ ty; _ } -> is_nonregular_in_core_ty tyname tyvars ty
    | Unsupported _ -> false

  let is_regular tyname tyvars decls =
    not @@ List.exists (is_nonregular_in tyname tyvars) decls
end

module Graph : sig
  (** Generate a topologically sorted list of [Decl.t]s grouped into their
      strongly connected components.
      Why? We want to analyse whether we have a non-regular datatype so we can
      later generate explicit universal quantifiers, and this requires us to
      look for non-regular occurrences on the right hand side of any
      declaration within the strongly connected components.
      This has the pleasant side-effect of generating value bindings without
      redundant `let rec ... and ...`s *)
  val stratify : Decl.t list -> Decl.t list list
end = struct
  let depth_first g =
    let len = Array.length g in
    let marked = Array.make len false
    and stack = Array.make len ~-1 in
    let idx = ref 0 in
    let push v =
      stack.(!idx) <- v;
      incr idx
    in
    let rec aux v =
      if not marked.(v) then (
        marked.(v) <- true;
        List.iter aux g.(v);
        push v
      )
    in
    for v = 0 to len - 1 do
      aux v
    done;
    stack

  let transpose g =
    let len = Array.length g in
    let g' = Array.make len [] in
    let edge src dst = g'.(src) <- dst :: g'.(src) in
    Array.iteri (fun src dsts -> List.iter (fun dst -> edge dst src) dsts) g;
    g'

  let mark g order =
    let len = Array.length g
    and g = transpose g in
    let marked = Array.make len false
    and id = Array.make len ~-1 in
    let count = ref 0 in
    let rec aux v =
      if not marked.(v) then (
        marked.(v) <- true;
        id.(v) <- !count;
        List.iter aux g.(v)
      )
    in
    for i = len - 1 downto 0 do
      let v = order.(i) in
      if not marked.(v) then (
        aux v;
        incr count
      )
    done;
    (id, !count)

  let scc g = mark g @@ depth_first g

  let edges_core_ty fwd ty =
    let rec aux acc ty =
      match ty.ptyp_desc with
      | Ptyp_any
      | Ptyp_var _ ->
        acc
      | Ptyp_alias (ty, _) -> aux acc ty
      | Ptyp_arrow (_, ty1, ty2) -> aux (aux acc ty1) ty2
      | Ptyp_tuple tys
      | Ptyp_class (_, tys) ->
        List.fold_left aux acc tys
      | Ptyp_object (flds, _) -> List.fold_left aux_obj_fld acc flds
      | Ptyp_variant (flds, _, _) -> List.fold_left aux_row_fld acc flds
      | Ptyp_constr (ident, tys) ->
        let idx_opt =
          SMap.find_opt
            (String.concat "." @@ Longident.flatten_exn ident.txt)
            fwd
        in
        let acc =
          Option.value ~default:acc
          @@ Option.map (fun idx -> idx :: acc) idx_opt
        in
        List.fold_left aux acc tys
      | Ptyp_poly (_, ty) -> aux acc ty
      | Ptyp_package _
      | Ptyp_extension _ ->
        Err.raise_unsupported_ty ty.ptyp_loc
    and aux_obj_fld acc fld =
      match fld.pof_desc with
      | Oinherit ty
      | Otag (_, ty) ->
        aux acc ty
    and aux_row_fld acc fld =
      match fld.prf_desc with
      | Rtag (_, _, tys) -> List.fold_left aux acc tys
      | Rinherit ty -> aux acc ty
    in
    aux [] ty

  let edges_lbl_decl fwd Record_field.{ ty; _ } = edges_core_ty fwd ty

  let edges_ctor_decl fwd ctor =
    match ctor with
    | Variant_ctor.Constant_ctor _ -> []
    | Variant_ctor.Single_ctor (_, _, ty) -> edges_core_ty fwd ty
    | Variant_ctor.Tuple_ctor (_, _, tys) ->
      List.concat_map (edges_core_ty fwd) tys
    | Variant_ctor.Record_ctor (_, _, flds) ->
      List.concat_map (fun (fld, _) -> edges_lbl_decl fwd fld) flds

  let edges fwd decl =
    match decl with
    | Decl.Alias Alias.{ ty; _ } -> edges_core_ty fwd ty
    | Decl.Variant Variant.{ ctors; _ } ->
      List.concat_map (fun (ctor, _) -> edges_ctor_decl fwd ctor) ctors
    | Decl.Gadt Gadt.{ ctors; _ } ->
      List.concat_map (fun ((ctor, _), _) -> edges_ctor_decl fwd ctor) ctors
    | Decl.Record Record.{ fields; _ } ->
      List.concat_map (fun (fld, _) -> edges_lbl_decl fwd fld) fields
    | _ -> []

  let stratify decls =
    let (fwd, bwd) =
      let ls = List.mapi (fun i decl -> (Decl.name decl, i, decl)) decls in
      List.fold_left
        (fun (sm, im) (nm, i, ty_decl) ->
          (SMap.add nm i sm, IMap.add i ty_decl im))
        (SMap.empty, IMap.empty)
        ls
    in
    let adj = Array.of_list @@ List.map (edges fwd) decls in
    let (strata, nstrata) = scc adj in
    let out = Array.make nstrata [] in
    Array.iteri
      (fun idx stratum -> out.(stratum) <- IMap.find idx bwd :: out.(stratum))
      strata;
    List.rev @@ Array.to_list out
end

module Analyse = struct
  type t = {
    decls: Decl.t list;
    opaque_map: bool SMap.t;
    decl_names: SSet.t;
  }

  let analyse tds =
    let decls = List.map Decl.of_ty_decl tds in
    let (opaque_map, decl_names) =
      List.fold_left
        (fun (mp, st) decl ->
          let (name, opaque) = Decl.opaque_info decl in
          (SMap.add name opaque mp, SSet.add name st))
        (SMap.empty, SSet.empty)
        decls
    in
    { decls; opaque_map; decl_names }
end

module Transform_field : sig
  type definition =
    | Variant_ctors of string * (Variant_ctor.t * Annot.t option) list
    | Record_fields of string * (Record_field.t * Annot.t option) list
    | Core_ty of core_type

  type type_info =
    | Regular of bool
    | Locally_abstract

  type def = {
    ident: Ident.t;
    loc: location;
    ty: core_type;
    tyvars: label list;
    definition: definition;
    type_info: type_info;
  }

  type t =
    | Field of def
    | Unsupported of Unsupported.t

  val fields : Analyse.t -> t list list
end = struct
  type definition =
    | Variant_ctors of string * (Variant_ctor.t * Annot.t option) list
    | Record_fields of string * (Record_field.t * Annot.t option) list
    | Core_ty of core_type

  type type_info =
    | Regular of bool
    | Locally_abstract

  type def = {
    ident: Ident.t;
    loc: location;
    ty: core_type;
    tyvars: label list;
    definition: definition;
    type_info: type_info;
  }

  type t =
    | Field of def
    | Unsupported of Unsupported.t

  (** Try to create a [Transform_field.t] from a [variant_ctor]. If the
      variant constructor has an inline record argument we fail since the type
      doesn't exist at the top level, so we cannot define a function accepting
      such a type *)
  let variant_ctor_field_opt ctor ~type_name =
    let open Variant_ctor in
    match ctor with
    | Record_ctor _
    | Constant_ctor _ ->
      None
    | Single_ctor (ctor_name, loc, ty) ->
      let tyvars = List.rev @@ Core_ty.tyvars ty in
      let ident = Ident.Ctor (type_name, ctor_name) in
      Some
        (Field
           {
             ident;
             tyvars;
             ty;
             loc;
             type_info = Regular true;
             definition = Core_ty ty;
           })
    | Tuple_ctor (ctor_name, loc, tys) ->
      let ident = Ident.Ctor (type_name, ctor_name) in
      let tyvars =
        List.rev @@ List.fold_left (fun acc ty -> Core_ty.tyvars ~acc ty) [] tys
      in
      let ty = ptyp_tuple ~loc tys in
      Some
        (Field
           {
             ident;
             tyvars;
             ty;
             loc;
             type_info = Regular true;
             definition = Core_ty ty;
           })

  (** Create a list of [Transform_field.t]s for a variant type declaration.
      If the entire variant is marked [[@transform.opaque]] this will be the
      empty list. Otherwise, we will generate fields corresponding to:
      - the top-level variant type
      - each constructor marked as [[@transform.explicit]] *)
  let variant_fields Variant.{ name; loc; tyvars; ctors; opaque } ~decls =
    if opaque then
      []
    else
      let ident = Ident.Type name in
      let ty =
        ptyp_constr ~loc { loc; txt = lident name }
        @@ List.map (ptyp_var ~loc) tyvars
      in
      let ty_field =
        Field
          {
            ident;
            tyvars;
            ty;
            loc;
            definition = Variant_ctors (name, ctors);
            type_info = Regular (Decl.is_regular name tyvars decls);
          }
      in
      let ctor_fields =
        List.filter_map
          (fun (ctor, annot_opt) ->
            Option.bind annot_opt (function
                | Annot.Opaque -> None
                | Annot.Explicit -> variant_ctor_field_opt ctor ~type_name:name))
          ctors
      in
      ty_field :: ctor_fields

  let gadt_fields Gadt.{ name; loc; tyvars; ctors; opaque } =
    if opaque then
      []
    else
      let ident = Ident.Type name in
      let ty =
        ptyp_constr ~loc { loc; txt = lident name }
        @@ List.map (ptyp_var ~loc) tyvars
      in
      let ty_field =
        Field
          {
            ident;
            tyvars;
            ty;
            loc;
            type_info = Locally_abstract;
            definition = Variant_ctors (name, List.map fst ctors);
          }
      in
      let ctor_fields =
        List.filter_map
          (fun ((ctor, annot_opt), _) ->
            Option.bind annot_opt (function
                | Annot.Opaque -> None
                | Annot.Explicit -> variant_ctor_field_opt ctor ~type_name:name))
          ctors
      in
      ty_field :: ctor_fields

  let record_field_field_opt Record_field.{ label; ty; loc } ~type_name =
    let ident = Ident.Field (type_name, label) in
    let tyvars = List.rev @@ Core_ty.tyvars ty in
    Field
      {
        ident;
        tyvars;
        ty;
        loc;
        type_info = Regular true;
        definition = Core_ty ty;
      }

  (** Create a list of [Transform_field.t]s for a record type declaration.
      If the entire record is marked [[@transform.opaque]] this will be the
      empty list. Otherwise, we will generate fields corresponding to:
      - the top-level record type
      - each field marked as [[@transform.explicit]] *)
  let record_fields Record.{ name; loc; tyvars; fields; opaque } ~decls =
    if opaque then
      []
    else
      let ident = Ident.Type name in
      let ty =
        ptyp_constr ~loc { loc; txt = lident name }
        @@ List.map (ptyp_var ~loc) tyvars
      in
      let ty_field =
        Field
          {
            ident;
            tyvars;
            ty;
            loc;
            definition = Record_fields (name, fields);
            type_info = Regular (Decl.is_regular name tyvars decls);
          }
      in
      let field_fields =
        List.filter_map
          (fun (fld, annot_opt) ->
            Option.bind annot_opt (function
                | Annot.Opaque -> None
                | Annot.Explicit ->
                  Some (record_field_field_opt fld ~type_name:name)))
          fields
      in
      ty_field :: field_fields

  (** Try to generate a [Transform_field.t] for an alias type declaration.
      This will be [None] if the alias is marked [[@transform.opaque]] *)
  let alias_transform_field_opt Alias.{ name; tyvars; ty; opaque; loc; _ } ~decls =
    if opaque then
      None
    else
      let ident = Ident.Type name in
      Some
        (Field
           {
             ident;
             tyvars;
             ty =
               ptyp_constr ~loc { loc; txt = lident name }
               @@ List.map (ptyp_var ~loc) tyvars;
             loc;
             definition = Core_ty ty;
             type_info = Regular (Decl.is_regular name tyvars decls);
           })

  let decl_fields decl ~decls =
    match decl with
    | Decl.Variant variant -> variant_fields variant ~decls
    | Decl.Gadt gadt -> gadt_fields gadt
    | Decl.Record record -> record_fields record ~decls
    | Decl.Alias alias -> Option.to_list @@ alias_transform_field_opt alias ~decls
    | Decl.Unsupported unsupported -> [Unsupported unsupported]

  let fields Analyse.{ decls; _ } =
    List.filter_map
      (fun decls ->
        match List.concat_map (decl_fields ~decls) decls with
        | [] -> None
        | xs -> Some xs)
    @@ Graph.stratify decls
end

module Pass_field : sig
  val fields : Analyse.t -> label list list
end = struct
  let record_field_fields acc (Record_field.{ ty; _ }, annot_opt) ~excluded =
    match annot_opt with
    | Some Annot.Opaque -> acc
    | _ -> Core_ty.tycons acc ty ~excluded

  let variant_ctor_fields acc (variant_ctor, annot_opt) ~excluded =
    match annot_opt with
    | Some Annot.Opaque -> acc
    | _ ->
      let open Variant_ctor in
      (match variant_ctor with
      | Constant_ctor _ -> acc
      | Single_ctor (_, _, ty) -> Core_ty.tycons acc ty ~excluded
      | Tuple_ctor (_, _, tys) ->
        List.fold_left (Core_ty.tycons ~excluded) acc tys
      | Record_ctor (_, _, flds) ->
        List.fold_left (record_field_fields ~excluded) acc flds)

  let decl_fields acc decl ~excluded =
    let open Decl in
    match decl with
    | Variant Variant.{ ctors; opaque; _ } ->
      if opaque then
        acc
      else
        List.fold_left (variant_ctor_fields ~excluded) acc ctors
    | Gadt Gadt.{ ctors; opaque; _ } ->
      if opaque then
        acc
      else
        List.fold_left
          (fun acc (ctor, _) -> variant_ctor_fields acc ctor ~excluded)
          acc
          ctors
    | Record Record.{ fields; opaque; _ } ->
      if opaque then
        acc
      else
        List.fold_left (record_field_fields ~excluded) acc fields
    | Alias Alias.{ ty; opaque; _ } ->
      if opaque then
        acc
      else
        Core_ty.tycons acc ty ~excluded
    | _ -> acc

  let fields Analyse.{ decls; decl_names = excluded; _ } =
    List.map snd
    @@ SMap.bindings
    @@ List.fold_left (decl_fields ~excluded) SMap.empty decls
end

module Gen_pass : sig
  val gen_sig :
    Transform_field.t list ->
    label list list ->
    loc:location ->
    allow_restart:Restart.t ->
    signature_item

  val gen_str :
    Transform_field.t list ->
    label list list ->
    loc:location ->
    allow_restart:Restart.t ->
    structure_item
end = struct
  let mk_tag txt ~ty ~loc = rtag ~loc { loc; txt } false [ty]

  let gen_continuation_ty ty ~loc ~allow_restart =
    match allow_restart with
    | Restart.Allow ->
      let tags =
        List.map
          (mk_tag ~loc ~ty)
          Names.
            [stop_variant_label; continue_variant_label; restart_variant_label]
      in
      ptyp_variant ~loc tags Closed None
    | Restart.(Disallow Encode_as_variant) ->
      let tags =
        List.map
          (mk_tag ~loc ~ty)
          Names.[stop_variant_label; continue_variant_label]
      in
      ptyp_variant ~loc tags Closed None
    | Restart.(Disallow Encode_as_result) ->
      ptyp_constr ~loc { loc; txt = Lident "result" } [ty; ty]

  let gen_transform_field_def
      Transform_field.{ ident; tyvars; ty; _ } ~loc ~allow_restart =
    let ty =
      match ident with
      | Ident.Type name ->
        ptyp_constr ~loc { loc; txt = Lident name }
        @@ List.map (ptyp_var ~loc) tyvars
      | _ -> { ty with ptyp_attributes = [] }
    in
    let cont_ty = gen_continuation_ty ty ~loc ~allow_restart in
    let ctx = ptyp_var ~loc Names.ctx_arg in
    let transform_ty =
      [%type: ([%t ty] -> ctx:[%t ctx] -> [%t ctx] * [%t cont_ty]) option]
    in
    let type_ =
      ptyp_poly ~loc (List.map (fun txt -> { loc; txt }) tyvars) transform_ty
    in
    label_declaration
      ~loc
      ~name:{ loc; txt = Ident.field_name ident }
      ~mutable_:Immutable
      ~type_

  let gen_transform_field fld ~loc ~allow_restart =
    match fld with
    | Transform_field.Field def -> gen_transform_field_def def ~loc ~allow_restart
    | Transform_field.Unsupported Unsupported.{ ident; kind; _ } ->
      (* Pick the diagnostic matching the kind of unsupported declaration, as
         in [Gen_identity.gen_transform_field] below *)
      let err =
        match kind with
        | Unsupported.Abstract -> Err.unsupported_abstract loc
        | Unsupported.Open -> Err.unsupported_open loc
      in
      let type_ = ptyp_extension ~loc err in
      label_declaration
        ~loc
        ~name:{ loc; txt = Ident.field_name ident }
        ~mutable_:Immutable
        ~type_

  let gen_pass_field path ~loc =
    (* Given some identifier for a type, we expect the corresponding [pass] ty
       to be located in a module named [Transform] *)
    let (txt, field_name) =
      ( Longident.parse
        @@ String.concat "."
        @@ List.rev ("t" :: Names.pass_module_name :: path),
        String.concat "_" (Names.pass_field_pfx :: List.rev path) )
    in
    let pass_ty = ptyp_constr ~loc { loc; txt } [ptyp_var ~loc Names.ctx_arg] in
    label_declaration
      ~loc
      ~name:{ loc; txt = field_name }
      ~mutable_:Immutable
      ~type_:[%type: [%t pass_ty] option]

  let params loc = [(ptyp_var ~loc Names.ctx_arg, (NoVariance, NoInjectivity))]

  let gen_kind transform_flds pass_flds ~loc ~allow_restart =
    Ptype_record
      (List.map (gen_transform_field ~loc ~allow_restart) transform_flds
      @ List.map (gen_pass_field ~loc) pass_flds)

  let gen_sig transform_flds pass_flds ~loc ~allow_restart =
    let params = params loc
    and kind = gen_kind transform_flds pass_flds ~loc ~allow_restart in
    psig_type
      ~loc
      Nonrecursive
      [
        type_declaration
          ~loc
          ~name:{ loc; txt = "t" }
          ~params
          ~cstrs:[]
          ~private_:Public
          ~manifest:None
          ~kind;
      ]

  let gen_str transform_flds pass_flds ~loc ~allow_restart =
    let params = params loc
    and kind = gen_kind transform_flds pass_flds ~loc ~allow_restart in
    pstr_type
      ~loc
      Nonrecursive
      [
        type_declaration
          ~loc
          ~name:{ loc; txt = "t" }
          ~params
          ~cstrs:[]
          ~private_:Public
          ~manifest:None
          ~kind;
      ]
end

module Gen_identity : sig
  val gen_sig : location -> signature_item

  val gen_str :
    Transform_field.t list -> label list list -> loc:location -> structure_item
end = struct
  let gen_transform_field_def Transform_field.{ ident; loc; _ } =
    let fld_name = Ident.field_name ident in
    ({ loc; txt = Lident fld_name }, [%expr None])

  let gen_transform_field fld =
    match fld with
    | Transform_field.Field def -> gen_transform_field_def def
    | Transform_field.Unsupported Unsupported.{ loc; ident; kind } ->
      let x =
        match kind with
        | Unsupported.Abstract -> Err.unsupported_abstract loc
        | Unsupported.Open -> Err.unsupported_open loc
      in
      ({ loc; txt = Lident (Ident.field_name ident) }, pexp_extension ~loc x)

  let gen_pass_field path ~loc =
    let txt =
      lident @@ String.concat "_" (Names.pass_field_pfx :: List.rev path)
    in
    let expr =
      [%expr
None] in ({ loc; txt }, expr) let gen_str transform_fields pass_fields ~loc = let transform_exprs = List.map gen_transform_field transform_fields and pass_exprs = List.map (gen_pass_field ~loc) pass_fields in let pat = ppat_var ~loc { loc; txt = Names.identity_name } in let body_expr = pexp_record ~loc (transform_exprs @ pass_exprs) None in let expr = [%expr (fun _ -> [%e body_expr])] in pstr_value ~loc Nonrecursive [value_binding ~loc ~pat ~expr] let gen_sig loc = let name = { loc; txt = Names.identity_name } in let cstr_type = ptyp_constr ~loc { loc; txt = lident "t" } [ptyp_var ~loc Names.ctx_arg] in let type_ = [%type: unit -> [%t cstr_type]] in psig_value ~loc @@ value_description ~loc ~name ~type_ ~prim:[] end module Gen_combine = struct let gen_transform_field_def elem1 elem2 Transform_field.{ ident; loc; _ } ~allow_restart = let fld_name = Ident.field_name ident in let fld_lident = { loc; txt = Lident fld_name } in let elem_expr = pexp_ident ~loc { loc; txt = Lident "elem" } in let elem_pat = ppat_var ~loc { loc; txt = "elem" } in let ctx = pexp_ident ~loc { loc; txt = Lident Names.ctx_arg } in let t1 = pexp_ident ~loc { loc; txt = Lident "t1" } and t2 = pexp_ident ~loc { loc; txt = Lident "t2" } in let ident1 = pexp_ident ~loc { loc; txt = Lident elem1 } and ident2 = pexp_ident ~loc { loc; txt = Lident elem2 } in let proj1 = pexp_field ~loc ident1 fld_lident and proj2 = pexp_field ~loc ident2 fld_lident in let app1 = pexp_apply ~loc t1 [(Nolabel, elem_expr); (Labelled Names.ctx_arg, ctx)] in let app2 = pexp_apply ~loc t2 [(Nolabel, elem_expr); (Labelled Names.ctx_arg, ctx)] in let constr = match Restart.encoding allow_restart with | Restart.Encode_as_variant -> ppat_variant ~loc Names.continue_variant_label (Some elem_pat) | Restart.Encode_as_result -> ppat_construct ~loc { loc; txt = lident "Ok" } (Some elem_pat) in let match_expr = [%expr match [%e app1] with | (ctx, [%p constr]) -> [%e app2] | otherwise -> otherwise] in let fn_expr = pexp_fun ~loc Nolabel None (ppat_var ~loc { loc; txt = "elem" }) @@ pexp_fun ~loc (Labelled Names.ctx_arg) None (ppat_var ~loc { loc; txt = Names.ctx_arg }) match_expr in let expr = [%expr match ([%e proj1], [%e proj2]) with | (Some t1, Some t2) -> Some [%e fn_expr] | (None, _) -> [%e proj2] | _ -> [%e proj1]] in ({ loc; txt = Lident fld_name }, expr) let gen_transform_field elem1 elem2 fld ~allow_restart = match fld with | Transform_field.Field def -> gen_transform_field_def elem1 elem2 def ~allow_restart | Transform_field.Unsupported Unsupported.{ ident; loc; kind } -> let fld_name = Ident.field_name ident in let ext = match kind with | Unsupported.Abstract -> Err.unsupported_abstract loc | Unsupported.Open -> Err.unsupported_open loc in let expr = pexp_extension ~loc ext in ({ loc; txt = Lident fld_name }, expr) let gen_pass_field elem1 elem2 loc path = let (fn_name, fld_name) = ( Longident.parse @@ String.concat "." 
@@ List.rev (Names.combine_fn_name :: Names.pass_module_name :: path), String.concat "_" (Names.pass_field_pfx :: List.rev path) ) in let fn_expr = pexp_ident ~loc { loc; txt = fn_name } in let fld_lident = { loc; txt = Lident fld_name } in let ident1 = pexp_ident ~loc { loc; txt = Lident elem1 } and ident2 = pexp_ident ~loc { loc; txt = Lident elem2 } in let proj1 = pexp_field ~loc ident1 fld_lident and proj2 = pexp_field ~loc ident2 fld_lident in let expr = [%expr match ([%e proj1], [%e proj2]) with | (Some p1, Some p2) -> Some ([%e fn_expr] p1 p2) | (Some _, _) -> [%e proj1] | _ -> [%e proj2]] in ({ loc; txt = Lident fld_name }, expr) let gen_str transform_fields pass_fields ~loc ~allow_restart = let elem1 = "p1" and elem2 = "p2" in let transform_exprs = List.map (gen_transform_field elem1 elem2 ~allow_restart) transform_fields and pass_exprs = List.map (gen_pass_field elem1 elem2 loc) pass_fields in let pat = ppat_var ~loc { loc; txt = Names.combine_fn_name } and body_expr = pexp_record ~loc (transform_exprs @ pass_exprs) None in let pat1 = ppat_var ~loc { loc; txt = elem1 } and pat2 = ppat_var ~loc { loc; txt = elem2 } in let expr = [%expr (fun [%p pat1] [%p pat2] -> [%e body_expr])] in pstr_value ~loc Nonrecursive [value_binding ~loc ~pat ~expr] let gen_sig loc = let name = { loc; txt = Names.combine_fn_name } in let pass_ty = ptyp_constr ~loc { loc; txt = lident "t" } [ptyp_var ~loc Names.ctx_arg] in let type_ = [%type: [%t pass_ty] -> [%t pass_ty] -> [%t pass_ty]] in psig_value ~loc @@ value_description ~loc ~name ~type_ ~prim:[] end module Gen_fn = struct let gen_fun_ty ty ~loc = let pass_ty = ptyp_constr ~loc { loc; txt = Longident.parse @@ String.concat "." [Names.pass_module_name; "t"]; } [ptyp_var ~loc Names.ctx_arg] in let ctx = ptyp_var ~loc Names.ctx_arg in [%type: [%t ty] -> ctx:[%t ctx] -> top_down:[%t pass_ty] -> bottom_up:[%t pass_ty] -> [%t ty]] (** Generate a value binding for a [transform_...] or [traverse_...] function given the name, type info, the pattern for the transformed/traversed element, and the body expression. *) let gen_str fn_name ty tyvars type_info elem_pat body_expr loc = let fn_pat = ppat_var ~loc { loc; txt = fn_name } in let (pat, expr) = match type_info with | Transform_field.Regular _regular -> let expr = [%expr (fun [%p elem_pat] ~ctx ~top_down ~bottom_up -> [%e body_expr])] and pat = (* TODO: we currently check if each type declaration is non-regular within its strongly-connected components and mark each component as regular / non-regular individually in order to generate explicit quantifiers when necessary. This seems to be the wrong thing - we actually want explicit quantifiers if _any_ component is non-regular. For now, we just generate an explicitly quantified signature even when it isn't required.
*) (* if regular then fn_pat else *) ppat_constraint ~loc fn_pat (ptyp_poly ~loc (List.map (fun txt -> { loc; txt }) tyvars) @@ gen_fun_ty ty ~loc) in (pat, expr) | Transform_field.Locally_abstract -> let expr = let ty = Core_ty.newtypes ty in let expr = [%expr fun ([%p elem_pat] : [%t ty]) ~ctx ~top_down ~bottom_up : [%t ty] -> [%e body_expr]] in List.fold_right (fun tyvar expr -> pexp_newtype ~loc { loc; txt = tyvar } expr) tyvars expr and pat = (* We always assume that a GADT needs explicit quantifiers. TODO: I think this is true since we are generating a recursive function, but need to confirm *) ppat_constraint ~loc fn_pat (ptyp_poly ~loc (List.map (fun txt -> { loc; txt }) tyvars) @@ gen_fun_ty ty ~loc) in (pat, expr) in value_binding ~loc ~pat ~expr end module Gen_transform = struct let gen_str_def Transform_field.{ ident; ty; tyvars; loc; type_info; _ } ~should_traverse ~allow_restart = let field_name = Longident.parse @@ String.concat "." [Names.pass_module_name; Ident.field_name ident] in let project_bottom = pexp_field ~loc (pexp_ident ~loc { loc; txt = Lident Names.bottom_up_arg }) { loc; txt = field_name } and project_top = pexp_field ~loc (pexp_ident ~loc { loc; txt = Lident Names.top_down_arg }) { loc; txt = field_name } in let fn_name = Ident.transform_fn_name ident in let fn_ident = pexp_ident ~loc { loc; txt = lident fn_name } and traverse_ident = let txt = lident @@ Ident.traverse_fn_name ident in pexp_ident ~loc { loc; txt } in let (restart_pat, continue_pat, stop_pat, continue_stop_pat) = let elem_pat = ppat_var ~loc { loc; txt = "elem" } in let (cont_pat, stop_pat) = match Restart.encoding allow_restart with | Restart.Encode_as_variant -> ( ppat_variant ~loc Names.continue_variant_label @@ Some elem_pat, ppat_variant ~loc Names.stop_variant_label @@ Some elem_pat ) | _ -> ( ppat_construct ~loc { loc; txt = lident "Ok" } @@ Some elem_pat, ppat_construct ~loc { loc; txt = lident "Error" } @@ Some elem_pat ) in let restart_pat = ppat_variant ~loc Names.restart_variant_label @@ Some elem_pat in (restart_pat, cont_pat, stop_pat, ppat_or ~loc cont_pat stop_pat) in let rest_match = match allow_restart with | Restart.Allow -> [%expr match [%e project_bottom] with | None -> elem | Some bu -> (match bu elem ~ctx with | (_ctx, [%p continue_stop_pat]) -> elem | (_ctx, [%p restart_pat]) -> [%e fn_ident] elem ~ctx ~top_down ~bottom_up)] | Restart.(Disallow _) -> [%expr match [%e project_bottom] with | None -> elem | Some bu -> (match bu elem ~ctx with | (_ctx, [%p continue_stop_pat]) -> elem)] in let (rest, ctx_pat) = if not should_traverse then ((fun _ -> rest_match), ppat_var ~loc { loc; txt = "_ctx" }) else ( (fun ctx_expr -> [%expr let elem = [%e traverse_ident] elem ~ctx:[%e ctx_expr] ~top_down ~bottom_up in [%e rest_match]]), ppat_var ~loc { loc; txt = "td_ctx" } ) in let body_expr = match allow_restart with | Restart.Allow -> [%expr match [%e project_top] with | Some td -> (match td elem ~ctx with | (_ctx, [%p stop_pat]) -> elem | ([%p ctx_pat], [%p continue_pat]) -> [%e rest @@ pexp_ident ~loc { loc; txt = lident "td_ctx" }] | (_ctx, [%p restart_pat]) -> [%e fn_ident] elem ~ctx ~top_down ~bottom_up) | _ -> [%e rest @@ pexp_ident ~loc { loc; txt = lident "ctx" }]] | Restart.(Disallow _) -> [%expr match [%e project_top] with | Some td -> (match td elem ~ctx with | (_ctx, [%p stop_pat]) -> elem | ([%p ctx_pat], [%p continue_pat]) -> [%e rest @@ pexp_ident ~loc { loc; txt = lident "td_ctx" }]) | _ -> [%e rest @@ pexp_ident ~loc { loc; txt = lident "ctx" }]] in let
elem_pat = ppat_var ~loc { loc; txt = "elem" } in Gen_fn.gen_str fn_name ty tyvars type_info elem_pat body_expr loc let gen_str fld ~should_traverse ~allow_restart = match fld with | Transform_field.Field def -> gen_str_def def ~should_traverse ~allow_restart | Transform_field.Unsupported Unsupported.{ loc; kind; _ } -> let err = match kind with | Unsupported.Abstract -> Err.unsupported_abstract loc | Unsupported.Open -> Err.unsupported_open loc in value_binding ~loc ~pat:(ppat_any ~loc) ~expr:(pexp_extension ~loc @@ err) let gen_sig_def Transform_field.{ ident; ty; loc; _ } = let fn_name = Ident.transform_fn_name ident and type_ = Gen_fn.gen_fun_ty ty ~loc in psig_value ~loc @@ value_description ~loc ~name:{ loc; txt = fn_name } ~type_ ~prim:[] let gen_sig fld = match fld with | Transform_field.Field def -> gen_sig_def def | Transform_field.Unsupported Unsupported.{ ident; loc; kind } -> let fn_name = Ident.transform_fn_name ident in let ext = match kind with | Unsupported.Abstract -> Err.unsupported_abstract loc | Unsupported.Open -> Err.unsupported_open loc in let type_ = ptyp_extension ~loc ext in psig_value ~loc @@ value_description ~loc ~name:{ loc; txt = fn_name } ~type_ ~prim:[] end module Gen_traverse = struct let gen_core_ty ty ~binding ~opaque_map = let rec aux ty binding = let loc = ty.ptyp_loc in let dflt_pat = ppat_var ~loc { loc; txt = binding } in let default = (dflt_pat, None) in let unsupported = (dflt_pat, Some (pexp_extension ~loc @@ Err.unsupported_ty loc)) in if Annot.has_opaque_attr ty.ptyp_attributes then default else match ty.ptyp_desc with | Ptyp_alias (ty, _) -> aux ty binding | Ptyp_arrow (arg_lbl, ty_dom, ty_codom) -> aux_arrow arg_lbl ty_dom ty_codom binding loc | Ptyp_constr ({ loc; txt }, tys) -> aux_constr txt tys binding loc | Ptyp_poly (_, ty) -> aux ty binding | Ptyp_tuple tys -> aux_tuple tys binding loc | Ptyp_variant (row_flds, closedflag, lbl_opts) -> aux_variant row_flds closedflag lbl_opts ~binding ~loc ~ty | Ptyp_any | Ptyp_var _ -> default | Ptyp_object _ | Ptyp_class _ | Ptyp_extension _ | Ptyp_package _ -> unsupported and aux_variant row_flds closed_flag lbl_opts ~binding ~loc ~ty = let pat = ppat_var ~loc { loc; txt = binding } in let has_lbl = match lbl_opts with | None -> (fun _ -> true) | Some lbls -> (fun lbl -> List.exists String.(equal lbl.txt) lbls) in let pat_expr_opts = List.map (aux_row_fld ~binding ~has_lbl ~loc ~ty) row_flds in let expr_opt = let (cases, has_untraversed) = List.fold_left (fun (cases, has_untraversed) (pat, expr_opt) -> match expr_opt with | None -> (cases, true) | Some expr -> (case ~lhs:pat ~guard:None ~rhs:expr :: cases, has_untraversed)) ([], false) pat_expr_opts in let empty = match cases with | [] -> true | _ -> false in if empty then None else let dflt_case_opt = let dflt_case = case ~lhs:pat ~guard:None ~rhs:(pexp_ident ~loc { loc; txt = lident binding }) in if has_untraversed then Some dflt_case else match closed_flag with | Open -> Some dflt_case | Closed -> None in let cases = List.rev @@ match dflt_case_opt with | None -> cases | Some case -> case :: cases in let scrut_expr = pexp_ident ~loc { loc; txt = lident binding } in Some (pexp_match ~loc scrut_expr cases) in (pat, expr_opt) and aux_row_fld row_fld ~binding ~has_lbl ~loc ~ty = let default = (ppat_var ~loc { loc; txt = binding }, None) in match row_fld.prf_desc with | Rtag (_, _, []) -> default | Rtag (lbl, _, _) when not @@ has_lbl lbl -> default | Rtag (lbl, _flag, tys) -> (match aux (ptyp_tuple ~loc tys) (binding ^ "_elem") with | (tuple_pat, 
(Some _ as tuple_expr)) -> let pat = ppat_variant ~loc lbl.txt (Some tuple_pat) in let expr = pexp_variant ~loc lbl.txt tuple_expr in (pat, Some expr) | _ -> default) | Rinherit extend_ty -> let binding = binding ^ "_extend" in (match aux extend_ty binding with | (_pat, Some expr) -> let ty_pat = ppat_type ~loc @@ Core_ty.ctor_longident_exn extend_ty in let pat = ppat_alias ~loc ty_pat { loc; txt = binding } in let coerce_expr = pexp_coerce ~loc expr None ty in (pat, Some coerce_expr) | _ -> default) and aux_constr lident tys binding loc = let flat_lident = Longident.flatten_exn lident in match (flat_lident, tys) with (* -- Common type constructors ---------------------------------------- *) | ((["option"] | ["Option"; "t"]), [ty]) -> aux_option ty binding loc | ((["list"] | ["List"; "t"]), [ty]) -> aux_functor [%expr Stdlib.List.map] ty binding loc | ((["array"] | ["Array"; "t"]), [ty]) -> aux_functor [%expr Stdlib.Array.map] ty binding loc | ((["lazy_t"] | ["Lazy"; "t"]), [ty]) -> aux_lazy ty binding loc | ((["result"] | ["Result"; ("result" | "t")]), [ty_ok; ty_err]) -> aux_result ty_ok ty_err binding loc | (["Either"; "t"], [ty_left; ty_right]) -> aux_either ty_left ty_right binding loc | (["ref"], [ty]) -> aux_ref ty binding loc (* -- Common primitives ----------------------------------------------- *) | ([ty], []) when SSet.mem ty Core_ty.builtin_prims -> (ppat_var ~loc { loc; txt = binding }, None) (* -- - *) | ([decl_name], _) when SMap.exists (fun nm _ -> String.equal nm decl_name) opaque_map -> let pat = ppat_var ~loc { loc; txt = binding } in let opaque = SMap.find decl_name opaque_map in let expr_opt = if opaque then None else let fn_name = Ident.(transform_fn_name @@ Type decl_name) in let fn_expr = pexp_ident ~loc { loc; txt = Lident fn_name } in let binding_expr = pexp_ident ~loc { loc; txt = Lident binding } in Some [%expr [%e fn_expr] [%e binding_expr] ~ctx ~top_down ~bottom_up] in (pat, expr_opt) | (ids, _) -> let pat = ppat_var ~loc { loc; txt = binding } in let binding_expr = pexp_ident ~loc { loc; txt = Lident binding } in let (ty, path) = match List.rev ids with | ty :: path -> (ty, path) | _ -> failwith "Bad `Longident`" in let identity_lident = Longident.parse @@ String.concat "." @@ List.rev (Names.identity_name :: Names.pass_module_name :: path) in let identity_expr = pexp_ident ~loc { loc; txt = identity_lident } in let pass_fld_nm = String.concat "_" (Names.pass_field_pfx :: path) in let pass_fld = { loc; txt = Longident.parse @@ String.concat "." [Names.pass_module_name; pass_fld_nm]; } in let fn_name = let transform_fn = Ident.(transform_fn_name @@ Type ty) in Longident.parse @@ String.concat "."
@@ List.rev (transform_fn :: path) in let fn_expr = pexp_ident ~loc { loc; txt = fn_name } and top_down_expr = pexp_field ~loc (pexp_ident ~loc { loc; txt = Lident Names.top_down_arg }) pass_fld and bottom_up_expr = pexp_field ~loc (pexp_ident ~loc { loc; txt = Lident Names.bottom_up_arg }) pass_fld in let expr = [%expr match ([%e top_down_expr], [%e bottom_up_expr]) with | (Some top_down, Some bottom_up) -> [%e fn_expr] [%e binding_expr] ~ctx ~top_down ~bottom_up | (Some top_down, _) -> [%e fn_expr] [%e binding_expr] ~ctx ~top_down ~bottom_up:([%e identity_expr] ()) | (_, Some bottom_up) -> [%e fn_expr] [%e binding_expr] ~ctx ~top_down:([%e identity_expr] ()) ~bottom_up | _ -> [%e binding_expr]] in (pat, Some expr) and aux_ref ty binding loc = let pat = ppat_var ~loc { loc; txt = binding } in let binding_deref = binding ^ "_deref" in let pat_deref = ppat_var ~loc { loc; txt = binding_deref } in let expr_elem = pexp_ident ~loc { loc; txt = Lident binding } in let expr_opt = match aux ty binding_deref with | (_pat, Some expr) -> Some [%expr let [%p pat_deref] = ![%e expr_elem] in [%e expr_elem] := [%e expr]; [%e expr_elem]] | _ -> None in (pat, expr_opt) and aux_lazy ty binding loc = let pat = ppat_var ~loc { loc; txt = binding } in let binding_forced = binding ^ "_force" in let pat_forced = ppat_var ~loc { loc; txt = binding_forced } in let expr_elem = pexp_ident ~loc { loc; txt = Lident binding } in let expr_opt = match aux ty binding_forced with | (_pat, Some expr) -> Some [%expr let [%p pat_forced] = Lazy.force [%e expr_elem] in lazy [%e expr]] | _ -> None in (pat, expr_opt) and aux_option ty binding loc = let pat = ppat_var ~loc { loc; txt = binding } in let scrut_expr = pexp_ident ~loc { loc; txt = lident binding } in let expr_opt = match aux ty (String.concat "_" [binding; "inner"]) with | (pat, Some expr) -> Some [%expr match [%e scrut_expr] with | Some [%p pat] -> Some [%e expr] | _ -> None] | _ -> None in (pat, expr_opt) and aux_result ty_ok ty_err binding loc = let pat = ppat_var ~loc { loc; txt = binding } in let scrut_expr = pexp_ident ~loc { loc; txt = lident binding } in let expr_opt = match ( aux ty_ok (String.concat "_" [binding; "ok"]), aux ty_err (String.concat "_" [binding; "err"]) ) with | ((pat_ok, Some expr_ok), (pat_err, Some expr_err)) -> Some [%expr match [%e scrut_expr] with | Ok [%p pat_ok] -> Ok [%e expr_ok] | Error [%p pat_err] -> Error [%e expr_err]] | ((pat_ok, Some expr_ok), _) -> Some [%expr match [%e scrut_expr] with | Ok [%p pat_ok] -> Ok [%e expr_ok] | _ -> [%e scrut_expr]] | (_, (pat_err, Some expr_err)) -> Some [%expr match [%e scrut_expr] with | Error [%p pat_err] -> Error [%e expr_err] | _ -> [%e scrut_expr]] | _ -> None in (pat, expr_opt) and aux_either ty_left ty_right binding loc = let pat = ppat_var ~loc { loc; txt = binding } in let scrut_expr = pexp_ident ~loc { loc; txt = lident binding } in let expr_opt = match ( aux ty_left (String.concat "_" [binding; "left"]), aux ty_right (String.concat "_" [binding; "right"]) ) with | ((pat_left, Some expr_left), (pat_right, Some expr_right)) -> Some [%expr Either.map ~left:(fun [%p pat_left] -> [%e expr_left]) ~right:(fun [%p pat_right] -> [%e expr_right]) [%e scrut_expr]] | ((pat_left, Some expr_left), _) -> Some [%expr Either.map_left (fun [%p pat_left] -> [%e expr_left]) [%e scrut_expr]] | (_, (pat_right, Some expr_right)) -> Some [%expr Either.map_right (fun [%p pat_right] -> [%e expr_right]) [%e scrut_expr]] | _ -> None in (pat, expr_opt) and aux_functor map_expr ty binding loc = let (inner_pat, 
inner_expr_opt) = aux ty binding in let pat = ppat_var ~loc { loc; txt = binding } in let arg_expr = pexp_ident ~loc { loc; txt = Lident binding } in let default = (pat, None) in Option.value ~default @@ Option.map (fun inner_expr -> let expr = [%expr [%e map_expr] (fun [%p inner_pat] -> [%e inner_expr]) [%e arg_expr]] in (pat, Some expr)) inner_expr_opt and aux_arrow _arg_lbl _ty_dom _ty_codom binding loc = (ppat_var ~loc { loc; txt = binding }, None) and aux_tuple tys binding loc = let (pats, expr_res) = Help.unzip @@ List.mapi (fun i ty -> let binding = String.concat "_" [binding; string_of_int i] in let (pat, expr_opt) = aux ty binding in let expr_res = match expr_opt with | Some expr -> Ok expr | _ -> Error binding in (pat, expr_res)) tys in if List.for_all Result.is_error expr_res then (ppat_var ~loc { loc; txt = binding }, None) else let exprs = List.map (function | Ok expr -> expr | Error binding -> pexp_ident ~loc { loc; txt = lident binding }) expr_res in (ppat_tuple ~loc pats, Some (pexp_tuple ~loc exprs)) in aux ty binding let gen_record_field Record_field.{ label; ty; _ } ~opaque_map = gen_core_ty ~binding:label ty ~opaque_map let gen_record_fields record_name record_fields ~loc ~opaque_map = let fld_opts = List.map (fun ((Record_field.{ label; loc; _ } as fld), annot_opt) -> match annot_opt with | Some Annot.Opaque -> ((label, loc), (ppat_var ~loc { loc; txt = label }, None)) | Some Annot.Explicit -> let pat = ppat_var ~loc { loc; txt = label } in let fn_nm = Ident.(transform_fn_name @@ Field (record_name, label)) in let fn_expr = pexp_ident ~loc { loc; txt = Lident fn_nm } and elem_expr = pexp_ident ~loc { loc; txt = Lident label } in let expr = [%expr [%e fn_expr] [%e elem_expr] ~ctx ~top_down ~bottom_up] in ((label, loc), (pat, Some expr)) | _ -> ((label, loc), gen_record_field fld ~opaque_map)) record_fields in let (pats, exprs, partial, empty) = List.fold_right (fun ((lbl, loc), (pat, expr_opt)) (pats, exprs, partial, empty) -> let ident = { loc; txt = lident lbl } in match expr_opt with | Some expr -> ((ident, pat) :: pats, (ident, expr) :: exprs, partial, false) | _ -> (pats, exprs, true, empty)) fld_opts ([], [], false, true) in if empty then (ppat_var ~loc { loc; txt = record_name }, None) else if partial then let rcd_pat = ppat_record ~loc pats Open in ( ppat_alias ~loc rcd_pat { loc; txt = record_name }, Some (pexp_record ~loc exprs @@ Some (pexp_ident ~loc { loc; txt = lident record_name })) ) else (ppat_record ~loc pats Closed, Some (pexp_record ~loc exprs None)) let gen_variant_ctor variant_name variant_ctor ~opaque_map ~explicit = let open Variant_ctor in match variant_ctor with | Constant_ctor (lbl, loc) -> ( ppat_construct ~loc { loc; txt = lident lbl } None, if explicit then Some (pexp_extension ~loc @@ Err.unsupported_ctor_args_empty loc) else None ) | Single_ctor (lbl, loc, _) when explicit -> let pat = ppat_construct ~loc { loc; txt = lident lbl } @@ Some (ppat_var ~loc { loc; txt = "elem" }) in let fn_nm = Ident.(transform_fn_name @@ Ctor (variant_name, lbl)) in let fn_expr = pexp_ident ~loc { loc; txt = Lident fn_nm } and elem_expr = pexp_ident ~loc { loc; txt = Lident "elem" } in let apply_expr = [%expr [%e fn_expr] [%e elem_expr] ~ctx ~top_down ~bottom_up] in let expr = pexp_construct ~loc { loc; txt = Lident lbl } @@ Some apply_expr in (pat, Some expr) | Single_ctor (lbl, loc, ty) -> let binding = String.(concat "_" [lowercase_ascii lbl; "elem"]) in let (ty_pat, ty_expr_opt) = gen_core_ty ~binding ty ~opaque_map in let pat = ppat_construct ~loc { 
loc; txt = lident lbl } @@ Some ty_pat in let expr_opt = Option.map (fun expr -> pexp_construct ~loc { loc; txt = lident lbl } @@ Some expr) ty_expr_opt in (pat, expr_opt) | Tuple_ctor (lbl, loc, tys) when explicit -> let tuple_pat = ppat_tuple ~loc @@ List.mapi (fun i _ -> let txt = "elem_" ^ string_of_int i in ppat_var ~loc { loc; txt }) tys in let pat = ppat_construct ~loc { loc; txt = lident lbl } @@ Some tuple_pat in let fn_nm = Ident.(transform_fn_name @@ Ctor (variant_name, lbl)) in let fn_expr = pexp_ident ~loc { loc; txt = Lident fn_nm } and elem_expr = pexp_tuple ~loc @@ List.mapi (fun i _ -> pexp_ident ~loc { loc; txt = Lident ("elem_" ^ string_of_int i) }) tys in let construct_expr = pexp_construct ~loc { loc; txt = Lident lbl } @@ Some elem_expr in let expr = [%expr let [%p tuple_pat] = [%e fn_expr] [%e elem_expr] ~ctx ~top_down ~bottom_up in [%e construct_expr]] in (pat, Some expr) | Tuple_ctor (lbl, loc, tys) -> let ty = ptyp_tuple ~loc tys in let binding = String.(concat "_" [lowercase_ascii lbl; "elem"]) in let (ty_pat, ty_expr_opt) = gen_core_ty ~binding ty ~opaque_map in let pat = ppat_construct ~loc { loc; txt = lident lbl } @@ Some ty_pat in let expr_opt = Option.map (fun expr -> pexp_construct ~loc { loc; txt = lident lbl } @@ Some expr) ty_expr_opt in (pat, expr_opt) | Record_ctor (lbl, loc, flds) -> let binding = String.lowercase_ascii lbl in let (ty_pat, ty_expr_opt) = gen_record_fields binding flds ~loc ~opaque_map in let pat = ppat_construct ~loc { loc; txt = lident lbl } @@ Some ty_pat in if explicit then (pat, Some (pexp_extension ~loc @@ Err.unsupported_ctor_args loc)) else let expr_opt = Option.map (fun expr -> pexp_construct ~loc { loc; txt = lident lbl } @@ Some expr) ty_expr_opt in (pat, expr_opt) let gen_variant_ctors variant_name variant_ctors ~loc ~opaque_map = let elem lbl = String.(concat "_" [lowercase_ascii lbl; "elem"]) in let ctor_opts = List.map (fun (ctor, annot_opt) -> let (loc, txt) = let open Variant_ctor in match ctor with | Constant_ctor (lbl, loc) -> (loc, elem lbl) | Single_ctor (lbl, loc, _) -> (loc, elem lbl) | Tuple_ctor (lbl, loc, _) -> (loc, elem lbl) | Record_ctor (lbl, loc, _) -> (loc, elem lbl) in match annot_opt with | Some Annot.Opaque -> ((txt, loc), (ppat_var ~loc { loc; txt }, None)) | Some Annot.Explicit -> ( (txt, loc), gen_variant_ctor variant_name ctor ~opaque_map ~explicit:true ) | _ -> ( (txt, loc), gen_variant_ctor variant_name ctor ~opaque_map ~explicit:false )) variant_ctors in let (pats, exprs, partial, empty) = List.fold_right (fun (_, (pat, expr_opt)) (pats, exprs, partial, empty) -> match expr_opt with | Some expr -> (pat :: pats, expr :: exprs, partial, false) | _ -> (pats, exprs, true, empty)) ctor_opts ([], [], false, true) in let pat_variant_nm = ppat_var ~loc { loc; txt = variant_name } and exp_variant_nm = pexp_ident ~loc { loc; txt = lident variant_name } in if empty then (pat_variant_nm, None) else let cases = let named = List.map2 (fun lhs rhs -> case ~lhs ~guard:None ~rhs) pats exprs in if partial then named @ [case ~lhs:pat_variant_nm ~guard:None ~rhs:exp_variant_nm] else named in let expr = pexp_match ~loc exp_variant_nm cases in (pat_variant_nm, Some expr) let gen_def Transform_field.{ ident; ty; loc; definition; tyvars; type_info; _ } ~opaque_map = let (pat, expr_opt) = match definition with | Transform_field.Core_ty def_ty -> gen_core_ty def_ty ~opaque_map ~binding:(Ident.to_string ident) | Transform_field.Variant_ctors (name, ctors) -> gen_variant_ctors name ctors ~loc ~opaque_map | 
Transform_field.Record_fields (name, flds) -> gen_record_fields name flds ~loc ~opaque_map in let fn_name = Ident.traverse_fn_name ident in Option.map (fun body_expr -> Gen_fn.gen_str fn_name ty tyvars type_info pat body_expr loc) expr_opt let gen_str fld ~opaque_map = match fld with | Transform_field.Field def -> gen_def def ~opaque_map | Transform_field.Unsupported Unsupported.{ loc; kind; _ } -> let err = match kind with | Unsupported.Abstract -> Err.unsupported_abstract loc | Unsupported.Open -> Err.unsupported_open loc in Some (value_binding ~loc ~pat:(ppat_any ~loc) ~expr:(pexp_extension ~loc @@ err)) end let gen_str ~loc ~path:_ (_rec_flag, tds) restart = let allow_restart = Option.value ~default:Restart.Allow restart in let analysis = Analyse.analyse tds in let strat_transform_fields = Transform_field.fields analysis in let pass_fields = Pass_field.fields analysis in let transform_fields = List.concat strat_transform_fields in let pass_ty_decl = Gen_pass.gen_str transform_fields pass_fields ~loc ~allow_restart in let combine_fn = Gen_combine.gen_str transform_fields pass_fields ~loc ~allow_restart in let identity = Gen_identity.gen_str transform_fields pass_fields ~loc in let fns = List.map (fun flds -> let vbs = List.concat_map (fun tfld -> match Gen_traverse.gen_str tfld ~opaque_map:analysis.Analyse.opaque_map with | Some vb -> [ vb; Gen_transform.gen_str tfld ~should_traverse:true ~allow_restart; ] | _ -> [ Gen_transform.gen_str tfld ~should_traverse:false ~allow_restart; ]) flds in let recursive = if List.length vbs = 1 && not (Restart.allowed allow_restart) then Nonrecursive else Recursive in pstr_value ~loc recursive vbs) strat_transform_fields in let pass_module = let name = { loc; txt = Some Names.pass_module_name } and expr = pmod_structure ~loc [pass_ty_decl; identity; combine_fn] in pstr_module ~loc { pmb_loc = loc; pmb_name = name; pmb_expr = expr; pmb_attributes = [] } in pass_module :: fns let gen_sig ~loc ~path:_ (_rec_flag, tds) restart = let allow_restart = Option.value ~default:Restart.Allow restart in let analysis = Analyse.analyse tds in let transform_fields = List.concat @@ Transform_field.fields analysis in let pass_fields = Pass_field.fields analysis in let pass_ty = Gen_pass.gen_sig transform_fields pass_fields ~loc ~allow_restart in let combine_fn = Gen_combine.gen_sig loc in let identity = Gen_identity.gen_sig loc in let fns = List.map Gen_transform.gen_sig transform_fields in let pass_module = let name = { loc; txt = Some Names.pass_module_name } and type_ = { pmty_loc = loc; pmty_attributes = []; pmty_desc = Pmty_signature [pass_ty; combine_fn; identity]; } in psig_module ~loc @@ module_declaration ~loc ~name ~type_ in pass_module :: fns let args () = let inner = Ast_pattern.( alt (as__ @@ pexp_variant (string "Encode_as_variant") none |> map1 ~f:(fun _ -> Restart.(Disallow Encode_as_variant))) (as__ @@ pexp_variant (string "Encode_as_result") none |> map1 ~f:(fun _ -> Restart.(Disallow Encode_as_result)))) in let pat = Ast_pattern.( alt (as__ @@ pexp_variant (string "Allow") none |> map1 ~f:(fun _ -> Restart.Allow)) (pexp_variant (string "Disallow") (some inner))) in Deriving.Args.(empty +> arg "restart" pat) let transform = Deriving.add Names.transform_pfx ~str_type_decl:Deriving.(Generator.make (args ()) gen_str) ~sig_type_decl:Deriving.(Generator.make (args ()) gen_sig)
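(* A minimal usage sketch of the deriver defined above. The deriver name
   ("transform"), the [Pass] module name, and the generated value names
   ([identity], [on_ty_t], [transform_t]) all come from the [Names] and
   [Ident] modules, which are not shown here, so every generated name
   below is an assumption for illustration only. *)
type t =
  | Leaf of int
  | Node of t * t
[@@deriving transform ~restart:(`Disallow `Encode_as_result)]

(* A bottom-up pass incrementing every [Leaf]. Under the
   [Encode_as_result] encoding, [Ok] means "continue" and [Error] means
   "stop". *)
let incr_leaves : unit Pass.t =
  {
    (Pass.identity ()) with
    Pass.on_ty_t =
      Some
        (fun t ~ctx ->
          match t with
          | Leaf n -> (ctx, Ok (Leaf (n + 1)))
          | node -> (ctx, Ok node));
  }

(* Evaluates to [Node (Leaf 2, Leaf 3)]. *)
let _two_three =
  transform_t
    (Node (Leaf 1, Leaf 2))
    ~ctx:()
    ~top_down:(Pass.identity ())
    ~bottom_up:incr_leaves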
OCaml
hhvm/hphp/hack/src/procs/bucket.ml
(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. * * This source code is licensed under the MIT license found in the * LICENSE file in the "hack" directory of this source tree. * *) open Hh_prelude (****************************************************************************) (* Module for making buckets. * When we parallelize, we need to create "buckets" of tasks for the * workers. * Given a list of files, we want to split it up into buckets such that * every worker is busy long enough. If the bucket is too big, it hurts * load balancing; if it is too small, the overhead in synchronization time * hurts *) (****************************************************************************) type 'a bucket = | Job of 'a | Wait | Done let is_done = function | Done -> true | Wait | Job _ -> false type 'a next = unit -> 'a bucket let default_max_size = 500 let calculate_bucket_size ~num_jobs ~num_workers ?max_size () = let max_size = Option.value max_size ~default:default_max_size in if num_jobs < num_workers * max_size then max 1 (1 + (num_jobs / num_workers)) else max_size let make_ progress_fn bucket_size jobs = let i = ref 0 in fun () -> let bucket_size = min (Array.length jobs - !i) bucket_size in progress_fn ~start:!i ~length:bucket_size; let result = Array.sub jobs ~pos:!i ~len:bucket_size in i := bucket_size + !i; Array.to_list result let make_list ~num_workers ?progress_fn ?max_size jobs = let progress_fn = Option.value ~default:(fun ~total:_ ~start:_ ~length:_ -> ()) progress_fn in let jobs = Array.of_list jobs in let bucket_size = calculate_bucket_size ~num_jobs:(Array.length jobs) ~num_workers ?max_size () in make_ (progress_fn ~total:(Array.length jobs)) bucket_size jobs let of_list = function | [] -> Done | wl -> Job wl let make ~num_workers ?progress_fn ?max_size jobs = let maker = make_list ~num_workers ?progress_fn ?max_size jobs in (fun () -> of_list (maker ())) type 'a of_n = { work: 'a; bucket: int; total: int; } let make_n_buckets ~buckets ~split = let next_bucket = ref 0 in fun () -> let current = !next_bucket in incr next_bucket; if current < buckets then Job { work = split ~bucket:current; bucket = current; total = buckets } else Done
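(* A worked example of [calculate_bucket_size] with the default
   [max_size] of 500: when there are fewer jobs than
   [num_workers * max_size], the bucket shrinks so that every worker
   gets work; otherwise the cap applies. A sketch, not part of the
   original file. *)
let () =
  (* 1_000 jobs over 8 workers: 1_000 < 8 * 500, so the size is
     max 1 (1 + 1_000 / 8) = 126. *)
  assert (Bucket.calculate_bucket_size ~num_jobs:1_000 ~num_workers:8 () = 126);
  (* 10_000 jobs over 8 workers: 10_000 >= 8 * 500, so the cap of 500
     applies. *)
  assert (Bucket.calculate_bucket_size ~num_jobs:10_000 ~num_workers:8 () = 500)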
OCaml Interface
hhvm/hphp/hack/src/procs/bucket.mli
(* * Copyright (c) 2016, Facebook, Inc. * All rights reserved. * * This source code is licensed under the MIT license found in the * LICENSE file in the "hack" directory of this source tree. * *) (* The general protocol for a next function is to return either Wait (indicating that workers should wait until more elements are added to the workload), or Job of a bucket, or Done to indicate there is no more work. *) type 'a bucket = | Job of 'a | Wait | Done val is_done : 'a bucket -> bool type 'a next = unit -> 'a bucket (** Given a number of jobs, number of workers, and a maximum bucket size, will calculate the optimal bucket size to get the work done as quickly as possible. Specifically, if the number of jobs is less than the number of workers times the maximum bucket size, smaller bucket sizes will be returned in order to utilize as many workers as possible. *) val calculate_bucket_size : num_jobs:int -> num_workers:int -> ?max_size:int -> unit -> int (* Makes a bucket out of a list, without regard for number of workers or the size of the list. *) val of_list : 'a list -> 'a list bucket val make : num_workers:int -> ?progress_fn:(total:int -> start:int -> length:int -> unit) -> ?max_size:int -> 'a list -> 'a list next type 'a of_n = { work: 'a; bucket: int; total: int; } (** * Make n buckets (where n = "buckets"). * * The "split" function provides the workload for the k'th bucket. *) val make_n_buckets : buckets:int -> split:(bucket:int -> 'a) -> 'a of_n next (* Specialized version to split into lists only. *) val make_list : num_workers:int -> ?progress_fn:(total:int -> start:int -> length:int -> unit) -> ?max_size:int -> 'a list -> unit -> 'a list
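(* A small sketch of [make_n_buckets]: the [next] function it returns
   yields exactly [buckets] jobs, each tagged with its index and the
   total, then [Done]. Illustrative only. *)
let () =
  let next = Bucket.make_n_buckets ~buckets:3 ~split:(fun ~bucket -> bucket * 10) in
  let rec drain acc =
    match next () with
    | Bucket.Job { Bucket.work; bucket; total } ->
      drain ((work, bucket, total) :: acc)
    | Bucket.Done -> List.rev acc
    | Bucket.Wait -> drain acc (* never returned by this maker *)
  in
  assert (drain [] = [(0, 0, 3); (10, 1, 3); (20, 2, 3)])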
hhvm/hphp/hack/src/procs/dune
(library (name procs_bucket) (wrapped false) (modules bucket) (libraries core_kernel utils_core)) (library (name procs_procs) (wrapped false) (modules mem_profile multiThreadedCall multiWorker worker workerController) (libraries core_kernel heap_shared_mem logging marshal_tools procs_bucket procfs relative_path sys_utils utils_exit worker_cancel)) (library (name procs_entry_point) (wrapped false) (modules workerControllerEntryPoint) (libraries procs_procs folly_stubs))
OCaml
hhvm/hphp/hack/src/procs/mem_profile.ml
(* * Copyright (c) 2018, Facebook, Inc. * All rights reserved. * * This source code is licensed under the MIT license found in the * LICENSE file in the "hack" directory of this source tree. * *) (* See src/facebook/profile/statMemProfMemProfile.ml for the implementation we use for statmemprof. *) let start () = () let stop () = ()
OCaml Interface
hhvm/hphp/hack/src/procs/mem_profile.mli
(* * Copyright (c) 2018, Facebook, Inc. * All rights reserved. * * This source code is licensed under the MIT license found in the * LICENSE file in the "hack" directory of this source tree. * *) val start : unit -> unit val stop : unit -> unit
OCaml
hhvm/hphp/hack/src/procs/multiThreadedCall.ml
(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. * * This source code is licensed under the MIT license found in the * LICENSE file in the "hack" directory of this source tree. * *) module Hh_bucket = Bucket open Hh_prelude exception Coalesced_failures of WorkerController.worker_failure list let coalesced_failures_to_string failures = let failure_strings = List.map failures ~f:WorkerController.failure_to_string in Printf.sprintf "Coalesced_failures[%s]" (String.concat ~sep:", " failure_strings) let () = Caml.Printexc.register_printer @@ function | Coalesced_failures failures -> Some (coalesced_failures_to_string failures) | _ -> None type cancel_reason = { user_message: string; log_message: string; timestamp: float; } type interrupt_result = | Cancel of cancel_reason | Continue type 'env interrupt_handler = 'env -> 'env * interrupt_result type 'env interrupt_config = { env: 'env; handlers: 'env -> (Unix.file_descr * 'env interrupt_handler) list; } type worker_id = int let no_interrupt env = { handlers = (fun _ -> []); env } (* Integer that increases with every invocation of multi_threaded_call, used to * distinguish worker handles that belong to current job vs those that are still * processing some other job (in cases when multi_threaded_call is called during * an already ongoing multi_threaded_call job). *) let call_id = ref 0 (* Exceptions from parallel jobs are in general not recoverable - the workers * are dead and we don't respawn them. The only reason someone should catch * them is to log and exit. Setting the on_exception handler allows you to do it * before any caller has a chance to catch the exception and attempt to handle * it. *) let nested_exception : Exception.t option ref = ref None let on_exception_ref = ref (fun e -> nested_exception := Some e) let multi_threaded_call (type job_input job_output acc env) workers (job : worker_id * acc -> job_input -> job_output) (merge : worker_id * job_output -> acc -> acc) (neutral : acc) (next : job_input Hh_bucket.next) ?(on_cancelled : (unit -> job_input list) option) (interrupt : env interrupt_config) : (acc * env) * (job_input list * cancel_reason) option = incr call_id; let call_id = !call_id in (* Split workers into those that are free, and those that are still doing * previous jobs. *) let (workers, handles) = List.fold workers ~init:([], []) ~f:(fun (workers, handles) worker -> (* Note that now some handles have mismatched types. We need to remember * to check their get_call_id against this multi_threaded_call call_id * before trusting the types. *) match WorkerController.get_handle_UNSAFE worker with | None -> (worker :: workers, handles) | Some handle -> (workers, handle :: handles)) in let is_current h = call_id = WorkerController.get_call_id h in (* merge accumulator, leaving environment and interrupt handlers untouched *) let merge x (y1, y2, y3) = (merge x y1, y2, y3) in (* interrupt handlers are irrelevant after job is done *) let unpack_result (acc, env, _handlers) = (acc, env) in let handler_fds (_, _, handlers) = List.map handlers ~f:fst in let rec add_pending acc = match next () with | Hh_bucket.Done -> acc | Hh_bucket.Job a -> add_pending (a :: acc) | Hh_bucket.Wait -> (* There's not really a good solution to generically getting the pending work items when attempting to cancel a job that's in the Wait state, so we depend on those jobs to determine their own state in the [on_cancelled] handler.
*) failwith "cancelling jobs with Wait not supported" in (* When a job is cancelled, return all the jobs that were not started OR were * cancelled in the middle (so you had better hope they are idempotent). *) let check_cancel handles ready_fds (acc, env, handlers) = let (env, decision, handlers) = List.fold handlers ~init:(env, Continue, handlers) ~f:(fun (env, prior_decision, handlers) (fd, handler) -> let is_fd_for_this_handler = List.mem ~equal:Poly.( = ) ready_fds fd in match prior_decision with | Cancel _ -> (env, prior_decision, handlers) | Continue when not is_fd_for_this_handler -> (env, prior_decision, handlers) | Continue -> let (env, decision) = handler env in (* Re-raise the exception even if the handler has caught and ignored it *) Option.iter !nested_exception ~f:(fun e -> Exception.reraise e); (* running a handler could have changed the handlers, so we need to regenerate them based on the new environment *) let handlers = interrupt.handlers env in (env, decision, handlers)) in let res = (acc, env, handlers) in match decision with | Cancel reason -> WorkerController.cancel handles; let unfinished = match on_cancelled with | Some f -> f () | None -> let unfinished = List.map handles ~f:WorkerController.get_job in add_pending unfinished in (res, Some (unfinished, reason)) | Continue -> (res, None) in let rec dispatch workers handles acc = (* 'workers' represents available workers. *) (* 'handles' represents pending jobs. *) (* 'acc' are the accumulated results. *) match workers with | None when not @@ List.exists handles ~f:is_current -> (* No more handles at this recursion level *) (unpack_result acc, None) | None (* No more jobs to start *) | Some [] -> (* No worker available: wait for some workers to finish. *) collect [] handles acc | Some (worker :: workers) -> (* At least one worker is available... *) (match next () with | Hh_bucket.Wait -> collect (worker :: workers) handles acc | Hh_bucket.Done -> (* ... but no more jobs to be distributed, let's collect results. *) dispatch None handles acc | Hh_bucket.Job bucket -> (* ... send a job to the worker. *) let worker_id = WorkerController.worker_id worker in let handle = WorkerController.call ~call_id worker (fun xl -> job (worker_id, neutral) xl) bucket in dispatch (Some workers) (handle :: handles) acc) and collect workers handles acc = let { WorkerController.readys; waiters; ready_fds } = WorkerController.select handles (handler_fds acc) in let workers = List.map ~f:WorkerController.get_worker readys @ workers in (* Collect the results. *) let (acc, failures) = (* Fold the results of all the finished workers. Also, coalesce the exit * statuses for all the failed workers. *) List.fold_left ~f: begin fun (acc, failures) h -> try let res = WorkerController.get_result h in (* Results for handles from other calls are cached by get_result * and will be retrieved later, so we ignore them here *) let acc = if is_current h then let worker_id = WorkerController.get_worker h |> WorkerController.worker_id in merge (worker_id, res) acc else acc in (acc, failures) with | WorkerController.Worker_failed (_, failure) -> (acc, failure :: failures) end ~init:(acc, []) readys in if not (List.is_empty failures) then (* If any single worker failed, we stop fanning out more jobs. *) raise (Coalesced_failures failures) else match check_cancel waiters ready_fds acc with | (acc, Some unfinished_and_reason) -> (unpack_result acc, Some unfinished_and_reason) | (acc, None) -> (* And continue...
*) dispatch (Some workers) waiters acc in try let () = nested_exception := None in dispatch (Some workers) handles (neutral, interrupt.env, interrupt.handlers interrupt.env) with | exn -> let e = Exception.wrap exn in !on_exception_ref e; Exception.reraise e let call_with_worker_id workers job merge neutral next = let ((res, ()), unfinished_and_reason) = multi_threaded_call workers job merge neutral next (no_interrupt ()) in assert (Option.is_none unfinished_and_reason); res let call workers job merge neutral next = let job (_id, a) b = job a b in let merge (_id, a) b = merge a b in let ((res, ()), unfinished_and_reason) = multi_threaded_call workers job merge neutral next (no_interrupt ()) in assert (Option.is_none unfinished_and_reason); res let call_with_interrupt workers job merge neutral next ?on_cancelled interrupt = SharedMem.set_allow_removes false; (* Interrupting of nested jobs is not implemented *) assert ( List.for_all workers ~f:(fun x -> Option.is_none @@ WorkerController.get_handle_UNSAFE x)); let job (_id, a) b = job a b in let merge (_id, a) b = merge a b in let ((res, interrupt_env), unfinished_and_reason) = multi_threaded_call workers job merge neutral next ?on_cancelled interrupt in SharedMem.set_allow_removes true; (res, interrupt_env, unfinished_and_reason) let on_exception f = on_exception_ref := f
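(* A minimal sketch of [call]: summing buckets of integers across
   workers. [workers] is assumed to come from a pool built with
   [WorkerController.make] (e.g. via [MultiWorker.make]); [job] receives
   the neutral element for each bucket, and [merge] folds each bucket's
   result into the accumulator. Illustrative, not part of this file. *)
let sum_all (workers : WorkerController.worker list) (buckets : int list list) : int =
  let remaining = ref buckets in
  let next () =
    match !remaining with
    | [] -> Bucket.Done
    | b :: rest ->
      remaining := rest;
      Bucket.Job b
  in
  MultiThreadedCall.call
    workers
    (fun _neutral bucket -> List.fold_left ( + ) 0 bucket)
    (fun bucket_sum acc -> bucket_sum + acc)
    0
    next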
OCaml Interface
hhvm/hphp/hack/src/procs/multiThreadedCall.mli
(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. * * This source code is licensed under the MIT license found in the * LICENSE file in the "hack" directory of this source tree. * *) (** If a worker process fails, this is raised. * * Note: When one worker process fails, the remaining in-progress workers are checked * for completion/failure, and all their failures (non-zero exit code) are coalesced * together into one of these exceptions. * * No further buckets are distributed to workers. * * Still-in-progress workers are left to their own devices. *) exception Coalesced_failures of WorkerController.worker_failure list val coalesced_failures_to_string : WorkerController.worker_failure list -> string (** If an interrupt handler wants the typecheck to be cancelled, it must always give a reason. *) type cancel_reason = { user_message: string; (** This string followed by "\nPlease re-run hh" will be printed to stdout by clientCheckStatus.ml, in the event that the typecheck got cancelled. *) log_message: string; (** This goes only to logs. The logs will have both [user_message] and [log_message]. A typical use of log_message is a callstack or exception message. *) timestamp: float; (** This goes only to logs. We might decide to write the timestamp at which the interrupt was generated, or at which it was handled. *) } (** An interrupt is set up as a pair [Unix.file_descr * 'env interrupt_handler]. Our interrupts are set up in serverMain.ml... * The file-descr for our watchman subscription and a handler which processes the watchman event; * The file-descr for our persistent connection and a handler which processes the RPC; * The file-descr for our "priority channel" i.e. new hh_client connections and a handler for them. For instance the watchman handler might determine that a .php file changed on disk, in which case it returns [Cancel] and MultiThreadedCall stops itself and returns all unfinished workitems back to its caller; or might determine that no material disk changes happened in which case it returns [Continue] and MultiThreadedCall will continue. *) type interrupt_result = | Cancel of cancel_reason | Continue type 'env interrupt_handler = 'env -> 'env * interrupt_result type 'env interrupt_config = { env: 'env; handlers: 'env -> (Unix.file_descr * 'env interrupt_handler) list; } type worker_id = int val no_interrupt : 'a -> 'a interrupt_config (** Can raise Coalesced_failures exception. *) val call : WorkerController.worker list -> ('c -> 'a -> 'b) -> ('b -> 'c -> 'c) -> 'c -> 'a Bucket.next -> 'c (** Invokes merge with a unique worker id. Can raise Coalesced_failures exception. *) val call_with_worker_id : WorkerController.worker list -> (worker_id * 'c -> 'a -> 'b) -> (worker_id * 'b -> 'c -> 'c) -> 'c -> 'a Bucket.next -> 'c (** The last element returned, a list of job inputs, are the job inputs which have not been processed fully or at all due to interrupts. *) val call_with_interrupt : WorkerController.worker list -> ('c -> 'a -> 'b) -> ('b -> 'c -> 'c) -> 'c -> 'a Bucket.next -> ?on_cancelled: ((* [on_cancelled] should be specified if your [next] function ever returns [Bucket.Wait], and it should return the list of all jobs that haven't finished or started yet. *) unit -> 'a list) -> 'd interrupt_config -> 'c * 'd * ('a list * cancel_reason) option val on_exception : (Exception.t -> unit) -> unit
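(* A sketch of the interrupt setup described above: a single handler
   watching one file descriptor (say, a watchman socket) that cancels
   the job when the descriptor fires. [watchman_fd] and
   [check_for_changes] are hypothetical inputs, not part of this
   interface. *)
let mk_interrupt
    ~(watchman_fd : Unix.file_descr)
    ~(check_for_changes : unit -> bool) : unit MultiThreadedCall.interrupt_config =
  let handler env =
    if check_for_changes () then
      ( env,
        MultiThreadedCall.Cancel
          {
            MultiThreadedCall.user_message = "Files changed on disk during the check";
            log_message = "watchman fd became ready";
            timestamp = Unix.gettimeofday ();
          } )
    else
      (env, MultiThreadedCall.Continue)
  in
  { MultiThreadedCall.env = (); handlers = (fun _env -> [(watchman_fd, handler)]) }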
OCaml
hhvm/hphp/hack/src/procs/multiWorker.ml
(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. * * This source code is licensed under the MIT license found in the * LICENSE file in the "hack" directory of this source tree. * *) module Hh_bucket = Bucket open Hh_prelude (* Hide the worker type from our users *) type worker = WorkerController.worker type 'a interrupt_config = 'a MultiThreadedCall.interrupt_config let single_threaded_call_with_worker_id job merge neutral next = let x = ref (next ()) in let acc = ref neutral in (* This is just a sanity check that the job is serializable and so * that the same code will work both in single-threaded and parallel * mode. *) let _ = Marshal.to_string job [Marshal.Closures] in while not (Hh_bucket.is_done !x) do match !x with | Hh_bucket.Wait -> (* May be waiting for a remote worker to finish *) x := next () | Hh_bucket.Job l -> let res = job (0, neutral) l in acc := merge (0, res) !acc; x := next () | Hh_bucket.Done -> () done; !acc let single_threaded_call job merge neutral next = let job (_worker_id, a) b = job a b in let merge (_worker_id, a) b = merge a b in single_threaded_call_with_worker_id job merge neutral next module type CALLER = sig type 'a result val return : 'a -> 'a result val multi_threaded_call : WorkerController.worker list -> (WorkerController.worker_id * 'c -> 'a -> 'b) -> (WorkerController.worker_id * 'b -> 'c -> 'c) -> 'c -> 'a Hh_bucket.next -> 'c result end module CallFunctor (Caller : CALLER) : sig val call : WorkerController.worker list option -> job:(WorkerController.worker_id * 'c -> 'a -> 'b) -> merge:(WorkerController.worker_id * 'b -> 'c -> 'c) -> neutral:'c -> next:'a Hh_bucket.next -> 'c Caller.result end = struct let call workers ~job ~merge ~neutral ~next = match workers with | None -> Caller.return (single_threaded_call_with_worker_id job merge neutral next) | Some workers -> Caller.multi_threaded_call workers job merge neutral next end module Call = CallFunctor (struct type 'a result = 'a let return x = x let multi_threaded_call = MultiThreadedCall.call_with_worker_id end) let call_with_worker_id = Call.call let call workers ~(job : 'acc -> 'input -> 'output) ~merge ~neutral ~next = let job (_worker_id, a) b = job a b in let merge (_worker_id, a) b = merge a b in Call.call workers ~job ~merge ~neutral ~next module type WorkItems_sig = sig type t type workitem val of_workitem : workitem -> t val pop : t -> workitem option * t val push : workitem -> t -> t end let call_stateless : (module WorkItems_sig with type workitem = 'input and type t = 'inputs) -> worker list -> job:('acc -> 'input -> 'input * 'output) -> merge:('output -> 'acc -> 'acc) -> neutral:'acc -> inputs:'inputs -> 'acc = fun (type input inputs) (module WorkItems : WorkItems_sig with type workitem = input and type t = inputs) workers ~job ~merge ~neutral ~inputs -> let inputs_ref = ref inputs in let next () = let (input, inputs) = WorkItems.pop !inputs_ref in inputs_ref := inputs; match input with | None -> Bucket.Done | Some input -> Bucket.Job input in let merge (input, output) acc = inputs_ref := WorkItems.push input !inputs_ref; merge output acc in call (Some workers) ~job ~merge ~neutral ~next (* If we ever want this in MultiWorkerLwt then move this into CallFunctor *) let call_with_interrupt ?on_cancelled workers ~job ~merge ~neutral ~next ~interrupt = match workers with | Some workers when List.length workers <> 0 -> Hh_logger.log "MultiThreadedCall.call_with_interrupt called with %d workers" (List.length workers); MultiThreadedCall.call_with_interrupt ?on_cancelled workers
job merge neutral next interrupt | _ -> Hh_logger.log "single_threaded_call called with zero workers"; ( single_threaded_call job merge neutral next, interrupt.MultiThreadedCall.env, None ) let next ?progress_fn ?max_size workers = Hh_bucket.make ~num_workers: (match workers with | Some w -> List.length w | None -> 1) ?progress_fn ?max_size let make = WorkerController.make type call_wrapper = { f: 'a 'b 'c. worker list option -> job:('c -> 'a -> 'b) -> merge:('b -> 'c -> 'c) -> neutral:'c -> next:'a Hh_bucket.next -> 'c; } let wrapper = { f = call }
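(* A minimal [WorkItems_sig] instance for [call_stateless], backed by a
   plain list of batches. [call_stateless]'s merge pushes back whatever
   part of the input the job did not process, so [push] must drop fully
   consumed batches for the queue to drain. Illustrative only; it would
   be passed as [call_stateless (module Batches) workers ~job ~merge
   ~neutral ~inputs]. *)
module Batches = struct
  (* Files still to process in one batch. *)
  type workitem = string list

  type t = workitem list

  let of_workitem batch = [batch]

  let pop = function
    | [] -> (None, [])
    | batch :: rest -> (Some batch, rest)

  (* Re-queue only batches with work left. *)
  let push batch t =
    match batch with
    | [] -> t
    | _ -> batch :: t
end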
OCaml Interface
hhvm/hphp/hack/src/procs/multiWorker.mli
(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. * * This source code is licensed under the MIT license found in the * LICENSE file in the "hack" directory of this source tree. * *) module Hh_bucket = Bucket open Hh_prelude (* The protocol for a next function is to return a list of elements. * It will be called repeatedly until it returns an empty list. *) module type CALLER = sig type 'a result val return : 'a -> 'a result val multi_threaded_call : WorkerController.worker list -> (WorkerController.worker_id * 'c -> 'a -> 'b) -> (WorkerController.worker_id * 'b -> 'c -> 'c) -> 'c -> 'a Hh_bucket.next -> 'c result end module CallFunctor (Caller : CALLER) : sig val call : WorkerController.worker list option -> job:(WorkerController.worker_id * 'c -> 'a -> 'b) -> merge:(WorkerController.worker_id * 'b -> 'c -> 'c) -> neutral:'c -> next:'a Hh_bucket.next -> 'c Caller.result end type worker (* List of file descriptors that became ready (and triggered interruption), * returns whether current job should be cancelled *) type 'a interrupt_config = 'a MultiThreadedCall.interrupt_config val next : ?progress_fn:(total:int -> start:int -> length:int -> unit) -> ?max_size:int -> worker list option -> 'a list -> 'a list Hh_bucket.next (** Can raise MultiThreadedCall.Coalesced_failures unless in single-threaded mode. *) val call : worker list option -> job:('acc -> 'input -> 'output) -> merge:('output -> 'acc -> 'acc) -> neutral:'acc -> next:'input Hh_bucket.next -> 'acc module type WorkItems_sig = sig type t type workitem val of_workitem : workitem -> t val pop : t -> workitem option * t val push : workitem -> t -> t end (** [job] can return more items to process, e.g. the part of the input that it could not process. *) val call_stateless : (module WorkItems_sig with type workitem = 'input and type t = 'inputs) -> worker list -> job:('acc -> 'input -> 'input * 'output) -> merge:('output -> 'acc -> 'acc) -> neutral:'acc -> inputs:'inputs -> 'acc type call_wrapper = { f: 'a 'b 'c. worker list option -> job:('c -> 'a -> 'b) -> merge:('b -> 'c -> 'c) -> neutral:'c -> next:'a Hh_bucket.next -> 'c; } val wrapper : call_wrapper (* Can raise MultiThreadedCall.Coalesced_failures unless in single-threaded mode. *) val call_with_worker_id : worker list option -> job:(WorkerController.worker_id * 'c -> 'a -> 'b) -> merge:(WorkerController.worker_id * 'b -> 'c -> 'c) -> neutral:'c -> next:'a Hh_bucket.next -> 'c (** The last element returned, a list of job inputs, are the job inputs which have not been processed fully or at all due to interrupts. *) val call_with_interrupt : ?on_cancelled: ((* [on_cancelled] should be specified if your [next] function ever returns [Hh_bucket.Wait], and it should return the list of all jobs that haven't finished or started yet. *) unit -> 'a list) -> worker list option -> job:('c -> 'a -> 'b) -> merge:('b -> 'c -> 'c) -> neutral:'c -> next:'a Hh_bucket.next -> interrupt:'d interrupt_config -> 'c * 'd * ('a list * MultiThreadedCall.cancel_reason) option (* Creates a pool of workers. *) val make : ?call_wrapper: (* See docs in WorkerController.worker for call_wrapper. *) WorkerController.call_wrapper -> longlived_workers:bool -> saved_state:'a -> entry:'a WorkerController.entry -> int -> gc_control:Gc.control -> heap_handle:SharedMem.handle -> worker list
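When inputs do not fit the list-based [next] above, a [next] function can be hand-rolled, as long as it follows the bucket protocol; a sketch, assuming a simple work queue held in a ref (names hypothetical):

let make_next (queue : 'a list ref) : 'a list Bucket.next =
 fun () ->
  match !queue with
  | [] -> Bucket.Done (* no more buckets: the call returns *)
  | x :: rest ->
    queue := rest;
    (* A one-element bucket. [Bucket.Wait] may be returned instead when
       inputs are not ready yet; in that case supply [on_cancelled] to
       [call_with_interrupt]. *)
    Bucket.Job [x]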
OCaml
hhvm/hphp/hack/src/procs/worker.ml
(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. * * This source code is licensed under the MIT license found in the * LICENSE file in the "hack" directory of this source tree. * *) module CamlGc = Gc open Hh_prelude module Gc = CamlGc (***************************************************************************** * The job executed by the worker. * * The 'serializer' is the job continuation: it is a function that must * be called at the end of the request in order to send back the result * to the controller process (this is "internal business", this is not visible outside * this module). The clone process will provide the expected function. * cf. 'send_result' in 'read_and_process_job'. * *****************************************************************************) type request = Request of (serializer -> unit) * metadata_in and serializer = { send: 'a. 'a -> unit } and metadata_in = { log_globals: HackEventLogger.serialized_globals } type metadata_out = { stats: Measure.record_data; log_globals: HackEventLogger.serialized_globals; } type subprocess_job_status = Subprocess_terminated of Unix.process_status let on_clone_cancelled parent_outfd = (* The cancelling controller will ignore the result of a cancelled job anyway (see * wait_for_cancel function), so we can send back anything. Write twice, since * the normal response writes twice too. *) Marshal_tools.to_fd_with_preamble parent_outfd "anything" |> ignore; Marshal_tools.to_fd_with_preamble parent_outfd "anything" |> ignore (***************************************************************************** * Process a single job in a worker (or a clone). *****************************************************************************) type job_outcome = [ `Success | `Error of Exit_status.t | `Worker_cancelled | `Controller_has_died ] let read_and_process_job ic oc : job_outcome = let start_user_time = ref 0. in let start_system_time = ref 0. in let start_minor_words = ref 0. in let start_promoted_words = ref 0. in let start_major_words = ref 0. in let start_minor_collections = ref 0 in let start_major_collections = ref 0 in let start_wall_time = ref 0. in let start_proc_fs_status = ref None in let infd = Daemon.descr_of_in_channel ic in let outfd = Daemon.descr_of_out_channel oc in let send_result data = Mem_profile.stop (); let tm = Unix.times () in let end_user_time = tm.Unix.tms_utime +. tm.Unix.tms_cutime in let end_system_time = tm.Unix.tms_stime +. tm.Unix.tms_cstime in let { Gc.minor_words = end_minor_words; promoted_words = end_promoted_words; major_words = end_major_words; minor_collections = end_minor_collections; major_collections = end_major_collections; _; } = Gc.quick_stat () in let (major_time, minor_time) = Sys_utils.get_gc_time () in Measure.sample "worker_gc_major_wall_time" major_time; Measure.sample "worker_gc_minor_wall_time" minor_time; Measure.sample "worker_user_time" (end_user_time -. !start_user_time); Measure.sample "worker_system_time" (end_system_time -. !start_system_time); Measure.sample "worker_wall_time" (Unix.gettimeofday () -. !start_wall_time); Measure.track_distribution "minor_words" ~bucket_size:(float (100 * 1024 * 1024)); Measure.sample "minor_words" (end_minor_words -. !start_minor_words); Measure.track_distribution "promoted_words" ~bucket_size:(float (25 * 1024 * 1024)); Measure.sample "promoted_words" (end_promoted_words -. !start_promoted_words); Measure.track_distribution "major_words" ~bucket_size:(float (50 * 1024 * 1024)); Measure.sample "major_words" (end_major_words -.
!start_major_words); Measure.sample "minor_collections" (float (end_minor_collections - !start_minor_collections)); Measure.sample "major_collections" (float (end_major_collections - !start_major_collections)); begin match (!start_proc_fs_status, ProcFS.status_for_pid (Unix.getpid ())) with | ( Some { ProcFS.rss_total = start; _ }, Ok { ProcFS.rss_total = total; rss_hwm = hwm; _ } ) -> Measure.sample "worker_rss_start" (float start); Measure.sample "worker_rss_delta" (float (total - start)); Measure.sample "worker_rss_hwm_delta" (float (hwm - start)) | _ -> () end; (* After this point, it is critical to not throw a Worker_should_exit exception; otherwise outfd might end up being corrupted *) WorkerCancel.with_no_cancellations (fun () -> let len = Measure.time "worker_send_response" (fun () -> Marshal_tools.to_fd_with_preamble ~flags:[Marshal.Closures] outfd data) in if len > 30 * 1024 * 1024 (* 30 MiB *) then ( Hh_logger.log ("WARNING(WORKER_LARGE_DATA_SEND): you are sending quite a lot of " ^^ "data (%d bytes), which may have an adverse performance impact. " ^^ "If you are sending closures, double-check to ensure that " ^^ "they have not captured large values in their environment.") len; HackEventLogger.worker_large_data_send ~path:Relative_path.default (Telemetry.create () |> Telemetry.int_ ~key:"len" ~value:len) ); Measure.sample "worker_response_len" (float len); let metadata_out = { stats = Measure.serialize (Measure.pop_global ()); log_globals = HackEventLogger.serialize_globals (); } in let _ = Marshal_tools.to_fd_with_preamble outfd metadata_out in ()) in try Measure.push_global (); let request : request = Measure.time "worker_read_request" (fun () -> Marshal_tools.from_fd_with_preamble infd) in let (Request (do_process, { log_globals })) = request in let tm = Unix.times () in let gc = Gc.quick_stat () in Sys_utils.start_gc_profiling (); start_user_time := tm.Unix.tms_utime +. tm.Unix.tms_cutime; start_system_time := tm.Unix.tms_stime +. tm.Unix.tms_cstime; start_minor_words := gc.Gc.minor_words; start_promoted_words := gc.Gc.promoted_words; start_major_words := gc.Gc.major_words; start_minor_collections := gc.Gc.minor_collections; start_major_collections := gc.Gc.major_collections; start_wall_time := Unix.gettimeofday (); start_proc_fs_status := ProcFS.status_for_pid (Unix.getpid ()) |> Core.Result.ok; HackEventLogger.deserialize_globals log_globals; Mem_profile.start (); do_process { send = send_result }; `Success with | WorkerCancel.Worker_should_exit -> `Worker_cancelled | SharedMem.Out_of_shared_memory -> `Error Exit_status.Out_of_shared_memory | SharedMem.Hash_table_full -> `Error Exit_status.Hash_table_full | SharedMem.Heap_full -> `Error Exit_status.Heap_full | SharedMem.Sql_assertion_failure err_num -> `Error begin match err_num with | 11 -> Exit_status.Sql_corrupt | 14 -> Exit_status.Sql_cantopen | 21 -> Exit_status.Sql_misuse | _ -> Exit_status.Sql_assertion_failure end | End_of_file -> (* This happens in the expected graceful shutdown path of our unit tests: the controller shuts down its end of the pipe, and therefore when we call [from_fd_with_preamble] above to get the next work-item, we get End_of_file. We're catching it here, rather than solely around [from_fd_with_preamble], because it's easier. This is fine because workers do no reading other than from the server. 
*) `Controller_has_died | Unix.Unix_error (Unix.EPIPE, _, _) -> (* This happens in the expected abrupt shutdown path of hh_server: the controller process shuts down, and therefore when we finish our batch of work and try to write the answer in [send_result] above, we get EPIPE. We're catching it here, rather than solely around [send_result], because it's easier. This is fine because workers have no pipes other than to the server. We do log to the server-log, though, which is fair since it was an abrupt shutdown. *) (* Note: there are other manifestations of server shutdown, e.g. Marshal_tools.Reading_Preamble_Exception. I'm not confident I know all of them, nor can I tell which ones are expected vs unexpected, so I'll leave them all to the catch-all handler below. *) Hh_logger.log "Worker got EPIPE due to server shutdown"; `Controller_has_died | exn -> let e = Exception.wrap exn in Hh_logger.log "WORKER_EXCEPTION %s" (Exception.to_string e |> Exception.clean_stack); EventLogger.log_if_initialized (fun () -> HackEventLogger.worker_exception e); (* What exit code should we emit for an uncaught exception? The OCaml runtime emits exit code 2 for uncaught exceptions. We should really pick our own different code here, but (history) we don't. How can we convey exit code 2? By unfortunate accident, Exit_status.Type_error gets turned into "2". So that's how we're going to return exit code 2. Yuck. *) `Error Exit_status.Type_error (***************************************************************************** * Entry point for spawned worker. *****************************************************************************) (* The exit code used when the controller died and the clone could not read * the input job *) let controller_has_died_code = 1 let process_job_and_exit ic oc = match read_and_process_job ic oc with | `Success -> exit 0 | `Error status -> Exit.exit status | `Worker_cancelled -> on_clone_cancelled (Daemon.descr_of_out_channel oc); exit 0 | `Controller_has_died -> `Controller_has_died let win32_worker_main restore (state, _controller_fd) (ic, oc) = (* On Windows, there is no clone process, the worker does the job directly and exits when it is done. *) restore state; match process_job_and_exit ic oc with | `Controller_has_died -> exit 0 let maybe_send_status_to_controller fd status = match fd with | None -> () | Some fd -> let to_controller fd msg = ignore (Marshal_tools.to_fd_with_preamble fd msg : int) in (match status with | Unix.WEXITED 0 -> () | Unix.WEXITED code when code = controller_has_died_code -> (* Since the controller died we'd get an error writing to its * fd; so we simply do not do anything. *) () | _ -> Timeout.with_timeout ~timeout:10 ~on_timeout:(fun _ -> Hh_logger.log "Timed out sending status to controller") ~do_:(fun _ -> to_controller fd (Subprocess_terminated status))) (* On Unix each job runs in a forked process. The first thing these jobs do is * deserialize a marshaled closure which is the job. * * The marshaled representation of a closure includes an MD5 digest of the code * segment and an offset. The digest is lazily computed, but if it has not been * computed before the fork, then each forked process will need to compute it. * * To avoid this, we deserialize a dummy closure before forking, so that we only * need to calculate the digest once per worker instead of once per job. *) let dummy_closure () = () (** * On Windows, the worker is a process and runs the job directly. See above.
* * On Unix, the worker is split into a main worker process and a clone worker * process with the main process reaping the clone process with waitpid. * The clone runs the actual job and sends the results over the output channel. * If the clone exits normally (exit code 0), the main worker process keeps * running and waiting for the next incoming job before forking a new clone. * * If the clone exits with a non-zero code, the main worker process also exits * with the same code. Thus, the controller process of this worker can just * waitpid directly on the main worker process and see correct exit codes. * * NOTE: `WSIGNALED i` and `WSTOPPED i` are all coalesced into `exit 2` * and `exit 3` respectively, so some resolution is lost. If the clone worker * is, for example, SIGKILL'd by the OOM killer, the controller process won't * be aware of this. * * To regain this lost resolution, controller_fd can be optionally set. The * real exit status (including WSIGNALED and WSTOPPED) will be sent over * this file descriptor to the controller when the clone worker exits * abnormally (with a non-zero exit code). *) let unix_worker_main restore (state, controller_fd) (ic, oc) = restore state; (* see dummy_closure above *) ignore Marshal.(from_bytes (to_bytes dummy_closure [Closures]) 0); let in_fd = Daemon.descr_of_in_channel ic in while true do (* Wait for an incoming job: is there something to read? But we don't read it yet. It will be read by the forked clone. *) let (readyl, _, _) = Unix.select [in_fd] [] [] (-1.0) in if List.is_empty readyl then exit 0; (* We fork a clone process for every incoming request and we let it exit after one request. This is the quickest GC. *) match Fork.fork () with | 0 -> (match process_job_and_exit ic oc with | `Controller_has_died -> exit controller_has_died_code) | pid -> (* Wait for the clone process termination... *) let status = snd (Sys_utils.waitpid_non_intr [] pid) in let () = maybe_send_status_to_controller controller_fd status in (match status with | Unix.WEXITED 0 -> () | Unix.WEXITED code when code = controller_has_died_code -> (* The controller has died, we can stop working *) exit 0 | Unix.WEXITED code -> Printf.printf "Worker exited (code: %d)\n" code; Stdlib.flush stdout; Stdlib.exit code | Unix.WSIGNALED x -> let sig_str = PrintSignal.string_of_signal x in Printf.printf "Worker interrupted with signal: %s\n" sig_str; Stdlib.flush stdout; Stdlib.exit 2 | Unix.WSTOPPED x -> Printf.printf "Worker stopped with signal: %d\n" x; Stdlib.flush stdout; Stdlib.exit 3) done; assert false (* This function offers the same functionality as unix_worker_main but * does not clone a process for each incoming job. *) let unix_worker_main_no_clone restore (state, controller_fd) (ic, oc) = (* T83401330: Long-lived workers are not production ready because they will not flush their logs often enough (c.f. EventLogger.flush). This can be addressed in this file, or in the user code that needs to log.
*) restore state; let exit code = let status = Unix.WEXITED (Exit_status.exit_code code) in let () = maybe_send_status_to_controller controller_fd status in Exit.exit code in let in_fd = Daemon.descr_of_in_channel ic in let out_fd = Daemon.descr_of_out_channel oc in while true do let (readyl, _, _) = Unix.select [in_fd] [] [] (-1.0) in if List.is_empty readyl then exit Exit_status.No_error; match read_and_process_job ic oc with | `Success -> () | `Error status -> exit status | `Worker_cancelled -> on_clone_cancelled out_fd | `Controller_has_died -> (* The controller has died, we can stop working *) exit Exit_status.No_error done; (* The only way out of the above loop is to exit *) assert false
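For orientation, a sketch of what travels down the pipe to [read_and_process_job]: the controller marshals a [Request] closure like the one below (this mirrors [wrap_request] in workerController.ml; [make_request] itself is a hypothetical name, using this module's types in scope):

let make_request (f : unit -> 'result) : request =
  Request
    ( (* The worker invokes this with its [serializer]; [send] marshals the
         job's result back over the output channel. *)
      (fun { send } -> send (f ())),
      { log_globals = HackEventLogger.serialize_globals () } )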
OCaml Interface
hhvm/hphp/hack/src/procs/worker.mli
(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. * * This source code is licensed under the MIT license found in the * LICENSE file in the "hack" directory of this source tree. * *) type request = Request of (serializer -> unit) * metadata_in and serializer = { send: 'a. 'a -> unit } and metadata_in = { log_globals: HackEventLogger.serialized_globals } type metadata_out = { stats: Measure.record_data; log_globals: HackEventLogger.serialized_globals; } type subprocess_job_status = Subprocess_terminated of Unix.process_status val win32_worker_main : ('a -> 'b) -> 'a * Unix.file_descr option -> request Daemon.in_channel * 'c Daemon.out_channel -> 'd val unix_worker_main : ('a -> 'b) -> 'a * Unix.file_descr option -> request Daemon.in_channel * 'c Daemon.out_channel -> 'd val unix_worker_main_no_clone : ('a -> 'b) -> 'a * Unix.file_descr option -> request Daemon.in_channel * 'c Daemon.out_channel -> 'd
OCaml
hhvm/hphp/hack/src/procs/workerController.ml
(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. * * This source code is licensed under the MIT license found in the * LICENSE file in the "hack" directory of this source tree. * *) open Hh_prelude open Worker (***************************************************************************** * Module building workers * * A worker is a subprocess executing an arbitrary function. * * You should create a fixed number of workers up front and then reuse them, * both because the number of workers is limited and to make the load-balancing * of tasks better (cf multiWorker.ml) * * On Unix, we spawn workers when initializing Hack. Then, each * worker forks a clone process for each incoming request. * The forked clone will exit after processing a single request. * * On Windows, we do not pre-spawn when initializing Hack, we just * allocate all the required information into a record. Then, we * spawn a worker process for each incoming request. * It will also exit after one request. * * A worker never handles more than one request at a time. * *****************************************************************************) type process_id = int type worker_id = int type worker_failure = (* Worker force quit by Out Of Memory. *) | Worker_oomed | Worker_quit of Unix.process_status exception Worker_failed of (process_id * worker_failure) exception Worker_busy type send_job_failure = | Worker_already_exited of Unix.process_status | Other_send_job_failure of exn exception Worker_failed_to_send_job of send_job_failure let status_string = function | Unix.WEXITED i -> Printf.sprintf "WEXITED %d" i | Unix.WSIGNALED i -> Printf.sprintf "WSIGNALED %d" i | Unix.WSTOPPED i -> Printf.sprintf "WSTOPPED %d" i let failure_to_string f = match f with | Worker_oomed -> "Worker_oomed" | Worker_quit s -> Printf.sprintf "(Worker_quit %s)" (status_string s) let () = Caml.Printexc.register_printer @@ function | Worker_failed_to_send_job (Other_send_job_failure exn) -> Some (Printf.sprintf "Other_send_job_failure: %s" (Exn.to_string exn)) | Worker_failed_to_send_job (Worker_already_exited status) -> Some (Printf.sprintf "Worker_already_exited: %s" (status_string status)) | Worker_failed (id, failure) -> Some (Printf.sprintf "Worker_failed (process_id = %d): %s" id (failure_to_string failure)) | _ -> None (* Should we 'prespawn' the worker ? *) let use_prespawned = not Sys.win32 (* The maximum number of workers *) let max_workers = 1000 type void (* an empty type *) type call_wrapper = { wrap: 'x 'b. ('x -> 'b) -> 'x -> 'b } (***************************************************************************** * Everything we need to know about a worker. * *****************************************************************************) type worker = { (* Simple id for the worker. This is not the worker pid: on Windows, we spawn * a new worker for each job. * * This is also an offset into the shared heap segment, used to access * worker-local data. As such, the numbering is important. The IDs must be * dense and start at 1. (0 is the controller process offset.) *) id: int; (* The call wrapper will wrap any workload sent to the worker (via "call" * below) before invoking the workload. * * That is, when calling the worker with workload `f x`, it will be wrapped * as `wrap (f x)`. * * This allows universal handling of workload at the time we create the actual * workers. For example, this can be useful to handle exceptions uniformly * across workers regardless of what workload is called on them.
*) call_wrapper: call_wrapper option; (* On Unix, the main worker process sends status messages over this fd to this * controller. On Windows, it doesn't send anything, so don't try to read from * it (it should be set to None). *) controller_fd: Unix.file_descr option; (* Sanity check: is the worker still available ? *) mutable force_quit: bool; (* Sanity check: is the worker currently busy ? *) mutable busy: bool; (* If the worker is currently busy, the handle of the job it's executing *) mutable handle: 'a 'b. ('a, 'b) handle option; (* On Unix, a reference to the 'prespawned' worker. *) prespawned: (void, request) Daemon.handle option; (* On Windows, a function to spawn a worker. *) spawn: unit -> (void, request) Daemon.handle; } (***************************************************************************** * The handle is what we get back when we start a job. It's a "future" * (sometimes called a "promise"). The scheduler uses the handle to retrieve * the result of the job when the task is done (cf multiWorker.ml). * *****************************************************************************) and ('a, 'b) handle = ('a, 'b) delayed ref (* The integer represents the job the handle belongs to. * See MultiThreadedCall.call_id. *) and ('a, 'b) delayed = ('a * int) * 'b worker_handle and 'b worker_handle = | Processing of 'b job | Cached of 'b * worker | Canceled | Failed of Exception.t (* The controller's job has a worker. The worker is a single process on Windows. * On Unix, the worker consists of main and clone worker processes. *) and 'a job = { (* The associated worker *) worker: worker; (* The file descriptor we might pass to select in order to wait for the worker to finish its job. *) infd: Unix.file_descr; (* A blocking function that returns the job result. *) result: unit -> 'a; (* A blocking function that waits for job cancellation (see Worker.cancel) * to finish *) wait_for_cancel: unit -> unit; } let worker_id w = w.id (* Has the worker been force quit *) let is_force_quit w = w.force_quit (* Mark the worker as busy. Throw if it is already busy *) let mark_busy w = if w.busy then raise Worker_busy; w.busy <- true let get_handle_UNSAFE w = w.handle (* Mark the worker as free *) let mark_free w = w.busy <- false; w.handle <- None (* If the worker isn't prespawned, spawn the worker *) let spawn w = match w.prespawned with | None -> w.spawn () | Some handle -> handle (* If the worker isn't prespawned, close the worker *) let close w h = if Option.is_none w.prespawned then Daemon.close h (* If there is a call_wrapper, apply it and create the Request *) let wrap_request w f x metadata_in = match w.call_wrapper with | Some { wrap } -> Request ((fun { send } -> send (wrap f x)), metadata_in) | None -> Request ((fun { send } -> send (f x)), metadata_in) type 'a entry_state = 'a * Gc.control * SharedMem.handle * int (* The first bool parameter specifies whether to use worker clones * or not: for non-longlived-workers, we must clone. *) type 'a worker_params = { longlived_workers: bool; entry_state: 'a entry_state; controller_fd: Unix.file_descr option; } type 'a entry = ('a worker_params, request, void) Daemon.entry (************************************************************************** * Creates a pool of workers. * **************************************************************************) let workers = ref [] (* Build one worker.
*) let make_one ?call_wrapper controller_fd spawn id = if id >= max_workers then failwith "Too many workers"; let prespawned = if not use_prespawned then None else Some (spawn ()) in let worker = { call_wrapper; controller_fd; id; busy = false; handle = None; force_quit = false; prespawned; spawn; } in workers := worker :: !workers; worker (* Make a few workers. When a workload is given to a worker (via "call" below), * the workload is wrapped in the call_wrapper. *) let make ?call_wrapper ~longlived_workers ~saved_state ~entry nbr_procs ~gc_control ~heap_handle : worker list = let setup_controller_fd () = if use_prespawned then let (parent_fd, child_fd) = Unix.pipe () in (* parent_fd is only used in this process. Don't leak it to children. * This will auto-close parent_fd in children created with Daemon.spawn * since Daemon.spawn uses exec. *) let () = Unix.set_close_on_exec parent_fd in (Some parent_fd, Some child_fd) else (* We don't use the side channel on Windows. *) (None, None) in let spawn worker_id name child_fd () = SharedMem.clear_close_on_exec heap_handle; (* Daemon.spawn runs exec after forking. We explicitly *do* want to "leak" * child_fd to this one spawned process because it will be using that FD to * send messages back up to us. Close_on_exec is probably already false, but * we force it again to be false here just in case. *) Option.iter child_fd ~f:Unix.clear_close_on_exec; let state = (saved_state, gc_control, heap_handle, worker_id) in let handle = Daemon.spawn ~name (Daemon.null_fd (), Unix.stdout, Unix.stderr) entry { longlived_workers; entry_state = state; controller_fd = child_fd } in SharedMem.set_close_on_exec heap_handle; (* This process no longer needs child_fd after it has spawned the child. * Messages are read using controller_fd. *) Option.iter child_fd ~f:Unix.close; handle in let made_workers = ref [] in let pid = Unix.getpid () in for n = 1 to nbr_procs do let (controller_fd, child_fd) = setup_controller_fd () in let name = Printf.sprintf "worker_process_%d_out_of_%d_for_server_pid_%d" n nbr_procs pid in made_workers := make_one ?call_wrapper controller_fd (spawn n name child_fd) n :: !made_workers done; !made_workers (************************************************************************** * Send a job to a worker * **************************************************************************) let call ?(call_id = 0) w (type a b) (f : a -> b) (x : a) : (a, b) handle = if is_force_quit w then Printf.ksprintf failwith "force quit worker (%d)" (worker_id w); mark_busy w; (* Spawn the worker, if not prespawned. *) let ({ Daemon.pid = worker_pid; channels = (inc, outc) } as h) = spawn w in let infd = Daemon.descr_of_in_channel inc in let outfd = Daemon.descr_of_out_channel outc in let worker_failed pid_stat controller_fd = (* If we have a controller fd, we read the clone exit status * over that channel instead of the exit status of the worker * process.
*) let pid_stat = match controller_fd with | None -> snd pid_stat | Some fd -> Timeout.with_timeout ~timeout:3 ~on_timeout:(fun _ -> snd pid_stat) ~do_:(fun _ -> try let (Subprocess_terminated status) = Marshal_tools.from_fd_with_preamble fd in status with | End_of_file -> snd pid_stat) in match pid_stat with | Unix.WEXITED i when i = Exit_status.(exit_code Out_of_shared_memory) -> raise SharedMem.Out_of_shared_memory | Unix.WEXITED i -> Printf.eprintf "Subprocess(%d): fail %d" worker_pid i; raise (Worker_failed (worker_pid, Worker_quit (Unix.WEXITED i))) | Unix.WSTOPPED i -> raise (Worker_failed (worker_pid, Worker_quit (Unix.WSTOPPED i))) | Unix.WSIGNALED i -> raise (Worker_failed (worker_pid, Worker_quit (Unix.WSIGNALED i))) in (* Checks if the worker has exited. *) let with_exit_status_check ?(block_on_waitpid = false) worker_pid f = let wait_flags = if block_on_waitpid then [] else [Unix.WNOHANG] in let pid_stat = Unix.waitpid wait_flags worker_pid in match pid_stat with | (0, _) -> f () | (_, Unix.WEXITED 0) -> (* This will never actually happen. A worker process only exits if this * controller process has exited. *) failwith "Worker process exited 0 unexpectedly" | _ -> worker_failed pid_stat w.controller_fd in (* Prepare to read the answer from the worker process. *) let get_result_with_status_check ?(block_on_waitpid = false) () : b = with_exit_status_check ~block_on_waitpid worker_pid (fun () -> let data : b = Marshal_tools.from_fd_with_preamble infd in let ({ stats; log_globals } : metadata_out) = Marshal_tools.from_fd_with_preamble infd in close w h; Measure.merge (Measure.deserialize stats); HackEventLogger.deserialize_globals log_globals; data) in let result () : b = (* * We run the "with_exit_status_check" twice (first time non-blockingly). * This is because of a race condition. * * Immediately after the main worker process forks the clone process (see worker.ml), * it does a blocking, non-interruptible waitpid on the clone process. This means * that if the clone process fails, the main worker process will see the failure and * also fail accordingly, which we will catch with "with_exit_status_check". * This is designed around an assumption that, if the clone fails, * the main worker process will also fail. Therefore, the WorkerController here * will see the failure and not attempt to read the result with * "Marshal_tools.from_fd_with_preamble". * * However, there is a scenario in which the assumption above cannot hold when * the clone process fails: * - the worker clone process is forked * - the WorkerController checks the worker's main process's status * - the non-interruptible waitpid call hasn't started yet * * Under such circumstances, the WorkerController could try to read the result * with Marshal_tools, get an End_of_file, and crash. * * To get around this, we give the main worker process time to "catch up" and reach * the non-interruptible waitpid that we expect it to be at. Eventually, it will also * fail accordingly, since its clone has failed. *) try get_result_with_status_check () with | End_of_file -> get_result_with_status_check ~block_on_waitpid:true () in let wait_for_cancel () : unit = with_exit_status_check worker_pid (fun () -> (* Depending on whether we manage to force quit the worker before it starts writing * results back, this will return either actual results, or "anything" * (written by the clone's cancellation handler as it exited; see * on_clone_cancelled in worker.ml). The types don't match, but we * ignore both of them anyway.
*) let (_ : 'c) = Marshal_tools.from_fd_with_preamble infd in let (_ : 'c) = Marshal_tools.from_fd_with_preamble infd in ()) in let job = { result; infd; worker = w; wait_for_cancel } in let metadata_in = { log_globals = HackEventLogger.serialize_globals () } in let (request : Worker.request) = wrap_request w f x metadata_in in (* Send the job to the worker. *) let () = try Marshal_tools.to_fd_with_preamble ~flags:[Marshal.Closures] outfd request |> ignore with | e -> begin match Unix.waitpid [Unix.WNOHANG] worker_pid with | (0, _) -> raise (Worker_failed_to_send_job (Other_send_job_failure e)) | (_, status) -> raise (Worker_failed_to_send_job (Worker_already_exited status)) end in (* And return the 'handle'. *) let handle : (a, b) handle = ref ((x, call_id), Processing job) in w.handle <- Some (Obj.magic handle); handle (************************************************************************** * Read results from a handle. * This might block if the worker hasn't finished yet. * **************************************************************************) let with_worker_exn (handle : ('a, 'b) handle) job f = try f () with | Worker_failed (pid, status) as exn -> let e = Exception.wrap exn in mark_free job.worker; handle := (fst !handle, Failed e); begin match status with | Worker_quit (Unix.WSIGNALED -7) -> raise (Worker_failed (pid, Worker_oomed)) | _ -> Exception.reraise e end | exn -> let e = Exception.wrap exn in mark_free job.worker; handle := (fst !handle, Failed e); Exception.reraise e let get_result d = match snd !d with | Cached (x, _) -> x | Failed e -> Exception.reraise e | Canceled -> raise End_of_file | Processing s -> with_worker_exn d s (fun () -> let res = s.result () in mark_free s.worker; d := (fst !d, Cached (res, s.worker)); res) (***************************************************************************** * Our polling primitive on workers * Given a list of handles, returns the ones that are ready. * *****************************************************************************) type ('a, 'b) selected = { readys: ('a, 'b) handle list; waiters: ('a, 'b) handle list; ready_fds: Unix.file_descr list; } let get_processing ds = List.rev_filter_map ds ~f:(fun d -> match snd !d with | Processing p -> Some p | _ -> None) let select ds additional_fds = let processing = get_processing ds in let fds = List.map ~f:(fun { infd; _ } -> infd) processing in let (ready_fds, _, _) = if List.is_empty fds || List.length processing <> List.length ds then ([], [], []) else Sys_utils.select_non_intr (fds @ additional_fds) [] [] (-1.)
in let additional_ready_fds = List.filter ~f:(List.mem ~equal:Poly.( = ) ready_fds) additional_fds in List.fold_right ~f:(fun d acc -> match snd !d with | Cached _ | Canceled | Failed _ -> { acc with readys = d :: acc.readys } | Processing s when List.mem ~equal:Poly.( = ) ready_fds s.infd -> { acc with readys = d :: acc.readys } | Processing _ -> { acc with waiters = d :: acc.waiters }) ~init:{ readys = []; waiters = []; ready_fds = additional_ready_fds } ds let get_worker h = match snd !h with | Processing { worker; _ } -> worker | Cached (_, worker) -> worker | Canceled | Failed _ -> invalid_arg "Worker.get_worker" let get_job h = fst (fst !h) let get_call_id h = snd (fst !h) (************************************************************************** * Worker termination **************************************************************************) let force_quit w = if not (is_force_quit w) then ( w.force_quit <- true; Option.iter ~f:Daemon.force_quit w.prespawned ) let force_quit_all () = List.iter ~f:force_quit !workers let wait_for_cancel d = match snd !d with | Processing s -> with_worker_exn d s (fun () -> s.wait_for_cancel (); mark_free s.worker; d := (fst !d, Canceled)) | _ -> () let cancel handles = WorkerCancel.stop_workers (); List.iter handles ~f:(fun x -> wait_for_cancel x); WorkerCancel.resume_workers (); ()
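A sketch of the intended call / select / get_result life-cycle from the scheduler's side (hypothetical; MultiThreadedCall drives this for real, with bucket refilling and failure coalescing). It uses this module's definitions directly and assumes no handle gets cancelled:

let rec drain acc (handles : ('a, 'b) handle list) : 'b list =
  match handles with
  | [] -> acc
  | _ ->
    (* Block until at least one worker fd is readable. *)
    let { readys; waiters; _ } = select handles [] in
    let acc =
      List.fold readys ~init:acc ~f:(fun acc h -> get_result h :: acc)
    in
    drain acc waiters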
OCaml Interface
hhvm/hphp/hack/src/procs/workerController.mli
(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. * * This source code is licensed under the MIT license found in the * LICENSE file in the "hack" directory of this source tree. * *) open Hh_prelude (*****************************************************************************) (* Module building workers. * A worker is a subprocess executing an arbitrary function. * You should create a fixed number of workers up front and then reuse them, * both because the number of workers is limited and to make the load-balancing * of tasks better (cf multiWorker.ml). *) (*****************************************************************************) type process_id = int type worker_id = int type worker_failure = (* Worker force quit by Out Of Memory. *) | Worker_oomed | Worker_quit of Unix.process_status exception Worker_failed of (process_id * worker_failure) (* Raise this exception when sending work to a worker that is already busy. * We should never be doing that, and this is an assertion error. *) exception Worker_busy val failure_to_string : worker_failure -> string type send_job_failure = | Worker_already_exited of Unix.process_status | Other_send_job_failure of exn exception Worker_failed_to_send_job of send_job_failure (* The type of a worker visible to the outside world *) type worker (*****************************************************************************) (* The handle is what we get back when we start a job. It's a "future" * (sometimes called a "promise"). The scheduler uses the handle to retrieve * the result of the job when the task is done (cf multiWorker.ml). *) (*****************************************************************************) type ('job, 'result) handle (* An empty type *) type void (* Get the worker's id *) val worker_id : worker -> worker_id (* Has the worker been force quit *) val is_force_quit : worker -> bool (* Mark the worker as busy. Throw if it is already busy *) val mark_busy : worker -> unit (* If the worker is busy, what it is doing. Note that calling this is not * type safe: 'a and 'b are free type variables, and they depend on the * job being executed by the worker. *) val get_handle_UNSAFE : worker -> ('a, 'b) handle option (* Mark the worker as free *) val mark_free : worker -> unit (* If the worker isn't prespawned, spawn the worker *) val spawn : worker -> (void, Worker.request) Daemon.handle (* If the worker isn't prespawned, close the worker *) val close : worker -> (void, Worker.request) Daemon.handle -> unit type call_wrapper = { wrap: 'x 'b. ('x -> 'b) -> 'x -> 'b } type 'a entry_state = 'a * Gc.control * SharedMem.handle * int (* The first bool parameter specifies whether to use worker clones * or not: for non-longlived-workers, we must clone. *) type 'a worker_params = { longlived_workers: bool; entry_state: 'a entry_state; controller_fd: Unix.file_descr option; } type 'a entry = ('a worker_params, Worker.request, void) Daemon.entry (* Creates a pool of workers. *) val make : ?call_wrapper: (* See docs in WorkerController.worker for call_wrapper.
*) call_wrapper -> longlived_workers:bool -> saved_state:'a -> entry:'a entry -> int -> gc_control:Gc.control -> heap_handle:SharedMem.handle -> worker list (** Call in a sub-process (CAREFUL, GLOBALS ARE COPIED) *) val call : ?call_id:int -> worker -> ('a -> 'b) -> 'a -> ('a, 'b) handle (* See MultiThreadedCall.call_id *) val get_call_id : ('a, 'b) handle -> int (* Retrieves the job that the worker is currently processing *) val get_job : ('a, 'b) handle -> 'a (* Retrieves the result (once the worker is done); hangs otherwise *) val get_result : ('a, 'b) handle -> 'b (* Selects among multiple handles those which are ready. *) type ('a, 'b) selected = { readys: ('a, 'b) handle list; waiters: ('a, 'b) handle list; (* Additional (non worker) ready fds that we selected on. *) ready_fds: Unix.file_descr list; } val select : ('a, 'b) handle list -> Unix.file_descr list -> ('a, 'b) selected (* Returns the worker which produces this handle *) val get_worker : ('a, 'b) handle -> worker (* Force quit the workers *) val force_quit_all : unit -> unit val cancel : ('a, 'b) handle list -> unit
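Putting the pieces together, a minimal single-job round trip (hypothetical caller; note that the closure is marshaled to the worker, so beware of captured globals):

let double_on_worker (w : WorkerController.worker) : int =
  let handle = WorkerController.call w (fun x -> x * 2) 21 in
  (* Blocks until the worker (or its clone) writes the answer back. *)
  WorkerController.get_result handle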
OCaml
hhvm/hphp/hack/src/procs/workerControllerEntryPoint.ml
(* * Copyright (c) 2021, Facebook, Inc. * All rights reserved. * * This source code is licensed under the MIT license found in the * LICENSE file in the "hack" directory of this source tree. * *) open Hh_prelude open Worker open WorkerController let entry_counter = ref 0 let win32_worker ~restore p = (* Explicitly ensure that Folly is initialized (installs signal handlers) *) Folly.ensure_folly_init (); win32_worker_main restore (p.entry_state, p.controller_fd) let unix_worker ~restore { longlived_workers; entry_state; controller_fd } = (* Explicitly ensure that Folly is initialized (installs signal handlers) *) Folly.ensure_folly_init (); if longlived_workers then unix_worker_main_no_clone restore (entry_state, controller_fd) else unix_worker_main restore (entry_state, controller_fd) let register ~restore = incr entry_counter; let restore (st, gc_control, heap_handle, worker_id) = restore st ~worker_id; SharedMem.connect heap_handle ~worker_id; Gc.set gc_control in let name = Printf.sprintf "subprocess_%d" !entry_counter in let worker_main = if Sys.win32 then win32_worker ~restore else unix_worker ~restore in Daemon.register_entry_point name worker_main
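A sketch of the start-up wiring (all values hypothetical): [register] must run before any worker is spawned, and the resulting entry point is what [MultiWorker.make] forks into. [restore_globals], [saved_state], [num_procs], and [heap_handle] are assumed names for illustration only.

let entry =
  WorkerControllerEntryPoint.register ~restore:(fun saved_state ~worker_id ->
      (* Re-establish per-process state inside the spawned worker. *)
      ignore worker_id;
      restore_globals saved_state)

let workers =
  MultiWorker.make
    ~longlived_workers:false
    ~saved_state
    ~entry
    num_procs
    ~gc_control:(Gc.get ())
    ~heap_handle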
OCaml Interface
hhvm/hphp/hack/src/procs/workerControllerEntryPoint.mli
(* * Copyright (c) 2021, Facebook, Inc. * All rights reserved. * * This source code is licensed under the MIT license found in the * LICENSE file in the "hack" directory of this source tree. * *) val register : restore:('a -> worker_id:int -> unit) -> 'a WorkerController.entry
OCaml
hhvm/hphp/hack/src/providers/ast_provider.ml
(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. * * This source code is licensed under the MIT license found in the * LICENSE file in the "hack" directory of this source tree. * *) open Hh_prelude (*****************************************************************************) (* Table containing all the Abstract Syntax Trees (cf ast.ml) for each file.*) (*****************************************************************************) (* We store only the names and declarations in the ParserHeap. The full flag in each function runs a full parsing with method bodies. *) type parse_type = | Decl | Full module ParserHeap = SharedMem.HeapWithLocalCache (SharedMem.ImmediateBackend (SharedMem.Evictable)) (Relative_path.S) (struct type t = Nast.program * parse_type let description = "Ast_Parser" end) (struct let capacity = 1000 end) module LocalParserCache = SharedMem.FreqCache (Relative_path.S) (struct type t = Nast.program let description = "Ast_ParserLocal" end) (struct let capacity = 1000 end) let parse (popt : ParserOptions.t) ~(full : bool) ~(source_text : Full_fidelity_source_text.t) : Errors.t * Parser_return.t = let path = source_text.Full_fidelity_source_text.file_path in let parser_env = Full_fidelity_ast.make_env ~quick_mode:(not full) ~parser_options:popt path in let (err, result) = Errors.do_with_context path @@ fun () -> Full_fidelity_ast.from_source_text_with_legacy parser_env source_text in let ast = result.Parser_return.ast in let ast = if Relative_path.(is_hhi (prefix path)) && ParserOptions.deregister_php_stdlib popt then Nast.deregister_ignored_attributes ast else ast in (err, { result with Parser_return.ast }) let get_from_local_cache ~full ctx file_name = let with_no_err ast = (Errors.empty, ast) in let fn = Relative_path.to_absolute file_name in match LocalParserCache.get file_name with | Some ast -> with_no_err ast | None -> let popt = Provider_context.get_popt ctx in let f contents = let contents = if FindUtils.file_filter fn then contents else "" in match Ide_parser_cache.get_ast_if_active popt file_name contents with | Some ast -> with_no_err ast.Parser_return.ast | None -> let source = Full_fidelity_source_text.make file_name contents in (match Full_fidelity_parser.parse_mode source with | None -> with_no_err [] | Some _ -> (* It's up to Parsing_service to add parsing errors. *) let (err, result) = Errors.do_with_context file_name @@ fun () -> Full_fidelity_ast.defensive_program ~quick:(not full) popt file_name contents in (err, result.Parser_return.ast)) in let (err, ast) = Option.value_map ~default:(with_no_err []) ~f (File_provider.get_contents file_name) in let ast = if Relative_path.(is_hhi (prefix file_name)) && ParserOptions.deregister_php_stdlib popt then Nast.deregister_ignored_attributes ast else ast in if full && Errors.is_empty err then LocalParserCache.add file_name ast; (err, ast) let compute_source_text ~(entry : Provider_context.entry) : Full_fidelity_source_text.t = match entry with | { Provider_context.source_text = Some source_text; _ } -> source_text | _ -> let contents = Provider_context.read_file_contents_exn entry in let source_text = Full_fidelity_source_text.make entry.Provider_context.path contents in entry.Provider_context.source_text <- Some source_text; source_text (* Note that some callers may not actually need the AST errors. This could be improved with a method similar to the TAST-and-errors generation, where the TAST errors are not generated unless necessary. 
*) let compute_parser_return_and_ast_errors ~(popt : ParserOptions.t) ~(entry : Provider_context.entry) : Parser_return.t * Errors.t = match entry with | { Provider_context.ast_errors = Some ast_errors; parser_return = Some parser_return; _; } -> (parser_return, ast_errors) | _ -> let source_text = compute_source_text ~entry in let (ast_errors, parser_return) = parse popt ~full:true ~source_text in entry.Provider_context.ast_errors <- Some ast_errors; entry.Provider_context.parser_return <- Some parser_return; (parser_return, ast_errors) let compute_cst ~(ctx : Provider_context.t) ~(entry : Provider_context.entry) : Provider_context.PositionedSyntaxTree.t = (* TODO: use parser options inside ctx *) let _ = ctx in match entry.Provider_context.cst with | Some cst -> cst | None -> let source_text = compute_source_text ~entry in let cst = Provider_context.PositionedSyntaxTree.make source_text in entry.Provider_context.cst <- Some cst; cst let compute_ast_with_error ~(popt : ParserOptions.t) ~(entry : Provider_context.entry) : Errors.t * Nast.program = let ({ Parser_return.ast; _ }, ast_errors) = compute_parser_return_and_ast_errors ~popt ~entry in (ast_errors, ast) let compute_ast ~(popt : ParserOptions.t) ~(entry : Provider_context.entry) : Nast.program = compute_ast_with_error ~popt ~entry |> snd let compute_comments ~(popt : ParserOptions.t) ~(entry : Provider_context.entry) : Parser_return.comments = let ({ Parser_return.comments; _ }, _ast_errors) = compute_parser_return_and_ast_errors ~popt ~entry in comments let compute_file_info ~(popt : ParserOptions.t) ~(entry : Provider_context.entry) : FileInfo.t = let ast = compute_ast ~popt ~entry in Nast.get_def_names ast let get_ast_with_error ~(full : bool) ctx path = Counters.count Counters.Category.Get_ast @@ fun () -> let parse_from_disk_no_caching ~apply_file_filter = let absolute_path = Relative_path.to_absolute path in if (not apply_file_filter) || FindUtils.file_filter absolute_path then let contents = Sys_utils.cat absolute_path in let source_text = Full_fidelity_source_text.make path contents in let (err, { Parser_return.ast; _ }) = parse (Provider_context.get_popt ctx) ~full ~source_text in (err, ast) else (Errors.empty, []) in (* If there's a ctx, and this file is in the ctx, then use ctx. *) (* Otherwise, the way we fetch/cache ASTs depends on the provider. *) let entry_opt = Relative_path.Map.find_opt (Provider_context.get_entries ctx) path in match (entry_opt, Provider_context.get_backend ctx) with | (_, Provider_backend.Pessimised_shared_memory info) when not info.Provider_backend.allow_ast_caching -> parse_from_disk_no_caching ~apply_file_filter:true | (Some entry, _) -> (* See documentation on `entry` for its invariants. The compute_ast function will use the cached (full) AST if present, and otherwise will compute a full AST and cache it and return it. It's okay for get_ast to return a full AST even if only asked for a partial one. Our principle is that a ctx entry always indicates that the file is open in the IDE, and so will benefit from a full AST at some time, so we might as well get it now. *) compute_ast_with_error ~popt:(Provider_context.get_popt ctx) ~entry | ( _, ( Provider_backend.Rust_provider_backend _ | Provider_backend.Shared_memory | Provider_backend.Pessimised_shared_memory _ ) ) -> begin (* Note that we might be looking up the shared ParserHeap directly, *) (* or maybe into a local-change-stack due to quarantine.
*) match (ParserHeap.get path, full) with | (None, true) | (Some (_, Decl), true) -> (* If we need full, and parser-heap can't provide it, then we *) (* don't want to write a full decl into the parser heap. *) get_from_local_cache ~full ctx path | (None, false) -> (* This is the case where we will write into the parser heap. *) let (err, ast) = get_from_local_cache ~full ctx path in if Errors.is_empty err then ParserHeap.add path (ast, Decl); (err, ast) | (Some (ast, _), _) -> (* It's in the parser-heap! hurrah! *) (Errors.empty, ast) end | (_, Provider_backend.Analysis) -> (* Zoncolan has its own caching layers and does not make use of Hack's caches. *) parse_from_disk_no_caching ~apply_file_filter:false | (_, Provider_backend.Local_memory _) -> (* We never cache ASTs for this provider. There'd be no use. *) (* The only valuable caching is to cache decls. *) parse_from_disk_no_caching ~apply_file_filter:false | (_, Provider_backend.Decl_service _) -> (* Decl service based checks are supposed to cache ASTs inside Provider_context.entries. *) (* This entries cache supports IDE scenarios where files are modified locally in the editor, which *) (* also makes it not performant enough for the critical bulk-checking path. *) (* Caching the current AST locally makes it possible to avoid the entries overhead, while not *) (* reparsing the file over and over. *) get_from_local_cache ~full ctx path let get_ast ~(full : bool) ctx path = get_ast_with_error ~full ctx path |> snd let get_def ~(full : bool) ctx file_name (node_getter : Nast.def -> ('a * string) option) (name_matcher : string -> bool) : 'a option = let defs = get_ast ~full ctx file_name in let rec get acc defs = List.fold_left defs ~init:acc ~f:(fun acc def -> match def with | Aast.Namespace (_, defs) -> get acc defs | _ -> begin match node_getter def with | Some (node, name) when name_matcher name -> Some node | _ -> acc end) in get None defs let find_class_impl (def : Nast.def) : (Nast.class_ * string) option = match def with | Aast.Class c -> Some (c, snd c.Aast.c_name) | _ -> None let find_fun_impl def = match def with | Aast.Fun f -> Some (f, snd f.Aast.fd_name) | _ -> None let find_typedef_impl def = match def with | Aast.Typedef t -> Some (t, snd t.Aast.t_name) | _ -> None let find_const_impl def = match def with | Aast.Constant cst -> Some (cst, snd cst.Aast.cst_name) | _ -> None let find_module_impl def = match def with | Aast.Module md -> Some (md, snd md.Aast.md_name) | _ -> None let iequal name = let name = Caml.String.lowercase_ascii name in (fun s -> String.equal name (Caml.String.lowercase_ascii s)) let find_class_in_file ~(full : bool) ctx file_name name = get_def ~full ctx file_name find_class_impl (String.equal name) let find_iclass_in_file ctx file_name iname = get_def ctx file_name find_class_impl (iequal iname) ~full:false let find_fun_in_file ~(full : bool) ctx file_name name = get_def ~full ctx file_name find_fun_impl (String.equal name) let find_ifun_in_file ctx file_name iname = get_def ctx file_name find_fun_impl (iequal iname) ~full:false let find_typedef_in_file ~(full : bool) ctx file_name name = get_def ~full ctx file_name find_typedef_impl (String.equal name) let find_itypedef_in_file ctx file_name iname = get_def ctx file_name find_typedef_impl (iequal iname) ~full:false let find_gconst_in_file ~(full : bool) ctx file_name name = get_def ~full ctx file_name find_const_impl (String.equal name) let find_module_in_file ~(full : bool) ctx file_name name = get_def ~full ctx file_name find_module_impl (String.equal name) let
local_changes_push_sharedmem_stack () = ParserHeap.LocalChanges.push_stack () let local_changes_pop_sharedmem_stack () = ParserHeap.LocalChanges.pop_stack () let provide_ast_hint (path : Relative_path.t) (program : Nast.program) (parse_type : parse_type) : unit = match Provider_backend.get () with | Provider_backend.Analysis -> failwith "Should not write into parser heap" | Provider_backend.Rust_provider_backend _ | Provider_backend.Pessimised_shared_memory _ | Provider_backend.Shared_memory -> ParserHeap.write_around path (program, parse_type) | Provider_backend.Local_memory _ | Provider_backend.Decl_service _ -> () let remove_batch paths = ParserHeap.remove_batch paths let has_for_test (path : Relative_path.t) : bool = ParserHeap.mem path let clear_parser_cache () = ParserHeap.Cache.clear () let clear_local_cache () = LocalParserCache.clear ()
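A usage sketch (hypothetical ctx and path values): pulling a full AST through the caching layers above and listing the file's top-level class names, assuming Hh_prelude (Core) is open.

let class_names ctx path : string list =
  let ast = Ast_provider.get_ast ~full:true ctx path in
  (* Top-level defs only; [get_def] above also descends into namespaces. *)
  List.filter_map ast ~f:(function
      | Aast.Class c -> Some (snd c.Aast.c_name)
      | _ -> None)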
OCaml Interface
hhvm/hphp/hack/src/providers/ast_provider.mli
(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. * * This source code is licensed under the MIT license found in the * LICENSE file in the "hack" directory of this source tree. * *) val find_class_in_file : full:bool -> Provider_context.t -> Relative_path.t -> string -> Nast.class_ option val find_iclass_in_file : Provider_context.t -> Relative_path.t -> string -> Nast.class_ option val find_fun_in_file : full:bool -> Provider_context.t -> Relative_path.t -> string -> Nast.fun_def option val find_ifun_in_file : Provider_context.t -> Relative_path.t -> string -> Nast.fun_def option val find_typedef_in_file : full:bool -> Provider_context.t -> Relative_path.t -> string -> Nast.typedef option val find_itypedef_in_file : Provider_context.t -> Relative_path.t -> string -> Nast.typedef option val find_gconst_in_file : full:bool -> Provider_context.t -> Relative_path.t -> string -> Nast.gconst option val find_module_in_file : full:bool -> Provider_context.t -> Relative_path.t -> string -> Nast.module_def option val get_ast : full:bool -> Provider_context.t -> Relative_path.t -> Nast.program val get_ast_with_error : full:bool -> Provider_context.t -> Relative_path.t -> Errors.t * Nast.program (** Compute the AST for the given [Provider_context.entry]. *) val compute_ast : popt:ParserOptions.t -> entry:Provider_context.entry -> Nast.program (** Compute the full [Parser_return.t] object. *) val compute_parser_return_and_ast_errors : popt:ParserOptions.t -> entry:Provider_context.entry -> Parser_return.t * Errors.t (** Compute the comments for the given [Provider_context.entry]. *) val compute_comments : popt:ParserOptions.t -> entry:Provider_context.entry -> Parser_return.comments (** Compute the [FileInfo.t] associated with the given entry, doing a parse if necessary. *) val compute_file_info : popt:ParserOptions.t -> entry:Provider_context.entry -> FileInfo.t (** Compute the [Full_fidelity_source_text.t] for this [Provider_context.entry]. *) val compute_source_text : entry:Provider_context.entry -> Full_fidelity_source_text.t (** Compute the concrete syntax tree for this [Provider_context.entry]. *) val compute_cst : ctx:Provider_context.t -> entry:Provider_context.entry -> Provider_context.PositionedSyntaxTree.t val local_changes_push_sharedmem_stack : unit -> unit val local_changes_pop_sharedmem_stack : unit -> unit type parse_type = | Decl | Full val provide_ast_hint : Relative_path.t -> Nast.program -> parse_type -> unit val remove_batch : Relative_path.Set.t -> unit val has_for_test : Relative_path.t -> bool val clear_parser_cache : unit -> unit val clear_local_cache : unit -> unit
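For IDE-style requests, the entry-based API above is used instead of a disk path; a sketch of a hypothetical request handler. Both calls memoize their result on [entry], so repeated requests against the same open file are cheap.

let handle_request ctx entry =
  let ast =
    Ast_provider.compute_ast ~popt:(Provider_context.get_popt ctx) ~entry
  in
  let cst = Ast_provider.compute_cst ~ctx ~entry in
  (ast, cst)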
TOML
hhvm/hphp/hack/src/providers/Cargo.toml
# @generated by autocargo [package] name = "rust_provider_backend_api" version = "0.0.0" edition = "2021" [lib] path = "rust_provider_backend_api.rs" test = false doctest = false [dependencies] file_provider = { version = "0.0.0", path = "../hackrs/file_provider/cargo/file_provider" } folded_decl_provider = { version = "0.0.0", path = "../hackrs/folded_decl_provider/cargo/folded_decl_provider" } naming_provider = { version = "0.0.0", path = "../hackrs/naming_provider/cargo/naming_provider" } shallow_decl_provider = { version = "0.0.0", path = "../hackrs/shallow_decl_provider/cargo/shallow_decl_provider" } ty = { version = "0.0.0", path = "../hackrs/ty/cargo/ty" }
OCaml
hhvm/hphp/hack/src/providers/db_path_provider.ml
(* * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the "hack" directory of this source tree. * *) open Hh_prelude (** This sharedmem is used only for the Shared_memory and Analysis backends *) module Shared_db_settings = SharedMem.Heap (SharedMem.ImmediateBackend (SharedMem.NonEvictable)) (StringKey) (struct type t = Naming_sqlite.db_path let description = "NamingTableDatabaseSettings" end) (** SharedMem doesn't cache absences. So we write our own cache. *) let naming_db_path_cache : [ `Shmem_not_yet_cached_path | `Shmem_cached_path of Naming_sqlite.db_path option ] ref = ref `Shmem_not_yet_cached_path let get_naming_db_path (backend : Provider_backend.t) : Naming_sqlite.db_path option = match (backend, !naming_db_path_cache) with | ( ( Provider_backend.Analysis | Provider_backend.Rust_provider_backend _ | Provider_backend.Shared_memory | Provider_backend.Pessimised_shared_memory _ ), `Shmem_cached_path path_opt ) -> path_opt | ( ( Provider_backend.Analysis | Provider_backend.Shared_memory | Provider_backend.Pessimised_shared_memory _ ), `Shmem_not_yet_cached_path ) -> let path_opt = Shared_db_settings.get "database_path" in naming_db_path_cache := `Shmem_cached_path path_opt; path_opt | (Provider_backend.Rust_provider_backend backend, `Shmem_not_yet_cached_path) -> let path_opt = Rust_provider_backend.Naming.get_db_path backend in naming_db_path_cache := `Shmem_cached_path path_opt; path_opt | (Provider_backend.Local_memory { Provider_backend.naming_db_path_ref; _ }, _) -> !naming_db_path_ref | (Provider_backend.Decl_service _, _) -> failwith "decl provider doesn't expose naming db path" let set_naming_db_path (backend : Provider_backend.t) (naming_db_path : Naming_sqlite.db_path option) : unit = match backend with | Provider_backend.Analysis | Provider_backend.Pessimised_shared_memory _ | Provider_backend.Shared_memory -> Shared_db_settings.remove_batch (SSet.singleton "database_path"); Option.iter naming_db_path ~f:(Shared_db_settings.add "database_path"); naming_db_path_cache := `Shmem_cached_path naming_db_path | Provider_backend.Rust_provider_backend backend -> Option.iter naming_db_path ~f:(Rust_provider_backend.Naming.set_db_path backend); naming_db_path_cache := `Shmem_cached_path naming_db_path | Provider_backend.Local_memory { Provider_backend.naming_db_path_ref; _ } -> naming_db_path_ref := naming_db_path | Provider_backend.Decl_service _ -> failwith "decl provider doesn't expose naming db path"
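A sketch of the init-time handshake (hypothetical path value; this assumes [Naming_sqlite.Db_path] is the usual string-carrying constructor), using this module's definitions directly:

let init_naming_db backend path_str =
  set_naming_db_path backend (Some (Naming_sqlite.Db_path path_str));
  (* Subsequent reads are served from the local cache, not SharedMem. *)
  assert (Option.is_some (get_naming_db_path backend))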
OCaml Interface
hhvm/hphp/hack/src/providers/db_path_provider.mli
(* * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the "hack" directory of this source tree. * *) (** the naming_db_path says where the naming-table sqlite file is found. *) val get_naming_db_path : Provider_backend.t -> Naming_sqlite.db_path option (** naming_db_path is set at initialization once we know the path. *) val set_naming_db_path : Provider_backend.t -> Naming_sqlite.db_path option -> unit
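A minimal sketch of the intended call pattern: set once at initialization, read thereafter. It assumes [Naming_sqlite.db_path] is the single-constructor wrapper [Db_path of string] and that [Hh_logger.log] takes a printf-style format, both as used elsewhere in this tree.

(* Hedged sketch: record the naming-table sqlite location, then read it
   back through the same provider. *)
let init_naming_db (backend : Provider_backend.t) (path : string) : unit =
  Db_path_provider.set_naming_db_path
    backend
    (Some (Naming_sqlite.Db_path path));
  match Db_path_provider.get_naming_db_path backend with
  | Some (Naming_sqlite.Db_path p) -> Hh_logger.log "naming db at %s" p
  | None -> Hh_logger.log "no naming db path recorded"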
OCaml
hhvm/hphp/hack/src/providers/decl_provider.ml
(* * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the "hack" directory of this source tree. * *) open Hh_prelude module Class = Typing_classes_heap.Api type fun_key = string type type_key = string type gconst_key = string type module_key = string type fun_decl = Typing_defs.fun_elt type class_decl = Typing_classes_heap.Api.t type typedef_decl = Typing_defs.typedef_type type gconst_decl = Typing_defs.const_decl type module_decl = Typing_defs.module_def_type let find_in_direct_decl_parse = Typedef_provider.find_in_direct_decl_parse (** This cache caches the result of full class computations (the class merged with all its inherited members.) *) module Cache = SharedMem.FreqCache (StringKey) (struct type t = Typing_classes_heap.class_t let description = "Decl_Typing_ClassType" end) (struct let capacity = 1000 end) let declare_folded_class (ctx : Provider_context.t) (name : type_key) : Decl_defs.decl_class_type * Decl_store.class_members option = match Provider_context.get_backend ctx with | Provider_backend.Analysis -> failwith "invalid" | _ -> (match Errors.run_in_decl_mode (fun () -> Decl_folded_class.class_decl_if_missing ~sh:SharedMem.Uses ctx name) with | None -> Decl_defs.raise_decl_not_found None name | Some decl_and_members -> decl_and_members) let lookup_or_populate_class_cache class_name populate = match Cache.get class_name with | Some _ as result -> result | None -> begin match populate class_name with | None -> None | Some v as result -> Cache.add class_name v; result end let get_class ?(tracing_info : Decl_counters.tracing_info option) (ctx : Provider_context.t) (class_name : type_key) : class_decl option = Decl_counters.count_decl ?tracing_info Decl_counters.Class class_name @@ fun counter -> (* There are several possibilities: LOCAL BACKEND - the class_t is cached in the local backend. SHAREDMEM BACKEND - the class_t is cached in the worker-local 'Cache' heap. Note that in the case of eager, the class_t is really just a fairly simple derivation of the decl_class_type that lives in shmem. DECL BACKEND - the class_t is cached in the worker-local 'Cache' heap *) match Provider_context.get_backend ctx with | Provider_backend.Analysis -> begin match lookup_or_populate_class_cache class_name (fun class_name -> Decl_store.((get ()).get_class class_name) |> Option.map ~f:Typing_classes_heap.make_eager_class_decl) with | None -> None | Some v -> Some (counter, v, Some ctx) end | Provider_backend.Pessimised_shared_memory _ -> begin (* No pessimisation needs to be done here directly. All pessimisation is * done on the shallow classes within [Shallow_classes_provider] that the * [Typing_classes_heap.Api.t] returned here is constructed from * Crucially, we do not use the [Cache] here, which would contain * outdated member types once we update its members during * pessimisation. 
*) match Typing_classes_heap.get ctx class_name declare_folded_class with | None -> None | Some v -> Some (counter, v, Some ctx) end | Provider_backend.Shared_memory | Provider_backend.Decl_service _ -> begin match lookup_or_populate_class_cache class_name (fun class_name -> Typing_classes_heap.get ctx class_name declare_folded_class) with | None -> None | Some v -> Some (counter, v, Some ctx) end | Provider_backend.Local_memory { Provider_backend.decl_cache; _ } -> let open Option.Monad_infix in Typing_classes_heap.get_class_with_cache ctx class_name decl_cache declare_folded_class >>| fun cls -> (counter, cls, Some ctx) | Provider_backend.Rust_provider_backend backend -> begin match lookup_or_populate_class_cache class_name (fun class_name -> Rust_provider_backend.Decl.get_folded_class backend (Naming_provider.rust_backend_ctx_proxy ctx) class_name |> Option.map ~f:Typing_classes_heap.make_eager_class_decl) with | None -> None | Some v -> Some (counter, v, Some ctx) end let maybe_pessimise_fun_decl ctx fun_decl = if Provider_context.implicit_sdt_for_fun ctx fun_decl then let no_auto_likes = Provider_context.no_auto_likes_for_fun fun_decl in Typing_defs. { fun_decl with fe_type = Decl_enforceability.( pessimise_fun_type ~fun_kind:Function ~this_class:None ~no_auto_likes ctx fun_decl.fe_pos fun_decl.fe_type); } else fun_decl let get_fun ?(tracing_info : Decl_counters.tracing_info option) (ctx : Provider_context.t) (fun_name : fun_key) : fun_decl option = let open Option.Let_syntax in Option.map ~f:(maybe_pessimise_fun_decl ctx) @@ Decl_counters.count_decl Decl_counters.Fun ?tracing_info fun_name @@ fun _counter -> match Provider_context.get_backend ctx with | Provider_backend.Analysis -> Decl_store.((get ()).get_fun fun_name) | Provider_backend.Pessimised_shared_memory info -> (match Decl_store.((get ()).get_fun fun_name) with | Some c -> Some c | None -> (match Naming_provider.get_fun_path ctx fun_name with | Some filename -> let* original_ft = find_in_direct_decl_parse ~cache_results:false ctx filename fun_name Shallow_decl_defs.to_fun_decl_opt in let ft = info.Provider_backend.pessimise_fun filename ~name:fun_name original_ft in if info.Provider_backend.store_pessimised_result then Decl_store.((get ()).add_fun) fun_name ft; Some ft | None -> None)) | Provider_backend.Shared_memory -> (match Decl_store.((get ()).get_fun fun_name) with | Some c -> Some c | None -> (match Naming_provider.get_fun_path ctx fun_name with | Some filename -> find_in_direct_decl_parse ~cache_results:true ctx filename fun_name Shallow_decl_defs.to_fun_decl_opt | None -> None)) | Provider_backend.Local_memory { Provider_backend.decl_cache; _ } -> Provider_backend.Decl_cache.find_or_add decl_cache ~key:(Provider_backend.Decl_cache_entry.Fun_decl fun_name) ~default:(fun () -> match Naming_provider.get_fun_path ctx fun_name with | Some filename -> find_in_direct_decl_parse ~cache_results:true ctx filename fun_name Shallow_decl_defs.to_fun_decl_opt | None -> None) | Provider_backend.Decl_service { decl; _ } -> Decl_service_client.rpc_get_fun decl fun_name | Provider_backend.Rust_provider_backend backend -> Rust_provider_backend.Decl.get_fun backend (Naming_provider.rust_backend_ctx_proxy ctx) fun_name let maybe_pessimise_typedef_decl ctx typedef_decl = if TypecheckerOptions.everything_sdt (Provider_context.get_tcopt ctx) && not (Typing_defs.Attributes.mem Naming_special_names.UserAttributes.uaNoAutoDynamic typedef_decl.Typing_defs.td_attributes) then (* TODO: deal with super constraint *) match 
typedef_decl.Typing_defs.td_as_constraint with | Some _ -> typedef_decl | None -> let open Typing_defs in let pos = typedef_decl.td_pos in { typedef_decl with td_as_constraint = Some (Decl_enforceability.supportdyn_mixed pos (Reason.Rwitness_from_decl pos)); } else typedef_decl let get_typedef ?(tracing_info : Decl_counters.tracing_info option) (ctx : Provider_context.t) (typedef_name : type_key) : typedef_decl option = let open Option.Let_syntax in Option.map ~f:(maybe_pessimise_typedef_decl ctx) @@ Decl_counters.count_decl Decl_counters.Typedef ?tracing_info typedef_name @@ fun _counter -> match Provider_context.get_backend ctx with | Provider_backend.Analysis -> Decl_store.((get ()).get_typedef typedef_name) | Provider_backend.Shared_memory -> Typedef_provider.get_typedef ctx typedef_name | Provider_backend.Pessimised_shared_memory info -> (match Decl_store.((get ()).get_typedef typedef_name) with | Some c -> Some c | None -> (match Naming_provider.get_typedef_path ctx typedef_name with | Some filename -> let* original_typedef = find_in_direct_decl_parse ~cache_results:false ctx filename typedef_name Shallow_decl_defs.to_typedef_decl_opt in let typedef = info.Provider_backend.pessimise_typedef filename ~name:typedef_name original_typedef in if info.Provider_backend.store_pessimised_result then Decl_store.((get ()).add_typedef) typedef_name typedef; Some typedef | None -> None)) | Provider_backend.Local_memory { Provider_backend.decl_cache; _ } -> Provider_backend.Decl_cache.find_or_add decl_cache ~key:(Provider_backend.Decl_cache_entry.Typedef_decl typedef_name) ~default:(fun () -> match Naming_provider.get_typedef_path ctx typedef_name with | Some filename -> find_in_direct_decl_parse ~cache_results:true ctx filename typedef_name Shallow_decl_defs.to_typedef_decl_opt | None -> None) | Provider_backend.Decl_service { decl; _ } -> Decl_service_client.rpc_get_typedef decl typedef_name | Provider_backend.Rust_provider_backend backend -> Rust_provider_backend.Decl.get_typedef backend (Naming_provider.rust_backend_ctx_proxy ctx) typedef_name let get_gconst ?(tracing_info : Decl_counters.tracing_info option) (ctx : Provider_context.t) (gconst_name : gconst_key) : gconst_decl option = let open Option.Let_syntax in Decl_counters.count_decl Decl_counters.GConst ?tracing_info gconst_name @@ fun _counter -> match Provider_context.get_backend ctx with | Provider_backend.Analysis -> Decl_store.((get ()).get_gconst gconst_name) | Provider_backend.Pessimised_shared_memory info -> (match Decl_store.((get ()).get_gconst gconst_name) with | Some c -> Some c | None -> (match Naming_provider.get_const_path ctx gconst_name with | Some filename -> let* original_gconst = find_in_direct_decl_parse ~cache_results:false ctx filename gconst_name Shallow_decl_defs.to_const_decl_opt in let gconst = info.Provider_backend.pessimise_gconst filename ~name:gconst_name original_gconst in (if info.Provider_backend.store_pessimised_result then Decl_store.((get ()).add_gconst gconst_name gconst)); Some gconst | None -> None)) | Provider_backend.Shared_memory -> (match Decl_store.((get ()).get_gconst gconst_name) with | Some c -> Some c | None -> (match Naming_provider.get_const_path ctx gconst_name with | Some filename -> find_in_direct_decl_parse ~cache_results:true ctx filename gconst_name Shallow_decl_defs.to_const_decl_opt | None -> None)) | Provider_backend.Local_memory { Provider_backend.decl_cache; _ } -> Provider_backend.Decl_cache.find_or_add decl_cache ~key:(Provider_backend.Decl_cache_entry.Gconst_decl 
gconst_name) ~default:(fun () -> match Naming_provider.get_const_path ctx gconst_name with | Some filename -> find_in_direct_decl_parse ~cache_results:true ctx filename gconst_name Shallow_decl_defs.to_const_decl_opt | None -> None) | Provider_backend.Decl_service { decl; _ } -> Decl_service_client.rpc_get_gconst decl gconst_name | Provider_backend.Rust_provider_backend backend -> Rust_provider_backend.Decl.get_gconst backend (Naming_provider.rust_backend_ctx_proxy ctx) gconst_name let get_module ?(tracing_info : Decl_counters.tracing_info option) (ctx : Provider_context.t) (module_name : module_key) : module_decl option = Decl_counters.count_decl Decl_counters.Module_decl ?tracing_info module_name @@ fun _counter -> let fetch_from_backing_store () = Naming_provider.get_module_path ctx module_name |> Option.bind ~f:(fun filename -> find_in_direct_decl_parse ~cache_results:true ctx filename module_name Shallow_decl_defs.to_module_decl_opt) in match Provider_context.get_backend ctx with | Provider_backend.Analysis -> Decl_store.((get ()).get_module module_name) | Provider_backend.Pessimised_shared_memory _ | Provider_backend.Shared_memory -> (match Decl_store.((get ()).get_module module_name) with | Some m -> Some m | None -> fetch_from_backing_store ()) | Provider_backend.Local_memory { Provider_backend.decl_cache; _ } -> Provider_backend.Decl_cache.find_or_add decl_cache ~key:(Provider_backend.Decl_cache_entry.Module_decl module_name) ~default:fetch_from_backing_store | Provider_backend.Decl_service { decl; _ } -> Decl_service_client.rpc_get_module decl module_name | Provider_backend.Rust_provider_backend backend -> Rust_provider_backend.Decl.get_module backend (Naming_provider.rust_backend_ctx_proxy ctx) module_name let get_overridden_method ctx ~class_name ~method_name ~is_static : Typing_defs.class_elt option = let open Option.Monad_infix in get_class ctx class_name >>= fun cls -> Class.overridden_method cls ~method_name ~is_static ~get_class (** This is a subtle function! If there is a winner defined for [name_type / name] with that exact same capitalization, then it will return its position, otherwise it will return None. Example: [type t=int; type T=string;] where "t=int" is the winner. * [get_pos_from_decl_of_winner Typedef "t"] --> Some * [get_pos_from_decl_of_winner Typedef "T"] --> None * [get_pos_from_decl_of_winner Class "T"] --> None Example: [type tc=int; class tc {}] where "class tc" is the winner. * [get_pos_from_decl_of_winner Class "tc"] --> Some * [get_pos_from_decl_of_winner Typedef "tc"] --> None * [get_pos_from_decl_of_winner Class "TC"] --> None Why such a subtle function? We're trying to thread the needle between what's efficient to obtain from the decl-provider without needing a costly step of "obtain the canonical capitalization of this symbol", vs what's the minimum correctness needed to support the function [is_this_def_the_winner]. Several branches of this function must first query Naming_provider before it's safe to go on and query Decl_provider. This introduces the possibility of disk races e.g. if the file has changed on disk but we haven't yet gotten around to processing the file-change notification, then Naming_provider will tell us that the symbol is in the file but when Decl_provider comes to read it off disk then it's no longer there. It's also vulnerable to a weird quirk with namespaces. Consider "namespace N; namespace M {function f(){} }". 
This gives Parsing[1002] "Cannot mix bracketed+unbracketed namespaces", but naturally AST-parser and direct-decl-parser both still have to produce something for it. AST claims it has symbol M\f, while direct-decl-parser claims N\M\f. If the naming-table happened to give results from AST-parser (e.g. as happens when Provider_context.entry provides overrides to the naming-table) then we'll hit the same situation. In both these cases, we return None from this function, indicating that we couldn't find a decl position. It'd be better if naming and decl providers were more tightly coupled, and had consistency-correctness, so that neither case were possible. *) let get_pos_from_decl_of_winner_FOR_TESTS_ONLY ctx name_type name : Pos.t option = let pos_opt = match name_type with | FileInfo.Fun -> if Naming_provider.get_fun_path ctx name |> Option.is_some then get_fun ctx name |> Option.map ~f:(fun { Typing_defs.fe_pos; _ } -> fe_pos) else None | FileInfo.Typedef -> if Naming_provider.get_typedef_path ctx name |> Option.is_some then get_typedef ctx name |> Option.map ~f:(fun { Typing_defs.td_pos; _ } -> td_pos) else None | FileInfo.Class -> if Naming_provider.get_class_path ctx name |> Option.is_some then get_class ctx name |> Option.map ~f:(fun cls -> Class.pos cls) else None | FileInfo.Const -> Option.map (get_gconst ctx name) ~f:(fun { Typing_defs.cd_pos; _ } -> cd_pos) | FileInfo.Module -> Option.map (get_module ctx name) ~f:(fun { Typing_defs.mdt_pos; _ } -> mdt_pos) in Option.map pos_opt ~f:Pos_or_decl.unsafe_to_raw_pos type winner = | Winner | Loser_to of Pos.t | Not_found let is_this_def_the_winner ctx name_type (pos, name) = match (get_pos_from_decl_of_winner_FOR_TESTS_ONLY ctx name_type name, name_type) with | (Some winner_pos, _) when Pos.overlaps pos winner_pos -> (* There is a winner decl for [name_type name], the exact same name_type and capitalization as we provided, and its position overlaps. Therefore [name_type / name / pos] is the winner! The winner for [name_type / name] has the exact same capitalization and name_type. We use "overlaps" to allow flexibility for whether the pos associated with the AST refers to the pos of the identifier token or the pos of the whole thing, and likewise the pos associated with the decl. *) Winner | (Some winner_pos, _) -> (* There is a winner decl for [name_type name], the exact same name_type and capitalization as we provided, but it is at a different position. Therefore we are the loser. *) Loser_to winner_pos | (None, FileInfo.(Const | Module)) -> (* There is no winner decl for [name_type name]. These name-types are case-sensitive, so we don't need to look further. *) Not_found | (None, FileInfo.Fun) -> begin (* If there wasn't a winner decl for [Fun name], then maybe there is for a different capitalization of [Name]? Note: this codepath results in either [Not_found] or [Loser_to], neither of which occur in a program that typechecks clean, so it's okay if they're a little slow. It technically has one path which returns [Winner] but we believe and assert that it will never arise.
*) match Naming_provider.get_fun_canon_name ctx name with | None -> Not_found | Some cname -> begin match get_pos_from_decl_of_winner_FOR_TESTS_ONLY ctx name_type cname with | None -> Not_found | Some winner_pos when Pos.overlaps pos winner_pos -> HackEventLogger.invariant_violation_bug "caller provided wrong capitalization of fun name (unnecessarily slow path; should avoid)" ~pos:(Pos.to_relative_string pos |> Pos.string) ~data:(Printf.sprintf "name=%s cname=%s" name cname); Winner | Some winner_pos -> Loser_to winner_pos end end | (None, FileInfo.(Class | Typedef)) -> (* If there wasn't a winner decl for [name_type name], then maybe there is a winning decl for a different capitalization of [name]? or for the other [name_type]? Note: this codepath results in either [Not_found] or [Loser_to], neither of which occur in a program that typechecks clean, so it's okay if they're a little slow. It technically has one path which returns [Winner] but we believe and assert that it will never arise. *) (match Naming_provider.get_type_canon_name ctx name with | None -> Not_found | Some cname -> let winner_pos_opt = Option.first_some (get_pos_from_decl_of_winner_FOR_TESTS_ONLY ctx FileInfo.Class cname) (get_pos_from_decl_of_winner_FOR_TESTS_ONLY ctx FileInfo.Typedef cname) in (match winner_pos_opt with | None -> Not_found | Some winner_pos when Pos.overlaps pos winner_pos -> HackEventLogger.decl_consistency_bug "caller provided wrong capitalization of type name (unnecessarily slow path; should avoid)" ~pos:(Pos.to_relative_string pos |> Pos.string) ~data:(Printf.sprintf "name=%s cname=%s" name cname); Winner | Some winner_pos -> Loser_to winner_pos)) let local_changes_push_sharedmem_stack () = Decl_store.((get ()).push_local_changes ()) let local_changes_pop_sharedmem_stack () = Decl_store.((get ()).pop_local_changes ()) let declare_folded_class_in_file_FOR_TESTS_ONLY ctx cid = fst (declare_folded_class ctx cid)
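To make the winner/loser judgement concrete, here is a minimal sketch of a caller classifying a file's toplevel function definitions. The [Aast.fd_name] field access and the eprintf reporting are illustrative assumptions, not the real duplicate-name error machinery.

open Hh_prelude

(* Hedged sketch: classify each toplevel fun definition against the
   naming-table winner. *)
let report_duplicate_funs (ctx : Provider_context.t) (funs : Nast.fun_def list)
    : unit =
  List.iter funs ~f:(fun fd ->
      let (pos, name) = fd.Aast.fd_name in
      match
        Decl_provider.is_this_def_the_winner ctx FileInfo.Fun (pos, name)
      with
      | Decl_provider.Winner -> () (* the fast, common case *)
      | Decl_provider.Loser_to winner_pos ->
        Printf.eprintf
          "%s loses to the definition at %s\n"
          name
          (Pos.string (Pos.to_absolute winner_pos))
      | Decl_provider.Not_found ->
        Printf.eprintf "%s has no winning definition\n" name)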
OCaml Interface
hhvm/hphp/hack/src/providers/decl_provider.mli
(* * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the "hack" directory of this source tree. * *) (** Provides decls from the configured backend, e.g. shared memory, local memory, service, etc. *) type fun_key = string type type_key = string type gconst_key = string type module_key = string module Class : sig include module type of Typing_classes_heap.Api end type fun_decl = Typing_defs.fun_elt type class_decl = Class.t type typedef_decl = Typing_defs.typedef_type type gconst_decl = Typing_defs.const_decl type module_decl = Typing_defs.module_def_type val get_fun : ?tracing_info:Decl_counters.tracing_info -> Provider_context.t -> fun_key -> fun_decl option val get_class : ?tracing_info:Decl_counters.tracing_info -> Provider_context.t -> type_key -> class_decl option val get_typedef : ?tracing_info:Decl_counters.tracing_info -> Provider_context.t -> type_key -> typedef_decl option val get_gconst : ?tracing_info:Decl_counters.tracing_info -> Provider_context.t -> gconst_key -> gconst_decl option val get_module : ?tracing_info:Decl_counters.tracing_info -> Provider_context.t -> module_key -> module_decl option (** This assumes that [class_name] defines and overrides [method_name] and returns the method from an ancestor of [class_name] that would have been inherited by [class_name] had it not overridden it. *) val get_overridden_method : Provider_context.t -> class_name:type_key -> method_name:string -> is_static:bool -> Typing_defs.class_elt option (** Return type for [is_this_def_the_winner] *) type winner = | Winner (** yes it is the winner *) | Loser_to of Pos.t (** a different definition is the winner *) | Not_found (** there is no winning definition for [name_type / name] *) (** [is_this_def_the_winner ctx name_kind (pos, name)] judges whether the definition at [name / name_kind / pos] is deemed the "winner" in the naming table. Normally when a symbol is defined only once, then it is the winner. But if a name is defined multiple times (maybe differing in case), then only one of those definitions is the winner, be they in the same file or different. Examples: * [type tc=int; class tc {}] - these conflict, and only one may be deemed the winner * [type t=int; type T=string;] - these conflict, and only one may be deemed the winner Most other functions e.g. [Decl_provider.get_typedef] are not sensitive to winners, and will happily return information about losers if so requested! For instance, * [get_typedef "tc"] will return the typedef "type tc=int" even if "class tc {}" were the winner * [get_typedef "T"] will return the typedef "type T=string" even if "type t=int" were the winner This function returns [Winner] if the supplied definition is the winner, or [Loser_to winner_pos] if it isn't the winner, or [Not_found] if there's no winner for any capitalization of [name] in category [name_type] (nor, in the case of Class/Typedef, in the other category). It is an error to call this function with a [name_type / pos / name] where [name_type / pos / different_capitalization_of_name] is the winner; this will lead to an exception. This function is fast in the common case where it returns [Winner].
The intended scenario is that (1) if the caller is iterating over all toplevel definitions in an AST and calling this function on them, then by construction it will never return [Not_found] or fail with an exception, (2) the only time this function returns [Loser_to] is cases that report "duplicate name" errors so it doesn't matter if they're a little slow. We need to talk about the implementation strategy, because Decl_provider and Naming_provider are leaky APIs and so the implementation of this function is relevant to callers... * In cases where it's needed to disambiguate winner/loser, this function consults the reverse naming-table. Thus it depends for speed upon naming-table being cached. * This function obtains the winning decl's position by reading the decl from Decl_provider and all such decls are position-full. Thus it depends for speed upon decls being cached. It is faster for instance than parsing ASTs, as is done by e.g. [Naming_provider.get_type_full_pos_by_parsing_file] * This function uses the horrid and slow [Naming_provider.get_{fun,type}_canon_name] in case there wasn't a winner using the same capitalization as what was provided, and so looks for the decl under that correct capitalization. It would be great if the decl-provider could become more rigorous, so that for instance [Decl_provider.get_class ctx name] would only return a decl if it were the winner. Then we could do without the Naming-table cache. It would be great to avoid [Naming_provider.get_{fun,type}_canon_name]. That will only be possible in a future where Decl_provider supports case-insensitive decl lookups. It's still okay if such lookups are slow - the only programs which cause us to do these case-insensitive lookups will be programs with duplicate-name errors where the duplicates differ in case. If in future we decide not to use position-full decls, that will be easy. We'll still need the decl-provider to tell us at least a filename, and we'll use that filename to resolve winners/losers that come from different files, and we'll have this function take a [file_ast] parameter to resolve winners/losers that come from the same file. *) val is_this_def_the_winner : Provider_context.t -> FileInfo.name_type -> Ast_defs.id -> winner (** Internal helper used by [is_this_def_the_winner]. Exposed solely for testing in hh_single_decl; must not be used elsewhere *) val get_pos_from_decl_of_winner_FOR_TESTS_ONLY : Provider_context.t -> FileInfo.name_type -> string -> Pos.t option val local_changes_push_sharedmem_stack : unit -> unit val local_changes_pop_sharedmem_stack : unit -> unit val lookup_or_populate_class_cache : type_key -> (type_key -> Typing_classes_heap.class_t option) -> Typing_classes_heap.class_t option val declare_folded_class_in_file_FOR_TESTS_ONLY : Provider_context.t -> type_key -> Decl_defs.decl_class_type
Rust
hhvm/hphp/hack/src/providers/decl_provider.rs
// Copyright (c) Facebook, Inc. and its affiliates. // // This source code is licensed under the MIT license found in the // LICENSE file in the "hack" directory of this source tree. // In OCaml, providers return Typing_defs.fun_elt, and Shallow_decl_defs.fun_elt is an alias to it, so // this signature is accurate. In the case of class declarations, Typing_defs.class_type is a separate type, which // folds many shallow class declarations into one. We don't have this type / logic in Rust yet. pub type FunDecl<'a> = oxidized_by_ref::shallow_decl_defs::FunElt<'a>; pub type ClassDecl<'a> = oxidized_by_ref::shallow_decl_defs::ShallowClass<'a>; pub trait DeclProvider { fn get_fun(&self, s: &str) -> Option<&FunDecl<'_>>; fn get_shallow_class(&self, s: &str) -> Option<&ClassDecl<'_>>; }
OCaml
hhvm/hphp/hack/src/providers/direct_decl_utils.ml
(* * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the "hack" directory of this source tree. * *) open Hh_prelude type parsed_file_with_hashes = Direct_decl_parser.parsed_file_with_hashes = { pfh_mode: FileInfo.mode option; pfh_hash: Int64.t; pfh_decls: (string * Shallow_decl_defs.decl * Int64.t) list; } (* If any decls in the list have the same name, retain only the first declaration of each symbol in the sequence. *) (* NB: Must be manually kept in sync with hackrs functions [shallow_decl_provider::LazyShallowDeclProvider::dedup_and_add_decls] and [hackrs_provider_backend::HhServerProviderBackend::dedup_and_add_decls]. *) let dedup_decls decls = let open Shallow_decl_defs in let seen_types = String.Table.create () in let seen_funs = String.Table.create () in let seen_consts = String.Table.create () in let seen_modules = String.Table.create () in Sequence.filter decls ~f:(fun decl -> match decl with | (name, Class _) | (name, Typedef _) -> if String.Table.mem seen_types name then false else let () = String.Table.add_exn seen_types ~key:name ~data:() in true | (name, Fun _) -> if String.Table.mem seen_funs name then false else let () = String.Table.add_exn seen_funs ~key:name ~data:() in true | (name, Const _) -> if String.Table.mem seen_consts name then false else let () = String.Table.add_exn seen_consts ~key:name ~data:() in true | (name, Module _) -> if String.Table.mem seen_modules name then false else let () = String.Table.add_exn seen_modules ~key:name ~data:() in true) (* If a symbol was also declared in another file, and that file was determined to be the winner in the naming table, remove its decl from the list. Do not remove decls if there is no entry for them in the naming table. This ensures that this path can populate the decl heap during full inits. This may result in the decl heap containing an incorrect decl in the event of a naming conflict, which might be confusing. But the user will be obligated to fix the naming conflict, and this behavior is limited to full inits, so maybe we can live with it. *) (* NB: Must be manually kept in sync with hackrs functions [shallow_decl_provider::LazyShallowDeclProvider::remove_naming_conflict_losers] and [hackrs_provider_backend::HhServerProviderBackend::remove_naming_conflict_losers]. *) let remove_naming_conflict_losers ctx file decls = let open Shallow_decl_defs in Sequence.filter decls ~f:(fun decl -> match decl with | (name, Class _) | (name, Typedef _) -> (match Naming_provider.get_type_path ctx name with | Some nfile -> Relative_path.equal nfile file | None -> true) | (name, Fun _) -> (match Naming_provider.get_fun_path ctx name with | Some nfile -> Relative_path.equal nfile file | None -> true) | (name, Const _) -> (match Naming_provider.get_const_path ctx name with | Some nfile -> Relative_path.equal nfile file | None -> true) | (name, Module _) -> (match Naming_provider.get_module_path ctx name with | Some nfile -> Relative_path.equal nfile file | None -> true)) let cache_decls ctx file decls = let open Shallow_decl_defs in let open Typing_defs in let decls = decls |> List.rev_map (* direct decl parser produces reverse of syntactic order *) ~f:(fun (name, decl, _hash) -> (name, decl)) |> Sequence.of_list |> dedup_decls |> remove_naming_conflict_losers ctx file |> Sequence.to_list in match Provider_context.get_backend ctx with | Provider_backend.Pessimised_shared_memory _ -> (* We must never perform caching here. 
Otherwise, we may overwrite earlier pessimisation results with unpessimised types *) failwith "invalid" | Provider_backend.Analysis | Provider_backend.Shared_memory -> List.iter decls ~f:(function | (name, Class decl) -> Shallow_classes_heap.Classes.add name decl | (name, Fun decl) -> Decl_store.((get ()).add_fun name decl) | (name, Typedef decl) -> Decl_store.((get ()).add_typedef name decl) | (name, Const decl) -> Decl_store.((get ()).add_gconst name decl) | (name, Module decl) -> Decl_store.((get ()).add_module name decl)) | Provider_backend.Rust_provider_backend backend -> Rust_provider_backend.Decl.add_shallow_decls backend decls | Provider_backend.(Local_memory { decl_cache; shallow_decl_cache; _ }) -> List.iter decls ~f:(function | (name, Class decl) -> let (_ : shallow_class option) = Provider_backend.Shallow_decl_cache.find_or_add shallow_decl_cache ~key: (Provider_backend.Shallow_decl_cache_entry.Shallow_class_decl name) ~default:(fun () -> Some decl) in () | (name, Fun decl) -> let (_ : fun_elt option) = Provider_backend.Decl_cache.find_or_add decl_cache ~key:(Provider_backend.Decl_cache_entry.Fun_decl name) ~default:(fun () -> Some decl) in () | (name, Typedef decl) -> let (_ : typedef_type option) = Provider_backend.Decl_cache.find_or_add decl_cache ~key:(Provider_backend.Decl_cache_entry.Typedef_decl name) ~default:(fun () -> Some decl) in () | (name, Const decl) -> let (_ : const_decl option) = Provider_backend.Decl_cache.find_or_add decl_cache ~key:(Provider_backend.Decl_cache_entry.Gconst_decl name) ~default:(fun () -> Some decl) in () | (name, Module decl) -> let (_ : module_decl option) = Provider_backend.Decl_cache.find_or_add decl_cache ~key:(Provider_backend.Decl_cache_entry.Module_decl name) ~default:(fun () -> Some decl) in ()) | Provider_backend.Decl_service _ -> failwith "Direct_decl_utils.cache_file_decls not implemented for Decl_service" let get_file_contents ~ignore_file_content_caches ctx filename = let from_entries = if ignore_file_content_caches then None else Naming_provider.get_entry_contents ctx filename in match from_entries with | Some _ as contents_opt -> contents_opt | None -> File_provider.get_contents ~force_read_disk:ignore_file_content_caches filename let direct_decl_parse ?(ignore_file_content_caches = false) ctx file = Counters.count Counters.Category.Get_decl @@ fun () -> match get_file_contents ~ignore_file_content_caches ctx file with | None -> None | Some contents -> let popt = Provider_context.get_popt ctx in let opts = DeclParserOptions.from_parser_options popt in let deregister_php_stdlib_if_hhi = ParserOptions.deregister_php_stdlib popt in let parsed_file = Direct_decl_parser.parse_and_hash_decls opts deregister_php_stdlib_if_hhi file contents in Some parsed_file let direct_decl_parse_and_cache ctx file = match Provider_context.get_backend ctx with | Provider_backend.Rust_provider_backend backend -> Counters.count Counters.Category.Get_decl @@ fun () -> get_file_contents ~ignore_file_content_caches:false ctx file |> Option.map ~f:(fun contents -> Rust_provider_backend.Decl.direct_decl_parse_and_cache backend file contents) | _ -> let result = direct_decl_parse ctx file in (match result with | Some parsed_file -> cache_decls ctx file parsed_file.pfh_decls | None -> ()); result let decls_to_fileinfo = Direct_decl_parser.decls_to_fileinfo
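A minimal sketch: direct-decl-parse one file and print its decl names. As the interface below notes, [pfh_decls] comes back in reverse lexical order, hence the [List.rev]; the printing is illustrative only.

open Hh_prelude

(* Hedged sketch: enumerate the decl names of a single file. *)
let print_decl_names (ctx : Provider_context.t) (file : Relative_path.t) : unit =
  match Direct_decl_utils.direct_decl_parse ctx file with
  | None -> print_endline "file contents unavailable"
  | Some parsed ->
    List.iter
      (List.rev parsed.Direct_decl_utils.pfh_decls)
      ~f:(fun (name, _decl, _hash) -> print_endline name)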
OCaml Interface
hhvm/hphp/hack/src/providers/direct_decl_utils.mli
(* * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the "hack" directory of this source tree. * *) type parsed_file_with_hashes = Direct_decl_parser.parsed_file_with_hashes = { pfh_mode: FileInfo.mode option; pfh_hash: Int64.t; pfh_decls: (string * Shallow_decl_defs.decl * Int64.t) list; } (** NOTE: this produces decls in reverse lexical order *) val direct_decl_parse_and_cache : Provider_context.t -> Relative_path.t -> Direct_decl_parser.parsed_file_with_hashes option (** NOTE: this produces decls in reverse lexical order *) val direct_decl_parse : ?ignore_file_content_caches:bool -> Provider_context.t -> Relative_path.t -> Direct_decl_parser.parsed_file_with_hashes option val cache_decls : Provider_context.t -> Relative_path.t -> (string * Shallow_decl_defs.decl * Int64.t) list -> unit (** NOTE: this takes decls in reverse lexical order, and emits a FileInfo.t with them in forward lexical order *) val decls_to_fileinfo : Relative_path.t -> Direct_decl_parser.parsed_file_with_hashes -> FileInfo.t
hhvm/hphp/hack/src/providers/dune
(library (name provider_backend) (modules provider_backend) (libraries collections decl_service_client decl_store rust_provider_backend_stubs heap_shared_mem lfu_cache naming_sqlite naming_types pos relative_path shallow_decl_defs typechecker_options typing_class_types typing_defs) (preprocess (pps ppx_deriving.std))) (library (name provider_context) (modules provider_context) (libraries ast collections nast package_info parser provider_backend server_command_types typing_ast typing_deps relative_path) (preprocess (pps ppx_deriving.std))) (library (name lfu_cache) (modules lfu_cache) (libraries core_kernel utils_core) (preprocess (pps ppx_deriving.std))) (library (name file_provider) (modules file_provider) (libraries ast provider_backend relative_path rust_provider_backend_stubs sys_utils) (preprocess (pps ppx_deriving.std))) (library (name ast_provider) (modules ast_provider) (libraries ast counters file_provider heap_shared_mem nast parser provider_context relative_path server_command_types typechecker_options utils_find) (preprocess (pps ppx_deriving.std))) (library (name decl_provider) (modules decl_provider) (libraries counters decl_counters decl_class decl_enforceability decl_service_client direct_decl_utils provider_backend rust_provider_backend_stubs shallow_classes_provider shallow_decl_defs typedef_provider typing_heap) (preprocess (pps ppx_deriving.std))) (library (name fixme_provider) (modules fixme_provider) (libraries collections errors heap_shared_mem provider_backend) (preprocess (pps ppx_deriving.std))) (library (name naming_provider) (modules naming_provider) (libraries annotated_ast ast db_path_provider file_info full_fidelity naming_heap pos relative_path rust_provider_backend_stubs shallow_decl_defs typing_defs) (preprocess (pps ppx_deriving.std))) (library (name provider_utils) (modules provider_utils) (libraries ast_provider collections errors naming nast parser provider_context relative_path rust_provider_backend_stubs server_command_types server_env counters typechecker_options typing) (preprocess (pps ppx_deriving.std))) (library (name shallow_classes_provider) (modules shallow_classes_provider) (libraries bloom_filter decl_service_client direct_decl_utils provider_backend provider_context remote_old_decl_client rust_provider_backend_stubs shallow_classes_heap) (preprocess (pps ppx_deriving.std))) (library (name tast_provider) (modules tast_provider) (libraries decl_counters decl_provider provider_utils tast_env typing_toplevel) (preprocess (pps ppx_deriving.std))) (library (name db_path_provider) (modules db_path_provider) (libraries naming_sqlite provider_backend provider_context rust_provider_backend_stubs) (preprocess (pps ppx_deriving.std))) (library (name direct_decl_utils) (modules direct_decl_utils) (libraries annotated_ast ast_provider collections decl_defs decl_store decl_parser_options direct_decl_parser errors file_provider full_fidelity heap_shared_mem naming naming_provider nast parser parser_options provider_backend provider_context relative_path rust_provider_backend_stubs shallow_classes_heap shallow_decl_defs typechecker_options) (preprocess (pps ppx_deriving.std))) (library (name typedef_provider) (modules typedef_provider) (libraries decl_counters decl_nast direct_decl_utils provider_context naming_provider))
OCaml
hhvm/hphp/hack/src/providers/file_provider.ml
(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. * * This source code is licensed under the MIT license found in the * LICENSE file in the "hack" directory of this source tree. * *) open Hh_prelude (* Shared memory heap containing the contents of files. Acts as a sort of caching facade which is filled on-demand as contents are needed. The "cache" is filled by loading from the file system if the file isn't opened in the IDE (otherwise it uses the IDE contents). That is, the IDE version takes precedence over the file system's. *) type file_type = Rust_provider_backend.File.file_type = | Disk of string | Ide of string exception File_provider_stale module FileHeap = struct include SharedMem.Heap (SharedMem.ImmediateBackend (SharedMem.Evictable)) (Relative_path.S) (struct type t = file_type let description = "File" end) let replace_nonatomic key value = if mem key then remove key; add key value end let read_file_contents_from_disk (fn : Relative_path.t) : string option = try Some (Sys_utils.cat (Relative_path.to_absolute fn)) with | _ -> None let get fn = match Provider_backend.get () with | Provider_backend.Analysis -> failwith "invalid" | Provider_backend.Pessimised_shared_memory _ | Provider_backend.Shared_memory -> FileHeap.get fn | Provider_backend.Rust_provider_backend backend -> Rust_provider_backend.File.get backend fn | Provider_backend.Local_memory _ | Provider_backend.Decl_service _ -> failwith "File_provider.get not supported with local/decl memory provider" let get_unsafe fn = match Provider_backend.get () with | Provider_backend.Analysis -> failwith "invalid" | Provider_backend.Pessimised_shared_memory _ | Provider_backend.Shared_memory -> begin match get fn with | Some contents -> contents | None -> failwith ("File not found: " ^ Relative_path.to_absolute fn) end | Provider_backend.Rust_provider_backend backend -> begin match Rust_provider_backend.File.get backend fn with | Some contents -> contents | None -> failwith ("File not found: " ^ Relative_path.to_absolute fn) end | Provider_backend.Local_memory _ | Provider_backend.Decl_service _ -> failwith "File_provider.get_unsafe not supported with local/decl memory provider" let get_contents ?(force_read_disk = false) fn = match Provider_backend.get () with | Provider_backend.Analysis | Provider_backend.Pessimised_shared_memory _ | Provider_backend.Shared_memory -> let from_cache = if force_read_disk then None else FileHeap.get fn in (match from_cache with | Some (Ide f) -> Some f | Some (Disk contents) -> Some contents | None -> let contents = Option.value (read_file_contents_from_disk fn) ~default:"" in Some contents) | Provider_backend.Rust_provider_backend backend -> Some (Rust_provider_backend.File.get_contents backend fn) | Provider_backend.Local_memory _ | Provider_backend.Decl_service _ -> read_file_contents_from_disk fn let get_ide_contents_unsafe fn = match Provider_backend.get () with | Provider_backend.Analysis -> failwith "invalid" | Provider_backend.Pessimised_shared_memory _ | Provider_backend.Shared_memory -> begin match FileHeap.get fn with | Some (Ide f) -> f | _ -> failwith ("IDE file not found: " ^ Relative_path.to_absolute fn) end | Provider_backend.Rust_provider_backend backend -> begin match Rust_provider_backend.File.get backend fn with | Some (Ide f) -> f | _ -> failwith ("IDE file not found: " ^ Relative_path.to_absolute fn) end | Provider_backend.Local_memory _ | Provider_backend.Decl_service _ -> failwith ("File_provider.get_ide_contents_unsafe not supported " ^ "with local/decl memory provider")
let provide_file_for_tests fn contents = match Provider_backend.get () with | Provider_backend.Analysis -> failwith "invalid" | Provider_backend.Pessimised_shared_memory _ | Provider_backend.Shared_memory -> FileHeap.replace_nonatomic fn (Disk contents) | Provider_backend.Rust_provider_backend backend -> Rust_provider_backend.File.provide_file_for_tests backend fn contents | Provider_backend.Local_memory _ | Provider_backend.Decl_service _ -> failwith "File_provider.provide_file_for_tests not supported with local/decl memory provider" let provide_file_for_ide fn contents = match Provider_backend.get () with | Provider_backend.Analysis -> failwith "invalid" | Provider_backend.Pessimised_shared_memory _ | Provider_backend.Shared_memory -> FileHeap.add fn (Ide contents) | Provider_backend.Rust_provider_backend backend -> Rust_provider_backend.File.provide_file_for_ide backend fn contents | Provider_backend.Local_memory _ | Provider_backend.Decl_service _ -> failwith "File_provider.provide_file_for_ide not supported with local/decl memory provider" let provide_file_hint fn contents = match Provider_backend.get () with | Provider_backend.Analysis -> failwith "invalid" | Provider_backend.Pessimised_shared_memory _ | Provider_backend.Shared_memory -> (match contents with | Ide _ -> FileHeap.add fn contents | Disk _ -> ()) | Provider_backend.Rust_provider_backend backend -> Rust_provider_backend.File.provide_file_hint backend fn contents | Provider_backend.Local_memory _ | Provider_backend.Decl_service _ -> failwith "File_provider.provide_file_hint not supported with local/decl memory provider" let remove_batch paths = match Provider_backend.get () with | Provider_backend.Analysis -> failwith "invalid" | Provider_backend.Pessimised_shared_memory _ | Provider_backend.Shared_memory -> FileHeap.remove_batch paths | Provider_backend.Rust_provider_backend backend -> Rust_provider_backend.File.remove_batch backend paths | Provider_backend.Local_memory _ | Provider_backend.Decl_service _ -> failwith "File_provider.remove_batch not supported with local/decl memory provider" let local_changes_push_sharedmem_stack () = FileHeap.LocalChanges.push_stack () let local_changes_pop_sharedmem_stack () = FileHeap.LocalChanges.pop_stack ()
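A minimal sketch of the facade behaviour described in the comment at the top of this file: IDE buffer first, disk second. The path suffix "foo.php" is a made-up example.

open Hh_prelude

(* Hedged sketch: read through the cache facade and report the size. *)
let file_length () : int option =
  let fn = Relative_path.from_root ~suffix:"foo.php" in
  Option.map (File_provider.get_contents fn) ~f:String.length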
OCaml Interface
hhvm/hphp/hack/src/providers/file_provider.mli
(* * Copyright (c) 2016, Facebook, Inc. * All rights reserved. * * This source code is licensed under the MIT license found in the * LICENSE file in the "hack" directory of this source tree. * *) type file_type = Rust_provider_backend.File.file_type = | Disk of string | Ide of string exception File_provider_stale val get : Relative_path.t -> file_type option val get_unsafe : Relative_path.t -> file_type val get_contents : ?force_read_disk:bool -> Relative_path.t -> string option val get_ide_contents_unsafe : Relative_path.t -> string val provide_file_for_tests : Relative_path.t -> string -> unit val provide_file_for_ide : Relative_path.t -> string -> unit val provide_file_hint : Relative_path.t -> file_type -> unit val remove_batch : Relative_path.Set.t -> unit val local_changes_push_sharedmem_stack : unit -> unit val local_changes_pop_sharedmem_stack : unit -> unit
OCaml
hhvm/hphp/hack/src/providers/fixme_provider.ml
(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. * * This source code is licensed under the MIT license found in the * LICENSE file in the "hack" directory of this source tree. * *) open Hh_prelude module Fixme_store = Provider_backend.Fixme_store open Provider_backend.Fixmes (*****************************************************************************) (* Table containing all the HH_FIXMEs found in the source code. * Associates: * filename => * line number guarded by HH_FIXME => * error_node_number => * position of HH_FIXME comment *) (*****************************************************************************) type fixme_map = Provider_backend.fixme_map module HH_FIXMES = SharedMem.HeapWithLocalCache (SharedMem.ImmediateBackend (SharedMem.NonEvictable)) (Relative_path.S) (struct type t = fixme_map let description = "Fixme_HH_FIXMES" end) (struct let capacity = 1000 end) module DECL_HH_FIXMES = SharedMem.HeapWithLocalCache (SharedMem.ImmediateBackend (SharedMem.NonEvictable)) (Relative_path.S) (struct type t = fixme_map let description = "Fixme_DECL_HH_FIXMES" end) (struct let capacity = 1000 end) module DISALLOWED_FIXMES = SharedMem.HeapWithLocalCache (SharedMem.ImmediateBackend (SharedMem.NonEvictable)) (Relative_path.S) (struct type t = fixme_map let description = "Fixme_DISALLOWED_FIXMES" end) (struct let capacity = 1000 end) let get_fixmes filename = match Provider_backend.get () with | Provider_backend.Analysis | Provider_backend.Rust_provider_backend _ | Provider_backend.Pessimised_shared_memory _ | Provider_backend.Shared_memory -> (match HH_FIXMES.get filename with | None -> DECL_HH_FIXMES.get filename | Some x -> Some x) | Provider_backend.Local_memory { Provider_backend.fixmes; _ } | Provider_backend.Decl_service { fixmes; _ } -> (match Fixme_store.get fixmes.hh_fixmes filename with | None -> Fixme_store.get fixmes.decl_hh_fixmes filename | Some x -> Some x) let get_hh_fixmes filename = match Provider_backend.get () with | Provider_backend.Analysis | Provider_backend.Rust_provider_backend _ | Provider_backend.Pessimised_shared_memory _ | Provider_backend.Shared_memory -> HH_FIXMES.get filename | Provider_backend.Local_memory { Provider_backend.fixmes; _ } | Provider_backend.Decl_service { fixmes; _ } -> Fixme_store.get fixmes.hh_fixmes filename let get_decl_hh_fixmes filename = match Provider_backend.get () with | Provider_backend.Analysis | Provider_backend.Rust_provider_backend _ | Provider_backend.Pessimised_shared_memory _ | Provider_backend.Shared_memory -> DECL_HH_FIXMES.get filename | Provider_backend.Local_memory { Provider_backend.fixmes; _ } | Provider_backend.Decl_service { fixmes; _ } -> Fixme_store.get fixmes.decl_hh_fixmes filename let get_disallowed_fixmes filename = match Provider_backend.get () with | Provider_backend.Analysis | Provider_backend.Rust_provider_backend _ | Provider_backend.Pessimised_shared_memory _ | Provider_backend.Shared_memory -> DISALLOWED_FIXMES.get filename | Provider_backend.Local_memory { Provider_backend.fixmes; _ } | Provider_backend.Decl_service { fixmes; _ } -> Fixme_store.get fixmes.disallowed_fixmes filename let provide_hh_fixmes filename fixme_map = if not (IMap.is_empty fixme_map) then match Provider_backend.get () with | Provider_backend.Analysis | Provider_backend.Rust_provider_backend _ | Provider_backend.Pessimised_shared_memory _ | Provider_backend.Shared_memory -> HH_FIXMES.add filename fixme_map | Provider_backend.Local_memory { Provider_backend.fixmes; _ } | Provider_backend.Decl_service { fixmes; _ } -> 
Fixme_store.add fixmes.hh_fixmes filename fixme_map let provide_decl_hh_fixmes filename fixme_map = if not (IMap.is_empty fixme_map) then match Provider_backend.get () with | Provider_backend.Analysis | Provider_backend.Rust_provider_backend _ | Provider_backend.Pessimised_shared_memory _ | Provider_backend.Shared_memory -> DECL_HH_FIXMES.add filename fixme_map | Provider_backend.Local_memory { Provider_backend.fixmes; _ } | Provider_backend.Decl_service { fixmes; _ } -> Fixme_store.add fixmes.decl_hh_fixmes filename fixme_map let provide_disallowed_fixmes filename fixme_map = if not (IMap.is_empty fixme_map) then match Provider_backend.get () with | Provider_backend.Analysis | Provider_backend.Rust_provider_backend _ | Provider_backend.Pessimised_shared_memory _ | Provider_backend.Shared_memory -> DISALLOWED_FIXMES.add filename fixme_map | Provider_backend.Local_memory { Provider_backend.fixmes; _ } | Provider_backend.Decl_service { fixmes; _ } -> Fixme_store.add fixmes.disallowed_fixmes filename fixme_map let remove_batch paths = match Provider_backend.get () with | Provider_backend.Analysis | Provider_backend.Rust_provider_backend _ | Provider_backend.Pessimised_shared_memory _ | Provider_backend.Shared_memory -> HH_FIXMES.remove_batch paths; DECL_HH_FIXMES.remove_batch paths; DISALLOWED_FIXMES.remove_batch paths | Provider_backend.Local_memory { Provider_backend.fixmes; _ } | Provider_backend.Decl_service { fixmes; _ } -> Fixme_store.remove_batch fixmes.hh_fixmes paths; Fixme_store.remove_batch fixmes.decl_hh_fixmes paths; Fixme_store.remove_batch fixmes.disallowed_fixmes paths let local_changes_push_sharedmem_stack () = HH_FIXMES.LocalChanges.push_stack (); DECL_HH_FIXMES.LocalChanges.push_stack (); DISALLOWED_FIXMES.LocalChanges.push_stack () let local_changes_pop_sharedmem_stack () = HH_FIXMES.LocalChanges.pop_stack (); DECL_HH_FIXMES.LocalChanges.pop_stack (); DISALLOWED_FIXMES.LocalChanges.pop_stack () let fixme_was_applied applied_fixmes fn err_line err_code = match Relative_path.Map.find_opt applied_fixmes fn with | None -> false | Some r -> (match IMap.find_opt err_line r with | None -> false | Some code_set -> ISet.mem err_code code_set) let add_applied_fixme_file m err_code err_line = let line_value = match IMap.find_opt err_line m with | None -> ISet.empty | Some x -> x in IMap.add err_line (ISet.add err_code line_value) m let add_applied_fixme applied_fixmes err_code fn err_line = let file_value = match Relative_path.Map.find_opt applied_fixmes fn with | None -> IMap.empty | Some x -> x in Relative_path.Map.add applied_fixmes ~key:fn ~data:(add_applied_fixme_file file_value err_code err_line) let get_unused_fixmes_for codes applied_fixme_map fn acc = match get_fixmes fn with | None -> acc | Some fixme_map -> IMap.fold (fun line code_map acc -> IMap.fold (fun code fixme_pos acc -> if (List.mem codes code ~equal:( = ) || (List.is_empty codes && code < 5000)) && not (fixme_was_applied applied_fixme_map fn line code) then fixme_pos :: acc else acc) code_map acc) fixme_map acc let get_unused_fixmes ~codes ~applied_fixmes ~fold ~files_info = let applied_fixme_map = List.fold_left applied_fixmes ~init:Relative_path.Map.empty ~f:(fun acc (pos, code) -> let fn = Pos.filename pos in let (line, _, _) = Pos.info_pos pos in add_applied_fixme acc code fn line) in fold files_info ~init:[] ~f:(fun fn _ acc -> get_unused_fixmes_for codes applied_fixme_map fn acc) (*****************************************************************************) (* We register the function that can look up a 
position and determine if * a given position is affected by an HH_FIXME. We use a reference to avoid * a cyclic dependency: everything depends on the Errors module (the module * defining all the errors); because of that, making the Errors module call * into anything that isn't in the standard library is very unwise, because * that code won't be able to add errors. *) (*****************************************************************************) let get_fixmes_for_pos pos = let filename = Pos.filename pos in let (line, _, _) = Pos.info_pos pos in get_fixmes filename |> Option.value ~default:IMap.empty |> IMap.find_opt line |> Option.value ~default:IMap.empty let get_fixme_codes_for_pos pos = get_fixmes_for_pos pos |> IMap.keys |> ISet.of_list let is_disallowed pos code = let filename = Pos.filename pos in let (line, _, _) = Pos.info_pos pos in let fixme_map_opt = match Provider_backend.get () with | Provider_backend.Analysis | Provider_backend.Rust_provider_backend _ | Provider_backend.Pessimised_shared_memory _ | Provider_backend.Shared_memory -> DISALLOWED_FIXMES.get filename | Provider_backend.Local_memory { Provider_backend.fixmes; _ } -> Fixme_store.get fixmes.disallowed_fixmes filename | Provider_backend.Decl_service _ -> None in fixme_map_opt |> Option.value ~default:IMap.empty |> IMap.find_opt line |> Option.value ~default:IMap.empty |> IMap.find_opt code let () = (Errors.get_hh_fixme_pos := fun err_pos err_code -> get_fixmes_for_pos err_pos |> fun imap -> if !Errors.code_agnostic_fixme then if IMap.is_empty imap then None else Some err_pos else IMap.find_opt err_code imap); (Errors.is_hh_fixme := fun err_pos err_code -> Option.is_some (!Errors.get_hh_fixme_pos err_pos err_code)); Errors.is_hh_fixme_disallowed := (fun err_pos err_code -> Option.is_some (is_disallowed err_pos err_code))
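A minimal sketch of the map shape documented at the top of this file (file => line => error code => position). Line 12 and error code 4110 are made-up values, and it assumes [IMap.singleton] exists with the usual [Map.Make] signature.

(* Hedged sketch: register a single HH_FIXME[4110] on line 12 of a file. *)
let register_one_fixme (file : Relative_path.t) (fixme_pos : Pos.t) : unit =
  let map : Fixme_provider.fixme_map =
    IMap.singleton 12 (IMap.singleton 4110 fixme_pos)
  in
  Fixme_provider.provide_hh_fixmes file map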
OCaml Interface
hhvm/hphp/hack/src/providers/fixme_provider.mli
(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. * * This source code is licensed under the MIT license found in the * LICENSE file in the "hack" directory of this source tree. * *) type fixme_map = Pos.t IMap.t IMap.t val get_fixmes : Relative_path.t -> fixme_map option val get_hh_fixmes : Relative_path.t -> fixme_map option val get_decl_hh_fixmes : Relative_path.t -> fixme_map option val get_disallowed_fixmes : Relative_path.t -> fixme_map option val get_fixme_codes_for_pos : Pos.t -> ISet.t val get_unused_fixmes : codes:int list -> applied_fixmes:(Relative_path.t Pos.pos * int) list -> fold: ('a -> init:'b list -> f:(Relative_path.t -> 'c -> Pos.t list -> Pos.t list) -> 'd) -> files_info:'a -> 'd val provide_hh_fixmes : Relative_path.t -> fixme_map -> unit val provide_decl_hh_fixmes : Relative_path.t -> fixme_map -> unit val provide_disallowed_fixmes : Relative_path.t -> fixme_map -> unit val remove_batch : Relative_path.Set.t -> unit val local_changes_push_sharedmem_stack : unit -> unit val local_changes_pop_sharedmem_stack : unit -> unit
OCaml
hhvm/hphp/hack/src/providers/lfu_cache.ml
(* * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the "hack" directory of this source tree. * *) open Hh_prelude open Option.Monad_infix type size = int module type Entry = sig type _ t type 'a key = 'a t type 'a value = 'a val get_size : key:'a key -> value:'a value -> size val key_to_log_string : 'a key -> string end module RevIMap = Caml.Map.Make (struct type t = int let compare a b = Int.compare b a end) exception Done module Cache (Entry : Entry) = struct type key_wrapper = Key : 'a Entry.key -> key_wrapper type value_wrapper = Value_wrapper : 'a Entry.value -> value_wrapper type entry = { frequency: int ref; value: value_wrapper; } type t = { capacity: size; entries: (key_wrapper, entry) Hashtbl.t; } let make_entry value = { frequency = ref 0; value } let make ~(max_size : size) : t = { capacity = max_size; entries = Hashtbl.Poly.create () } let clear (t : t) : unit = Hashtbl.clear t.entries let length (t : t) : int = Hashtbl.length t.entries (** The collection function is called when we reach twice the original capacity in size. When the collection is triggered, we only keep the most frequently used objects. So before collection: size = 2 * capacity After collection: size = capacity (with the most frequently used objects) *) let collect { entries; capacity } = if Hashtbl.length entries < 2 * capacity then () else let sorted_by_freq = (* bucket sort *) Hashtbl.fold ~f:(fun ~key ~data:{ frequency; value } m -> RevIMap.add !frequency ((key, value) :: (RevIMap.find_opt !frequency m |> Option.value ~default:[])) m) entries ~init:RevIMap.empty in Hashtbl.clear entries; try ignore @@ RevIMap.fold (fun _freq values count -> List.fold values ~init:count ~f:(fun count (key, value) -> Hashtbl.set entries ~key ~data:(make_entry value); let count = count + 1 in if count >= capacity then raise Done; count)) sorted_by_freq 0 with | Done -> () let add (type a) (t : t) ~(key : a Entry.key) ~(value : a Entry.value) : unit = collect t; let key = Key key in match Hashtbl.find t.entries key with | Some { frequency; value = Value_wrapper value' } -> incr frequency; if phys_equal (Obj.magic value' : a Entry.value) value then () else Hashtbl.set t.entries ~key ~data:{ frequency; value = Value_wrapper value } | None -> Hashtbl.set t.entries ~key ~data:(make_entry (Value_wrapper value)) let find_or_add (type a) (t : t) ~(key : a Entry.key) ~(default : unit -> a Entry.value option) : a Entry.value option = let entry = Hashtbl.find_and_call t.entries (Key key) ~if_found:(fun { value; frequency } -> incr frequency; Some value) ~if_not_found:(fun _key -> let value_opt = default () in Option.iter value_opt ~f:(fun value -> add t ~key ~value); value_opt >>| fun value -> Value_wrapper value) in match entry with | None -> None | Some (Value_wrapper value) -> (* OCaml [Hashtbl.t] isn't a heterogeneous map. There's no way to indicate that the key and the value type have some relation. Consequently, the [value] we've just retrieved from the hash table has type [$Value_wrapper_'a] but we need one of type ['a], and there's no good way to convince the OCaml compiler that these two types are equivalent. We hope to reduce the danger of using this cache as a heterogeneous map by having this be the only call to unsafe [Obj] functions, as opposed to having every caller call into [Obj]. (The alternative is to implement a heterogeneous map from scratch, or import a library for one.)
*) let value = (Obj.magic value : a Entry.value) in Some value let remove (t : t) ~(key : 'a Entry.key) : unit = Hashtbl.remove t.entries (Key key) let get_telemetry (_ : t) ~(key : string) (telemetry : Telemetry.t) : Telemetry.t = telemetry |> Telemetry.string_ ~key ~value:"LFU telemetry not implemented" let reset_telemetry (_ : t) : unit = () end
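As a quick illustration of the functor above, here is a hedged, self-contained instantiation; the [String_entry] module and all values in it are hypothetical, not part of the source:

```
(* Hypothetical single-type Entry: string keys mapping to string values. *)
module String_entry = struct
  type _ t = Str_key : string -> string t
  type 'a key = 'a t
  type 'a value = 'a
  let get_size ~key:_ ~value:_ = 1
  let key_to_log_string : type a. a key -> string =
    (fun (Str_key k) -> "Str" ^ k)
end

module String_cache = Cache (String_entry)

let () =
  let cache = String_cache.make ~max_size:10 in
  String_cache.add cache ~key:(String_entry.Str_key "greeting") ~value:"hello";
  (* A hit bumps the frequency counter; a miss falls back to [default]. *)
  match
    String_cache.find_or_add
      cache
      ~key:(String_entry.Str_key "greeting")
      ~default:(fun () -> None)
  with
  | Some s -> assert (String.equal s "hello")
  | None -> assert false
```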
OCaml Interface
hhvm/hphp/hack/src/providers/lfu_cache.mli
(* * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the "hack" directory of this source tree. * *) type size = int module type Entry = sig (** The key-value pair type of the stored values. This is expected to be a GADT, which allows us to have a type-safe heterogeneous mapping from key type to value type. For example, for storing decls, we want to have different stored value types based on the key type. A [Fun_decl "foo"] should store a [Typing_defs.fun_elt] and a [Typedef_decl "foo"] should store a [Typing_defs.typedef_type]. (Note that functions and typedefs live in different namespaces.) In GADT syntax, this would be: ``` type _ t = | Fun_decl : string -> Typing_defs.fun_elt t | Typedef_decl : string -> Typing_defs.typedef_type t ``` Then the following is well-typed: ``` let foo_fun : Typing_defs.fun_elt option = Decl_cache.find_or_add cache ~key:(Fun_decl "foo") ~default:(fun () -> None) in let foo_typedef : Typing_defs.typedef_type option = Decl_cache.find_or_add cache ~key:(Typedef_decl "foo") ~default:(fun () -> None) in ``` *) type _ t (** Helper type alias. ['a Entry.key] should be read as "the key type of the entry key-value pair". *) type 'a key = 'a t (** Helper type alias. ['a Entry.value] should be read as "the value type of the entry key-value pair". *) type 'a value = 'a (** Get the size associated with a key-value pair. For example, you can measure its size in bytes. If the size is always [1], this causes the cache to act as a regular LFU cache. *) val get_size : key:'a key -> value:'a value -> size (** For logging/debugging *) val key_to_log_string : 'a key -> string end module Cache (Entry : Entry) : sig type t (** Construct a new cache which can store up to [max_size] worth of values. *) val make : max_size:size -> t (** Remove all entries from the cache. *) val clear : t -> unit (** Get the number of elements currently in the cache. *) val length : t -> int (** Add a [key]-[value] pair to the cache. The cache is always resized to fit under the memory limit after any addition operation. Under some circumstances, this could mean that the given [value] is immediately evicted. (For example, if the [value] is greater than the maximum size of the cache, then it must be evicted.) *) val add : t -> key:'a Entry.key -> value:'a Entry.value -> unit (** Find the element with the given [key] in the cache and return the corresponding value. If the [key] is not present, calls [default] to calculate its value, then [add]s it to the cache and returns that value. If [default] returns [None], then returns [None]; otherwise returns the computed value. Note that this could immediately evict the added value, if any was computed by [default] (see note on [add]). *) val find_or_add : t -> key:'a Entry.key -> default:(unit -> 'a Entry.value option) -> 'a Entry.value option (** Remove the entry with the given key from the cache. If the key is not present, does nothing. *) val remove : t -> key:'a Entry.key -> unit (** The cache keeps track of how long it's spent doing cache overhead and how big it is *) val get_telemetry : t -> key:string -> Telemetry.t -> Telemetry.t (** You can reset the timer. *) val reset_telemetry : t -> unit end
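[get_size] need not just count entries. A hedged sketch of an Entry that measures its string values in bytes follows; the [Blob_entry] module is illustrative, and note that the LFU implementation shown earlier evicts by frequency and entry count, so a byte-based [get_size] only matters to an implementation that honours it:

```
(* Hypothetical Entry measuring values in bytes rather than counting them. *)
module Blob_entry = struct
  type _ t = Blob_key : string -> string t
  type 'a key = 'a t
  type 'a value = 'a

  (* Matching the GADT constructor refines ['a] to [string], so
     [String.length] applies to [value]. *)
  let get_size : type a. key:a key -> value:a value -> size =
   fun ~key ~value ->
    match key with
    | Blob_key _ -> String.length value

  let key_to_log_string : type a. a key -> string =
    (fun (Blob_key k) -> "Blob" ^ k)
end
```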
OCaml
hhvm/hphp/hack/src/providers/naming_provider.ml
(* * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the "hack" directory of this source tree. * *) open Hh_prelude open Reordered_argument_collections let db_path_of_ctx (ctx : Provider_context.t) : Naming_sqlite.db_path option = ctx |> Provider_context.get_backend |> Db_path_provider.get_naming_db_path let not_implemented (backend : Provider_backend.t) = failwith ("not implemented for backend: " ^ Provider_backend.t_to_string backend) let attach_name_type_to_tuple (name_type, path) = (FileInfo.File (name_type, path), name_type) let attach_name_type (name_type : FileInfo.name_type) (x : 'a) : 'a * FileInfo.name_type = (x, name_type) let remove_name_type (x : 'a * FileInfo.name_type) : 'a = fst x let kind_to_name_type (kind_of_type : Naming_types.kind_of_type) : FileInfo.name_type = Naming_types.type_kind_to_name_type kind_of_type let name_type_to_kind (name_type : FileInfo.name_type) : Naming_types.kind_of_type = match Naming_types.type_kind_of_name_type name_type with | Some kind_of_type -> kind_of_type | None -> failwith (Printf.sprintf "Unexpected name type %s" (FileInfo.show_name_type name_type)) let find_symbol_in_context ~(ctx : Provider_context.t) ~(get_entry_symbols : FileInfo.t -> (FileInfo.id * FileInfo.name_type) list) ~(is_symbol : string -> bool) : (FileInfo.pos * FileInfo.name_type) option = Provider_context.get_entries ctx |> Relative_path.Map.filter_map ~f:(fun _path entry -> (* CARE! This obtains names from the AST. They're usually similar to what we get from direct-decl-parser (which is what's used to populate the naming table). But they disagree in cases like "namespace N; namespace M {function f(){} }" where AST says "M\f" and direct-decl says "N\M\f". We can therefore end up in situations where, if you're walking the AST and find a name and then ask for it, Naming_provider will tell you it exists (via the Provider_context entry) but Decl_provider will tell you it doesn't.
*) let file_info = Ast_provider.compute_file_info ~popt:(Provider_context.get_popt ctx) ~entry in let symbols = get_entry_symbols file_info in List.find_map symbols ~f:(fun ((pos, name, _), kind) -> if is_symbol name then Some (pos, kind) else None)) |> Relative_path.Map.choose_opt |> Option.map ~f:snd let find_const_in_context (ctx : Provider_context.t) (name : string) : (FileInfo.pos * FileInfo.name_type) option = find_symbol_in_context ~ctx ~get_entry_symbols:(fun { FileInfo.consts; _ } -> List.map consts ~f:(attach_name_type FileInfo.Const)) ~is_symbol:(String.equal name) let find_fun_in_context (ctx : Provider_context.t) (name : string) : (FileInfo.pos * FileInfo.name_type) option = find_symbol_in_context ~ctx ~get_entry_symbols:(fun { FileInfo.funs; _ } -> List.map funs ~f:(attach_name_type FileInfo.Fun)) ~is_symbol:(String.equal name) let compute_fun_canon_name ctx path name = let open Option.Monad_infix in let canon_name fd = snd fd.Aast.fd_name in Ast_provider.find_ifun_in_file ctx path name >>| canon_name let find_fun_canon_name_in_context (ctx : Provider_context.t) (name : string) : string option = let name = String.lowercase name in let symbol_opt = find_symbol_in_context ~ctx ~get_entry_symbols:(fun { FileInfo.funs; _ } -> List.map funs ~f:(attach_name_type FileInfo.Fun)) ~is_symbol:(fun symbol_name -> String.equal (Naming_sqlite.to_canon_name_key symbol_name) name) in match symbol_opt with | Some (pos, _name_type) -> compute_fun_canon_name ctx (FileInfo.get_pos_filename pos) name | None -> None let get_entry_symbols_for_type { FileInfo.classes; typedefs; _ } = let classes = List.map classes ~f:(attach_name_type FileInfo.Class) in let typedefs = List.map typedefs ~f:(attach_name_type FileInfo.Typedef) in List.concat [classes; typedefs] let find_type_in_context (ctx : Provider_context.t) (name : string) : (FileInfo.pos * FileInfo.name_type) option = find_symbol_in_context ~ctx ~get_entry_symbols:get_entry_symbols_for_type ~is_symbol:(String.equal name) let compute_type_canon_name ctx path kind name = let open Option.Monad_infix in match kind with | Naming_types.TClass -> Ast_provider.find_iclass_in_file ctx path name >>| fun { Aast.c_name = (_, canon_name); _ } -> canon_name | Naming_types.TTypedef -> Ast_provider.find_itypedef_in_file ctx path name >>| fun { Aast.t_name = (_, canon_name); _ } -> canon_name let find_type_canon_name_in_context (ctx : Provider_context.t) (name : string) : string option = let name = String.lowercase name in let symbol_opt = find_symbol_in_context ~ctx ~get_entry_symbols:get_entry_symbols_for_type ~is_symbol:(fun symbol_name -> String.equal (Naming_sqlite.to_canon_name_key symbol_name) name) in match symbol_opt with | Some (pos, name_type) -> compute_type_canon_name ctx (FileInfo.get_pos_filename pos) (name_type_to_kind name_type) name | None -> None let find_module_in_context (ctx : Provider_context.t) (name : string) : (FileInfo.pos * FileInfo.name_type) option = find_symbol_in_context ~ctx ~get_entry_symbols:(fun { FileInfo.modules; _ } -> List.map modules ~f:(attach_name_type FileInfo.Module)) ~is_symbol:(String.equal name) let get_entry_contents ctx filename = match Relative_path.Map.find_opt (Provider_context.get_entries ctx) filename with | None -> None | Some entry -> let source_text = Ast_provider.compute_source_text ~entry in Some (Full_fidelity_source_text.text source_text) let is_path_in_ctx ~(ctx : Provider_context.t) (path : Relative_path.t) : bool = Relative_path.Map.mem (Provider_context.get_entries ctx) path let is_pos_in_ctx ~(ctx 
: Provider_context.t) (pos : FileInfo.pos) : bool = is_path_in_ctx ~ctx (FileInfo.get_pos_filename pos) let rust_backend_ctx_proxy (ctx : Provider_context.t) : Rust_provider_backend.ctx_proxy option = if Relative_path.Map.is_empty (Provider_context.get_entries ctx) then None else Some Rust_provider_backend. { get_entry_contents = get_entry_contents ctx; is_pos_in_ctx = is_pos_in_ctx ~ctx; find_fun_canon_name_in_context = find_fun_canon_name_in_context ctx; find_type_canon_name_in_context = find_type_canon_name_in_context ctx; find_const_in_context = find_const_in_context ctx; find_fun_in_context = find_fun_in_context ctx; find_type_in_context = find_type_in_context ctx; find_module_in_context = find_module_in_context ctx; } let find_symbol_in_context_with_suppression ~(ctx : Provider_context.t) ~(find_symbol_in_context : Provider_context.t -> string -> (FileInfo.pos * FileInfo.name_type) option) ~(fallback : unit -> (FileInfo.pos * FileInfo.name_type) option) (name : string) : (FileInfo.pos * FileInfo.name_type) option = let from_context = find_symbol_in_context ctx name in let from_fallback = fallback () in match (from_context, from_fallback) with | (None, None) -> None | (Some (context_pos, context_name_type), None) -> Some (context_pos, context_name_type) | (None, Some (fallback_pos, fallback_name_type)) -> (* If fallback said it thought the symbol was in ctx, but we definitively know that it isn't, then the answer is None. *) if is_pos_in_ctx ~ctx fallback_pos then None else Some (fallback_pos, fallback_name_type) | ( Some (context_pos, context_name_type), Some (fallback_pos, fallback_name_type) ) -> (* The alphabetically first filename wins *) let context_fn = FileInfo.get_pos_filename context_pos in let fallback_fn = FileInfo.get_pos_filename fallback_pos in if Relative_path.compare context_fn fallback_fn <= 0 then (* symbol is either (1) a duplicate in both context and fallback, and context is the winner, or (2) not a duplicate, and both context and fallback claim it to be defined in a file that's part of the context, in which case context wins. This is consistent with the winner algorithm used by hh_server -- see the comment for [ServerTypeCheck.do_naming].
*) Some (context_pos, context_name_type) else (* symbol is a duplicate in both context and fallback, and fallback is the winner *) Some (fallback_pos, fallback_name_type) let get_and_cache ~(ctx : Provider_context.t) ~(name : 'name) ~(cache : Provider_backend.Reverse_naming_table_delta.pos_or_deleted SMap.t ref) ~(fallback : Naming_sqlite.db_path -> Provider_backend.Reverse_naming_table_delta.pos option) : Provider_backend.Reverse_naming_table_delta.pos option = let open Provider_backend.Reverse_naming_table_delta in match SMap.find_opt !cache name with | Some Deleted -> None | Some (Pos ((name_type, path), _rest)) -> Some (name_type, path) | None -> (match Option.bind (db_path_of_ctx ctx) ~f:fallback with | None -> None | Some (name_type, path) -> cache := SMap.add !cache ~key:name ~data:(Pos ((name_type, path), [])); Some (name_type, path)) let get_const_pos (ctx : Provider_context.t) (name : string) : FileInfo.pos option = match Provider_context.get_backend ctx with | Provider_backend.Rust_provider_backend backend -> Rust_provider_backend.Naming.Consts.get_pos backend (rust_backend_ctx_proxy ctx) name | _ -> let open Option.Monad_infix in find_symbol_in_context_with_suppression name ~ctx ~find_symbol_in_context:find_const_in_context ~fallback:(fun () -> match Provider_context.get_backend ctx with | Provider_backend.Analysis | Provider_backend.Pessimised_shared_memory _ | Provider_backend.Shared_memory -> Naming_heap.Consts.get_pos (db_path_of_ctx ctx) name >>| attach_name_type FileInfo.Const | Provider_backend.Rust_provider_backend _ -> failwith "unreachable" | Provider_backend.Local_memory { Provider_backend.reverse_naming_table_delta; _ } -> let open Provider_backend.Reverse_naming_table_delta in get_and_cache ~ctx ~name ~cache:reverse_naming_table_delta.consts ~fallback:(fun db_path -> Naming_sqlite.get_const_path_by_name db_path name |> Option.map ~f:(fun path -> (FileInfo.Const, path))) >>| attach_name_type_to_tuple | Provider_backend.Decl_service { decl; _ } -> Decl_service_client.Slow.rpc_get_gconst_path decl name) >>| remove_name_type let const_exists (ctx : Provider_context.t) (name : string) : bool = match Provider_context.get_backend ctx with | Provider_backend.Decl_service { decl; _ } -> Decl_service_client.rpc_get_gconst decl name |> Option.is_some | _ -> get_const_pos ctx name |> Option.is_some let get_const_path (ctx : Provider_context.t) (name : string) : Relative_path.t option = get_const_pos ctx name |> Option.map ~f:FileInfo.get_pos_filename let add_const (backend : Provider_backend.t) (name : string) (pos : FileInfo.pos) : unit = match backend with | Provider_backend.Analysis -> failwith "invalid" | Provider_backend.Pessimised_shared_memory _ | Provider_backend.Shared_memory -> Naming_heap.Consts.add name pos | Provider_backend.Rust_provider_backend backend -> Rust_provider_backend.Naming.Consts.add backend name pos | Provider_backend.Local_memory { Provider_backend.reverse_naming_table_delta; _ } -> let open Provider_backend.Reverse_naming_table_delta in let data = Pos ((FileInfo.Const, FileInfo.get_pos_filename pos), []) in reverse_naming_table_delta.consts := SMap.add !(reverse_naming_table_delta.consts) ~key:name ~data | Provider_backend.Decl_service _ as backend -> not_implemented backend let remove_const_batch (backend : Provider_backend.t) (names : string list) : unit = match backend with | Provider_backend.Analysis -> failwith "invalid" | Provider_backend.Pessimised_shared_memory _ | Provider_backend.Shared_memory -> Naming_heap.Consts.remove_batch 
(Db_path_provider.get_naming_db_path backend) names | Provider_backend.Rust_provider_backend rust_backend -> Rust_provider_backend.Naming.Consts.remove_batch rust_backend names | Provider_backend.Local_memory { Provider_backend.reverse_naming_table_delta; _ } -> let open Provider_backend.Reverse_naming_table_delta in reverse_naming_table_delta.consts := List.fold names ~init:!(reverse_naming_table_delta.consts) ~f:(fun acc name -> SMap.add acc ~key:name ~data:Deleted) | Provider_backend.Decl_service _ as backend -> not_implemented backend let get_fun_pos (ctx : Provider_context.t) (name : string) : FileInfo.pos option = match Provider_context.get_backend ctx with | Provider_backend.Rust_provider_backend backend -> Rust_provider_backend.Naming.Funs.get_pos backend (rust_backend_ctx_proxy ctx) name | _ -> let open Option.Monad_infix in find_symbol_in_context_with_suppression name ~ctx ~find_symbol_in_context:find_fun_in_context ~fallback:(fun () -> match Provider_context.get_backend ctx with | Provider_backend.Analysis | Provider_backend.Pessimised_shared_memory _ | Provider_backend.Shared_memory -> Naming_heap.Funs.get_pos (db_path_of_ctx ctx) name >>| attach_name_type FileInfo.Fun | Provider_backend.Rust_provider_backend _ -> failwith "unreachable" | Provider_backend.Local_memory { Provider_backend.reverse_naming_table_delta; _ } -> let open Provider_backend.Reverse_naming_table_delta in get_and_cache ~ctx ~name ~cache:reverse_naming_table_delta.funs ~fallback:(fun db_path -> Naming_sqlite.get_fun_path_by_name db_path name |> Option.map ~f:(fun path -> (FileInfo.Fun, path))) >>| attach_name_type_to_tuple | Provider_backend.Decl_service { decl; _ } -> Decl_service_client.Slow.rpc_get_fun_path decl name) >>| remove_name_type let fun_exists (ctx : Provider_context.t) (name : string) : bool = match Provider_context.get_backend ctx with | Provider_backend.Decl_service { decl; _ } -> Decl_service_client.rpc_get_fun decl name |> Option.is_some | _ -> get_fun_pos ctx name |> Option.is_some let get_fun_path (ctx : Provider_context.t) (name : string) : Relative_path.t option = get_fun_pos ctx name |> Option.map ~f:FileInfo.get_pos_filename let get_fun_canon_name (ctx : Provider_context.t) (name : string) : string option = match Provider_context.get_backend ctx with | Provider_backend.Rust_provider_backend backend -> Rust_provider_backend.Naming.Funs.get_canon_name backend (rust_backend_ctx_proxy ctx) name | _ -> let open Option.Monad_infix in let name = String.lowercase name in (match find_fun_canon_name_in_context ctx name with | Some _ as name_opt -> name_opt | None -> (match Provider_context.get_backend ctx with | Provider_backend.Analysis -> failwith "invalid" | Provider_backend.Pessimised_shared_memory _ | Provider_backend.Shared_memory -> (* NB: as written, this code may return a canon name even when the given symbol has been deleted in a context entry. We're relying on the caller to have called `remove_fun_batch` on any deleted symbols before having called this function. `get_fun_canon_name` is only called in some functions in `Naming_global`, which expects the caller to have called `Naming_global.remove_decls` already. 
*) Naming_heap.Funs.get_canon_name ctx name | Provider_backend.Rust_provider_backend _ -> failwith "unreachable" | Provider_backend.Local_memory { Provider_backend.reverse_naming_table_delta; _ } -> let open Provider_backend.Reverse_naming_table_delta in get_and_cache ~ctx ~name ~cache:reverse_naming_table_delta.funs_canon_key ~fallback:(fun db_path -> Naming_sqlite.get_ifun_path_by_name db_path name |> Option.map ~f:(fun path -> (FileInfo.Fun, path))) >>= fun (_name_type, path) -> (* If reverse_naming_table_delta thought the symbol was in ctx, but we definitively know that it isn't, then it isn't. *) if is_path_in_ctx ~ctx path then None else compute_fun_canon_name ctx path name | Provider_backend.Decl_service { decl; _ } -> Decl_service_client.Slow.rpc_get_fun_canon_name decl name)) let add_fun (backend : Provider_backend.t) (name : string) (pos : FileInfo.pos) : unit = match backend with | Provider_backend.Analysis -> failwith "invalid" | Provider_backend.Pessimised_shared_memory _ | Provider_backend.Shared_memory -> Naming_heap.Funs.add name pos | Provider_backend.Rust_provider_backend backend -> Rust_provider_backend.Naming.Funs.add backend name pos | Provider_backend.Local_memory { Provider_backend.reverse_naming_table_delta; _ } -> let open Provider_backend.Reverse_naming_table_delta in let data = Pos ((FileInfo.Fun, FileInfo.get_pos_filename pos), []) in reverse_naming_table_delta.funs := SMap.add !(reverse_naming_table_delta.funs) ~key:name ~data; reverse_naming_table_delta.funs_canon_key := SMap.add !(reverse_naming_table_delta.funs_canon_key) ~key:(Naming_sqlite.to_canon_name_key name) ~data | Provider_backend.Decl_service _ -> (* Do nothing. All naming table updates are expected to have happened already--we should have sent a control request to the decl service asking it to update in response to the list of changed files. *) () let remove_fun_batch (backend : Provider_backend.t) (names : string list) : unit = match backend with | Provider_backend.Analysis -> failwith "invalid" | Provider_backend.Pessimised_shared_memory _ | Provider_backend.Shared_memory -> Naming_heap.Funs.remove_batch (Db_path_provider.get_naming_db_path backend) names | Provider_backend.Rust_provider_backend rust_backend -> Rust_provider_backend.Naming.Funs.remove_batch rust_backend names | Provider_backend.Local_memory { Provider_backend.reverse_naming_table_delta; _ } -> let open Provider_backend.Reverse_naming_table_delta in reverse_naming_table_delta.funs := List.fold names ~init:!(reverse_naming_table_delta.funs) ~f:(fun acc name -> SMap.add acc ~key:name ~data:Deleted); reverse_naming_table_delta.funs_canon_key := List.fold names ~init:!(reverse_naming_table_delta.funs_canon_key) ~f:(fun acc name -> SMap.add acc ~key:(Naming_sqlite.to_canon_name_key name) ~data:Deleted) | Provider_backend.Decl_service _ as backend -> (* Removing cache items is not the responsibility of hh_worker. 
*) not_implemented backend let add_type (backend : Provider_backend.t) (name : string) (pos : FileInfo.pos) (kind : Naming_types.kind_of_type) : unit = match backend with | Provider_backend.Analysis -> failwith "invalid" | Provider_backend.Pessimised_shared_memory _ | Provider_backend.Shared_memory -> Naming_heap.Types.add name (pos, kind) | Provider_backend.Rust_provider_backend backend -> Rust_provider_backend.Naming.Types.add backend name (pos, kind) | Provider_backend.Local_memory { Provider_backend.reverse_naming_table_delta; _ } -> let open Provider_backend.Reverse_naming_table_delta in let data = Pos ((kind_to_name_type kind, FileInfo.get_pos_filename pos), []) in reverse_naming_table_delta.types := SMap.add !(reverse_naming_table_delta.types) ~key:name ~data; reverse_naming_table_delta.types_canon_key := SMap.add !(reverse_naming_table_delta.types_canon_key) ~key:(Naming_sqlite.to_canon_name_key name) ~data | Provider_backend.Decl_service _ -> (* Do nothing. Naming table updates should be done already. *) () let remove_type_batch (backend : Provider_backend.t) (names : string list) : unit = match backend with | Provider_backend.Analysis -> failwith "invalid" | Provider_backend.Pessimised_shared_memory _ | Provider_backend.Shared_memory -> Naming_heap.Types.remove_batch (Db_path_provider.get_naming_db_path backend) names | Provider_backend.Rust_provider_backend rust_backend -> Rust_provider_backend.Naming.Types.remove_batch rust_backend names | Provider_backend.Local_memory { Provider_backend.reverse_naming_table_delta; _ } -> let open Provider_backend.Reverse_naming_table_delta in reverse_naming_table_delta.types := List.fold names ~init:!(reverse_naming_table_delta.types) ~f:(fun acc name -> SMap.add acc ~key:name ~data:Deleted); reverse_naming_table_delta.types_canon_key := List.fold names ~init:!(reverse_naming_table_delta.types_canon_key) ~f:(fun acc name -> SMap.add acc ~key:(Naming_sqlite.to_canon_name_key name) ~data:Deleted) | Provider_backend.Decl_service _ as backend -> (* Removing cache items is not the responsibility of hh_worker. 
*) not_implemented backend let get_type_pos_and_kind (ctx : Provider_context.t) (name : string) : (FileInfo.pos * Naming_types.kind_of_type) option = match Provider_context.get_backend ctx with | Provider_backend.Rust_provider_backend backend -> Rust_provider_backend.Naming.Types.get_pos backend (rust_backend_ctx_proxy ctx) name | _ -> let open Option.Monad_infix in find_symbol_in_context_with_suppression name ~ctx ~find_symbol_in_context:find_type_in_context ~fallback:(fun () -> match Provider_context.get_backend ctx with | Provider_backend.Analysis | Provider_backend.Pessimised_shared_memory _ | Provider_backend.Shared_memory -> Naming_heap.Types.get_pos (db_path_of_ctx ctx) name >>| fun (pos, kind) -> (pos, kind_to_name_type kind) | Provider_backend.Rust_provider_backend _ -> failwith "unreachable" | Provider_backend.Local_memory { Provider_backend.reverse_naming_table_delta; _ } -> let open Provider_backend.Reverse_naming_table_delta in get_and_cache ~ctx ~name ~cache:reverse_naming_table_delta.types ~fallback:(fun db_path -> Naming_sqlite.get_type_path_by_name db_path name |> Option.map ~f:(fun (path, kind) -> (kind_to_name_type kind, path))) >>| fun (name_type, path) -> (FileInfo.File (name_type, path), name_type) | Provider_backend.Decl_service { decl; _ } -> Decl_service_client.Slow.rpc_get_type_path decl name) >>| fun (pos, name_type) -> (pos, name_type_to_kind name_type) let get_type_pos (ctx : Provider_context.t) (name : string) : FileInfo.pos option = match get_type_pos_and_kind ctx name with | Some (pos, _kind) -> Some pos | None -> None let get_type_path (ctx : Provider_context.t) (name : string) : Relative_path.t option = match get_type_pos_and_kind ctx name with | Some (pos, _kind) -> Some (FileInfo.get_pos_filename pos) | None -> None let get_type_path_and_kind (ctx : Provider_context.t) (name : string) : (Relative_path.t * Naming_types.kind_of_type) option = match get_type_pos_and_kind ctx name with | Some (pos, kind) -> Some (FileInfo.get_pos_filename pos, kind) | None -> None let get_type_kind (ctx : Provider_context.t) (name : string) : Naming_types.kind_of_type option = match Provider_context.get_backend ctx with | Provider_backend.Decl_service { decl; _ } -> Decl_service_client.rpc_get_type_kind decl name | _ -> (match get_type_pos_and_kind ctx name with | Some (_pos, kind) -> Some kind | None -> None) let get_type_canon_name (ctx : Provider_context.t) (name : string) : string option = match Provider_context.get_backend ctx with | Provider_backend.Rust_provider_backend backend -> Rust_provider_backend.Naming.Types.get_canon_name backend (rust_backend_ctx_proxy ctx) name | _ -> let name = String.lowercase name in (match find_type_canon_name_in_context ctx name with | Some _ as name_opt -> name_opt | None -> (match Provider_context.get_backend ctx with | Provider_backend.Analysis -> failwith "invalid" | Provider_backend.Pessimised_shared_memory _ | Provider_backend.Shared_memory -> (* NB: as written, this code may return a canon name even when the given symbol has been deleted in a context entry. We're relying on the caller to have called `remove_fun_batch` on any deleted symbols before having called this function. `get_type_canon_name` is only called in some functions in `Naming_global`, which expects the caller to have called `Naming_global.remove_decls` already. 
*) Naming_heap.Types.get_canon_name ctx name | Provider_backend.Rust_provider_backend _ -> failwith "unreachable" | Provider_backend.Local_memory { Provider_backend.reverse_naming_table_delta; _ } -> let open Option.Monad_infix in let open Provider_backend.Reverse_naming_table_delta in get_and_cache ~ctx ~name ~cache:reverse_naming_table_delta.types_canon_key ~fallback:(fun db_path -> Naming_sqlite.get_itype_path_by_name db_path name |> Option.map ~f:(fun (path, kind) -> (kind_to_name_type kind, path))) >>= fun (name_type, path) -> (* If reverse_naming_table_delta thought the symbol was in ctx, but we definitively know that it isn't, then it isn't. *) if is_path_in_ctx ~ctx path then None else compute_type_canon_name ctx path (name_type_to_kind name_type) name | Provider_backend.Decl_service { decl; _ } -> Decl_service_client.Slow.rpc_get_type_canon_name decl name)) let get_class_path (ctx : Provider_context.t) (name : string) : Relative_path.t option = match get_type_path_and_kind ctx name with | Some (fn, Naming_types.TClass) -> Some fn | Some (_, Naming_types.TTypedef) | None -> None let add_class (backend : Provider_backend.t) (name : string) (pos : FileInfo.pos) : unit = add_type backend name pos Naming_types.TClass let get_typedef_path (ctx : Provider_context.t) (name : string) : Relative_path.t option = (* This function is used even for code that typechecks clean, in order to judge whether an opaque typedef is visible (which it is only in the file being typechecked *) match Provider_context.get_backend ctx with | Provider_backend.Decl_service { decl; _ } -> Decl_service_client.Positioned.rpc_get_typedef_path decl name | _ -> (match get_type_path_and_kind ctx name with | Some (fn, Naming_types.TTypedef) -> Some fn | Some (_, Naming_types.TClass) | None -> None) let add_typedef (backend : Provider_backend.t) (name : string) (pos : FileInfo.pos) : unit = add_type backend name pos Naming_types.TTypedef let get_module_pos (ctx : Provider_context.t) (name : string) : FileInfo.pos option = match Provider_context.get_backend ctx with | Provider_backend.Rust_provider_backend backend -> Rust_provider_backend.Naming.Modules.get_pos backend (rust_backend_ctx_proxy ctx) name | _ -> let open Option.Monad_infix in find_symbol_in_context_with_suppression name ~ctx ~find_symbol_in_context:find_module_in_context ~fallback:(fun () -> match Provider_context.get_backend ctx with | Provider_backend.Analysis | Provider_backend.Pessimised_shared_memory _ | Provider_backend.Shared_memory -> Naming_heap.Modules.get_pos (db_path_of_ctx ctx) name >>| attach_name_type FileInfo.Module | Provider_backend.Rust_provider_backend _ -> failwith "unreachable" | Provider_backend.Local_memory { Provider_backend.reverse_naming_table_delta; _ } -> let open Provider_backend.Reverse_naming_table_delta in get_and_cache ~ctx ~name ~cache:reverse_naming_table_delta.modules ~fallback:(fun db_path -> Naming_sqlite.get_module_path_by_name db_path name |> Option.map ~f:(fun path -> (FileInfo.Module, path))) >>| attach_name_type_to_tuple | Provider_backend.Decl_service { decl; _ } -> Decl_service_client.Slow.rpc_get_module_path decl name) >>| remove_name_type let get_module_path (ctx : Provider_context.t) (name : string) : Relative_path.t option = get_module_pos ctx name |> Option.map ~f:FileInfo.get_pos_filename let module_exists (ctx : Provider_context.t) (name : string) : bool = match Provider_context.get_backend ctx with | Provider_backend.Decl_service { decl; _ } -> Decl_service_client.rpc_get_module decl name |> Option.is_some 
| _ -> get_module_pos ctx name |> Option.is_some let add_module backend name pos = match backend with | Provider_backend.Analysis -> failwith "invalid" | Provider_backend.Pessimised_shared_memory _ | Provider_backend.Shared_memory -> Naming_heap.Modules.add name pos | Provider_backend.Rust_provider_backend backend -> Rust_provider_backend.Naming.Modules.add backend name pos | Provider_backend.Local_memory { Provider_backend.reverse_naming_table_delta; _ } -> let open Provider_backend.Reverse_naming_table_delta in let data = Pos ((FileInfo.Module, FileInfo.get_pos_filename pos), []) in reverse_naming_table_delta.modules := SMap.add !(reverse_naming_table_delta.modules) ~key:name ~data | Provider_backend.Decl_service _ as backend -> not_implemented backend let remove_module_batch backend names = match backend with | Provider_backend.Analysis -> failwith "invalid" | Provider_backend.Pessimised_shared_memory _ | Provider_backend.Shared_memory -> Naming_heap.Modules.remove_batch (Db_path_provider.get_naming_db_path backend) names | Provider_backend.Rust_provider_backend rust_backend -> Rust_provider_backend.Naming.Modules.remove_batch rust_backend names | Provider_backend.Local_memory { Provider_backend.reverse_naming_table_delta; _ } -> let open Provider_backend.Reverse_naming_table_delta in reverse_naming_table_delta.modules := List.fold names ~init:!(reverse_naming_table_delta.modules) ~f:(fun acc name -> SMap.add acc ~key:name ~data:Deleted) | Provider_backend.Decl_service _ as backend -> (* Removing cache items is not the responsibility of hh_worker. *) not_implemented backend let resolve_position : Provider_context.t -> Pos_or_decl.t -> Pos.t = fun ctx pos -> match Pos_or_decl.get_raw_pos_or_decl_reference pos with | `Raw pos -> pos | `Decl_ref decl -> let filename = (match decl with | Decl_reference.Function name -> get_fun_path ctx name | Decl_reference.Type name -> get_type_path ctx name | Decl_reference.GlobalConstant name -> get_const_path ctx name | Decl_reference.Module name -> get_module_path ctx name) |> Option.value ~default:Relative_path.default (* TODO: what to do if decl not found? 
*) in Pos_or_decl.fill_in_filename filename pos let get_module_full_pos_by_parsing_file ctx (pos, name) = match pos with | FileInfo.Full p -> Some p | FileInfo.File (FileInfo.Module, fn) -> Ast_provider.find_module_in_file ctx fn name ~full:false |> Option.map ~f:(fun md -> fst md.Aast.md_name) | FileInfo.(File ((Fun | Class | Typedef | Const), _fn)) -> None let get_const_full_pos_by_parsing_file ctx (pos, name) = match pos with | FileInfo.Full p -> Some p | FileInfo.File (FileInfo.Const, fn) -> Ast_provider.find_gconst_in_file ctx fn name ~full:false |> Option.map ~f:(fun ast -> fst ast.Aast.cst_name) | FileInfo.(File ((Fun | Class | Typedef | Module), _fn)) -> None let get_fun_full_pos_by_parsing_file ctx (pos, name) = match pos with | FileInfo.Full p -> Some p | FileInfo.File (FileInfo.Fun, fn) -> Ast_provider.find_fun_in_file ctx fn name ~full:false |> Option.map ~f:(fun fd -> fst fd.Aast.fd_name) | FileInfo.(File ((Class | Typedef | Const | Module), _fn)) -> None let get_type_full_pos_by_parsing_file ctx (pos, name) = match pos with | FileInfo.Full p -> Some p | FileInfo.File (name_type, fn) -> (match name_type with | FileInfo.Class -> Ast_provider.find_class_in_file ctx fn name ~full:false |> Option.map ~f:(fun ast -> fst ast.Aast.c_name) | FileInfo.Typedef -> Ast_provider.find_typedef_in_file ctx fn name ~full:false |> Option.map ~f:(fun ast -> fst ast.Aast.t_name) | FileInfo.(Fun | Const | Module) -> None) (** This removes the name->path mapping from the naming table (i.e. the combination of sqlite and delta). It is an error to call this method unless name->path exists. We enforce this with exceptions in some cases where it's cheap enough to verify, but not in others where enforcing it would involve a sqlite read. Invariant: this never transitions an entry from Some to None. *) let remove ~(case_insensitive : bool) (delta : Provider_backend.Reverse_naming_table_delta.pos_or_deleted SMap.t) (path : Relative_path.t) (name : string) : Provider_backend.Reverse_naming_table_delta.pos_or_deleted SMap.t = let open Provider_backend.Reverse_naming_table_delta in let name = if case_insensitive then Naming_sqlite.to_canon_name_key name else name in match SMap.find_opt delta name with | None -> (* We've never yet read/cached from sqlite. Presumably the caller is removing the name->path mapping that we assume is present in sqlite. We could read from sqlite right now solely to verify that the user-supplied path matches the one that's in sqlite, but that'd be costly and doesn't seem worth it. 
*) SMap.add delta ~key:name ~data:Deleted | Some Deleted -> failwith "removing symbol that's already removed" | Some (Pos ((_name_type, old_path), [])) -> if not (Relative_path.equal path old_path) then failwith (Printf.sprintf "Naming_provider invariant failed: symbol %s was in %s, but we're trying to remove %s" name (Relative_path.to_absolute old_path) (Relative_path.to_absolute path)); SMap.add delta ~key:name ~data:Deleted | Some (Pos ((_name_type, old_path), rest_hd :: rest_tl)) -> if Relative_path.equal path old_path then SMap.add delta ~key:name ~data:(Pos (rest_hd, rest_tl)) else let rest = List.filter (rest_hd :: rest_tl) ~f:(fun (_name_type, rest_path) -> not (Relative_path.equal path rest_path)) in if List.length rest <> List.length rest_tl then failwith (Printf.sprintf "Naming_provider invariant failed: symbol %s was in several files, but we're trying to remove %s which isn't one of them" name (Relative_path.to_absolute path)); SMap.add delta ~key:name ~data:(Pos ((_name_type, old_path), rest)) (** This adds name->path to the naming table (i.e. the combination of sqlite+delta). Invariant: if this function causes the delta for this symbol to go from None->Some, then the result will include the name->path mapping that was present in sqlite (if any), in addition to the name->path mapping that we wish to add right now. *) let add ~(case_insensitive : bool) (db_path : Naming_sqlite.db_path option) (delta : Provider_backend.Reverse_naming_table_delta.pos_or_deleted SMap.t) (pos : Provider_backend.Reverse_naming_table_delta.pos) (name : string) : Provider_backend.Reverse_naming_table_delta.pos_or_deleted SMap.t = let open Provider_backend.Reverse_naming_table_delta in let name = if case_insensitive then Naming_sqlite.to_canon_name_key name else name in match (SMap.find_opt delta name, db_path) with | (None, None) -> SMap.add delta ~key:name ~data:(Pos (pos, [])) | (None, Some db_path) -> let (name_type, _) = pos in let sqlite_pos = match name_type with | FileInfo.Const -> Option.map (Naming_sqlite.get_const_path_by_name db_path name) ~f:(fun sqlite_path -> (FileInfo.Const, sqlite_path)) | FileInfo.Fun -> let pos = if case_insensitive then Naming_sqlite.get_ifun_path_by_name db_path name else Naming_sqlite.get_fun_path_by_name db_path name in Option.map pos ~f:(fun sqlite_path -> (FileInfo.Fun, sqlite_path)) | FileInfo.Module -> let pos = Naming_sqlite.get_module_path_by_name db_path name in Option.map pos ~f:(fun sqlite_path -> (FileInfo.Module, sqlite_path)) | FileInfo.Class | FileInfo.Typedef -> let pos = if case_insensitive then Naming_sqlite.get_itype_path_by_name db_path name else Naming_sqlite.get_type_path_by_name db_path name in Option.map pos ~f:(fun (sqlite_path, sqlite_kind) -> (kind_to_name_type sqlite_kind, sqlite_path)) in let data = match sqlite_pos with | None -> Pos (pos, []) | Some sqlite_pos -> Pos (sqlite_pos, [pos]) in SMap.add delta ~key:name ~data | (Some Deleted, _) -> SMap.add delta ~key:name ~data:(Pos (pos, [])) | (Some (Pos (old_pos, rest)), _) -> SMap.add delta ~key:name ~data:(Pos (old_pos, pos :: rest)) let update ~(backend : Provider_backend.t) ~(path : Relative_path.t) ~(old_file_info : FileInfo.t option) ~(new_file_info : FileInfo.t option) : unit = let open FileInfo in let strip_positions symbols = List.map symbols ~f:(fun (_, x, _) -> x) in match backend with | Provider_backend.Decl_service _ -> not_implemented backend | Provider_backend.Analysis -> failwith "invalid" | Provider_backend.Rust_provider_backend _ | 
Provider_backend.Pessimised_shared_memory _ | Provider_backend.Shared_memory -> (* Remove old entries *) Option.iter old_file_info ~f:(fun old_file_info -> remove_type_batch backend (strip_positions old_file_info.classes); remove_type_batch backend (strip_positions old_file_info.typedefs); remove_fun_batch backend (strip_positions old_file_info.funs); remove_const_batch backend (strip_positions old_file_info.consts); remove_module_batch backend (strip_positions old_file_info.modules)); (* Add new entries. Note: the caller is expected to have a solution for duplicate names. Note: can't use [Naming_global.ndecl_file_skip_if_already_bound] because it attempts to look up the symbol by doing a file parse, but we have to use the file_info we're given to avoid races. *) Option.iter new_file_info ~f:(fun new_file_info -> List.iter new_file_info.funs ~f:(fun (pos, name, _) -> add_fun backend name pos); List.iter new_file_info.classes ~f:(fun (pos, name, _) -> add_class backend name pos); List.iter new_file_info.typedefs ~f:(fun (pos, name, _) -> add_typedef backend name pos); List.iter new_file_info.consts ~f:(fun (pos, name, _) -> add_const backend name pos); List.iter new_file_info.modules ~f:(fun (pos, name, _) -> add_module backend name pos)); () | Provider_backend.Local_memory { Provider_backend.reverse_naming_table_delta = deltas; naming_db_path_ref; _; } -> let open Provider_backend.Reverse_naming_table_delta in (* helper *) let update ?(case_insensitive = false) olds news delta name_type = (* The following code has a bug. Given "olds/news", it calculates "added/removed" based on case-sensitive comparison. That's straightforwardly correct for our case-sensitive maps. But how does it work for our case-insensitive maps? e.g. olds={Aa,aA}, news={aa}. Therefore added={aa}, removed={Aa,aA} because we calculated these case-sensitively. (1) it removes the lowercase version of "Aa" (2) it removes the lowercase version of "aA" <-- failwith (3) it adds the lowercase version of "aa" Correctness requires that removal is idempotent, and that we do adds after removes. Unfortunately removal currently isn't idempotent; it fails if you try to remove the same thing twice.
*) let olds = strip_positions olds in let news = strip_positions news in let olds_s = SSet.of_list olds in let news_s = SSet.of_list news in let removed = SSet.diff olds_s news_s in let added = SSet.diff news_s olds_s in SSet.iter removed ~f:(fun name -> delta := remove ~case_insensitive !delta path name); SSet.iter added ~f:(fun name -> delta := add !naming_db_path_ref ~case_insensitive !delta (name_type, path) name); () in (* do the update *) let oldfi = Option.value old_file_info ~default:FileInfo.empty_t in let newfi = Option.value new_file_info ~default:FileInfo.empty_t in update oldfi.funs newfi.funs deltas.funs FileInfo.Fun; update oldfi.consts newfi.consts deltas.consts FileInfo.Const; update oldfi.classes newfi.classes deltas.types FileInfo.Class; update oldfi.typedefs newfi.typedefs deltas.types FileInfo.Typedef; update oldfi.modules newfi.modules deltas.modules FileInfo.Module; (* update canon names too *) let updatei = update ~case_insensitive:true in updatei oldfi.funs newfi.funs deltas.funs_canon_key FileInfo.Fun; updatei oldfi.classes newfi.classes deltas.types_canon_key FileInfo.Class; updatei oldfi.typedefs newfi.typedefs deltas.types_canon_key FileInfo.Typedef; () let local_changes_push_sharedmem_stack () : unit = Naming_heap.push_local_changes () let local_changes_pop_sharedmem_stack () : unit = Naming_heap.pop_local_changes () let get_files ctx deps = match Provider_context.get_backend ctx with | Provider_backend.Shared_memory -> Naming_heap.get_filenames_by_hash (db_path_of_ctx ctx) deps | Provider_backend.Rust_provider_backend backend -> Rust_provider_backend.Naming.get_filenames_by_hash backend deps | backend -> let desc = Printf.sprintf "dephash_lookup_%s" (Provider_backend.t_to_string backend) in Hh_logger.log "INVARIANT_VIOLATION_BUG [%s]" desc; HackEventLogger.invariant_violation_bug desc; failwith "need_update_files"
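A hedged lookup sketch over the functions defined above ([describe_symbol] is an illustrative helper, and the use of [Relative_path.suffix] for printing is an assumption):

```
(* Hypothetical helper: classify a name as a type or a function and report
   where the reverse naming table says it lives. *)
let describe_symbol (ctx : Provider_context.t) (name : string) : string =
  match get_type_path_and_kind ctx name with
  | Some (path, Naming_types.TClass) ->
    Printf.sprintf "class %s in %s" name (Relative_path.suffix path)
  | Some (path, Naming_types.TTypedef) ->
    Printf.sprintf "typedef %s in %s" name (Relative_path.suffix path)
  | None ->
    (match get_fun_path ctx name with
    | Some path ->
      Printf.sprintf "fun %s in %s" name (Relative_path.suffix path)
    | None -> Printf.sprintf "%s not found" name)
```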
OCaml Interface
hhvm/hphp/hack/src/providers/naming_provider.mli
(* * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the "hack" directory of this source tree. * *) (** Determine whether a global constant with the given name is declared in the reverse naming table. *) val const_exists : Provider_context.t -> string -> bool (** Look up the file path at which the given global constant was declared in the reverse naming table. *) val get_const_path : Provider_context.t -> string -> Relative_path.t option (** Look up the position at which the given global constant was declared in the reverse naming table. *) val get_const_pos : Provider_context.t -> string -> FileInfo.pos option (** Resolve the given name+FileInfo.pos (which might only have filename) into an actual position, by parsing the AST if necessary *) val get_const_full_pos_by_parsing_file : Provider_context.t -> FileInfo.pos * string -> Pos.t option (** Record that a global constant with the given name was declared at the given position. *) val add_const : Provider_backend.t -> string -> FileInfo.pos -> unit (** Remove all global constants with the given names from the reverse naming table. *) val remove_const_batch : Provider_backend.t -> string list -> unit (** Determine whether a global function with the given name is declared in the reverse naming table. *) val fun_exists : Provider_context.t -> string -> bool (** Look up the file path in which the given global function was declared in the reverse naming table. *) val get_fun_path : Provider_context.t -> string -> Relative_path.t option (** Look up the position at which the given global function was declared in the reverse naming table. *) val get_fun_pos : Provider_context.t -> string -> FileInfo.pos option (** Resolve the given name+FileInfo.pos (which might only have filename) into an actual position, by parsing the AST if necessary *) val get_fun_full_pos_by_parsing_file : Provider_context.t -> FileInfo.pos * string -> Pos.t option (** Look up the canonical name for the given global function. THIS IS A BAD API. The reverse-naming-table should solely be a multimap from symbol name (maybe case insensitive) to filename+type. That's what the other APIs here do. But this API requires us to read the filename and parse it to return the canon name. Moreover, one form of storage (SQL) only stores filenames, while another form of storage (sharedmem) only stores canonical names, which means we can't easily clean up this API. *) val get_fun_canon_name : Provider_context.t -> string -> string option (** Record that a global function with the given name was declared at the given position. *) val add_fun : Provider_backend.t -> string -> FileInfo.pos -> unit (** Remove all global functions with the given names from the reverse naming table. *) val remove_fun_batch : Provider_backend.t -> string list -> unit (** Record that a type (one of [Naming_types.kind_of_type]) was declared at the given position. These types all live in the same namespace, unlike functions and constants. *) val add_type : Provider_backend.t -> string -> FileInfo.pos -> Naming_types.kind_of_type -> unit (** Remove all types with the given names from the reverse naming table. *) val remove_type_batch : Provider_backend.t -> string list -> unit (** Look up the position at which the given type was declared in the reverse naming table.
*) val get_type_pos : Provider_context.t -> string -> FileInfo.pos option (** Resolve the given name+FileInfo.pos (which might only have filename) into an actual position, by parsing the AST if necessary *) val get_type_full_pos_by_parsing_file : Provider_context.t -> FileInfo.pos * string -> Pos.t option (** Look up the file path declaring the given type in the reverse naming table. *) val get_type_path : Provider_context.t -> string -> Relative_path.t option (** Look up the kind with which the given type was declared in the reverse naming table. *) val get_type_kind : Provider_context.t -> string -> Naming_types.kind_of_type option (** Look up the position and kind with which the given type was declared in the reverse naming table. *) val get_type_pos_and_kind : Provider_context.t -> string -> (FileInfo.pos * Naming_types.kind_of_type) option (** Look up the path and kind with which the given type was declared in the reverse naming table. *) val get_type_path_and_kind : Provider_context.t -> string -> (Relative_path.t * Naming_types.kind_of_type) option (** Look up the canonical name for the given type. THIS IS A BAD API. The reverse-naming-table should solely be a multimap from symbol name (maybe case insensitive) to filename+type. That's what the other APIs here do. But this API requires us to read the filename and parse it to return the canon name. Moreover, one form of storage (SQL) only stores filenames, while another form of storage (sharedmem) only stores canonical names, which means we can't easily clean up this API. *) val get_type_canon_name : Provider_context.t -> string -> string option (** Look up the file path declaring the given class in the reverse naming table. Same as calling [get_type_pos] and extracting the path if the result is a [Naming_types.TClass]. *) val get_class_path : Provider_context.t -> string -> Relative_path.t option (** Record that a class with the given name was declared at the given position. Same as calling [add_type] with [Naming_types.TClass]. *) val add_class : Provider_backend.t -> string -> FileInfo.pos -> unit (** Look up the file path declaring the given typedef in the reverse naming table. Same as calling [get_type_pos] and extracting the path if the result is a [Naming_types.TTypedef]. *) val get_typedef_path : Provider_context.t -> string -> Relative_path.t option (** Record that a typedef with the given name was declared at the given position. Same as calling [add_type] with [Naming_types.TTypedef]. *) val add_typedef : Provider_backend.t -> string -> FileInfo.pos -> unit (** Updates the reverse naming table based on old+new names in this file *) val update : backend:Provider_backend.t -> path:Relative_path.t -> old_file_info:FileInfo.t option -> new_file_info:FileInfo.t option -> unit val local_changes_push_sharedmem_stack : unit -> unit val local_changes_pop_sharedmem_stack : unit -> unit (** Resolve a decl position to a raw position using a provider context.
*) val resolve_position : Provider_context.t -> Pos_or_decl.t -> Pos.t val get_files : Provider_context.t -> Typing_deps.DepSet.t -> Relative_path.Set.t (** Resolve the given name+FileInfo.pos (which might only have filename) into an actual position, by parsing the AST if necessary *) val get_module_full_pos_by_parsing_file : Provider_context.t -> FileInfo.pos * string -> Pos.t option val get_module_pos : Provider_context.t -> string -> FileInfo.pos option val get_module_path : Provider_context.t -> string -> Relative_path.t option val module_exists : Provider_context.t -> string -> bool val add_module : Provider_backend.t -> string -> FileInfo.pos -> unit val remove_module_batch : Provider_backend.t -> string list -> unit val rust_backend_ctx_proxy : Provider_context.t -> Rust_provider_backend.ctx_proxy option val get_entry_contents : Provider_context.t -> Relative_path.t -> string option
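The [FileInfo.pos] values returned by the getters above may carry only a filename; turning one into a concrete [Pos.t] is a separate step. A minimal hedged sketch ([fun_definition_pos] is an illustrative name):

```
(* Hypothetical helper: find where a global function is defined, parsing
   the file for the full position only when the naming table stored just
   a filename. *)
let fun_definition_pos (ctx : Provider_context.t) (name : string) :
    Pos.t option =
  match Naming_provider.get_fun_pos ctx name with
  | None -> None
  | Some pos ->
    Naming_provider.get_fun_full_pos_by_parsing_file ctx (pos, name)
```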
OCaml
hhvm/hphp/hack/src/providers/provider_backend.ml
(* * Copyright (c) 2019, Facebook, Inc. * All rights reserved. * * This source code is licensed under the MIT license found in the * LICENSE file in the "hack" directory of this source tree. * *) open Hh_prelude type pessimisation_info = { pessimise_shallow_class: Relative_path.t -> name:string -> Shallow_decl_defs.shallow_class -> Shallow_decl_defs.shallow_class; pessimise_fun: Relative_path.t -> name:string -> Typing_defs.fun_elt -> Typing_defs.fun_elt; pessimise_gconst: Relative_path.t -> name:string -> Typing_defs.const_decl -> Typing_defs.const_decl; pessimise_typedef: Relative_path.t -> name:string -> Typing_defs.typedef_type -> Typing_defs.typedef_type; allow_ast_caching: bool; store_pessimised_result: bool; } module Decl_cache_entry = struct (* NOTE: we can't simply use a string as a key. In the case of a name conflict, we may put e.g. a function named 'foo' into the cache whose value is one type, and then later try to withdraw a class named 'foo' whose value is another type. *) type _ t = | Fun_decl : string -> Typing_defs.fun_elt t | Class_decl : string -> Typing_class_types.class_t t | Typedef_decl : string -> Typing_defs.typedef_type t | Gconst_decl : string -> Typing_defs.const_decl t | Module_decl : string -> Typing_defs.module_def_type t type 'a key = 'a t type 'a value = 'a let get_size ~key:_ ~value:_ = 1 let key_to_log_string : type a. a key -> string = fun key -> match key with | Fun_decl s -> "FunDecl" ^ s | Class_decl s -> "ClassDecl" ^ s | Typedef_decl s -> "TypedefDecl" ^ s | Gconst_decl s -> "GconstDecl" ^ s | Module_decl s -> "ModuleDecl" ^ s end module Cache (Entry : Lfu_cache.Entry) = Lfu_cache.Cache (Entry) module Decl_cache = Cache (Decl_cache_entry) module Shallow_decl_cache_entry = struct type _ t = Shallow_class_decl : string -> Shallow_decl_defs.shallow_class t type 'a key = 'a t type 'a value = 'a let get_size ~key:_ ~value:_ = 1 let key_to_log_string : type a. a key -> string = (fun (Shallow_class_decl key) -> "ClassShallow" ^ key) end module Shallow_decl_cache = Cache (Shallow_decl_cache_entry) module Folded_class_cache_entry = struct type _ t = Folded_class_decl : string -> Decl_defs.decl_class_type t type 'a key = 'a t type 'a value = 'a let get_size ~key:_ ~value:_ = 1 let key_to_log_string : type a.
a key -> string = (fun (Folded_class_decl key) -> "ClassFolded" ^ key) end module Folded_class_cache = Cache (Folded_class_cache_entry) type fixme_map = Pos.t IMap.t IMap.t [@@deriving show] module Fixme_store = struct type t = fixme_map Relative_path.Map.t ref let empty () = ref Relative_path.Map.empty let get t filename = Relative_path.Map.find_opt !t filename let add t filename fixmes = t := Relative_path.Map.add !t ~key:filename ~data:fixmes let remove t filename = t := Relative_path.Map.remove !t filename let remove_batch t filenames = t := Relative_path.Set.fold filenames ~init:!t ~f:(fun filename map -> Relative_path.Map.remove map filename) end module Fixmes = struct type t = { hh_fixmes: Fixme_store.t; decl_hh_fixmes: Fixme_store.t; disallowed_fixmes: Fixme_store.t; } let get_telemetry ~(key : string) (t : t) (telemetry : Telemetry.t) : Telemetry.t = let hh_fixme_files = Relative_path.Map.cardinal !(t.hh_fixmes) in let decl_hh_fixme_files = Relative_path.Map.cardinal !(t.decl_hh_fixmes) in let disallowed_fixme_files = Relative_path.Map.cardinal !(t.disallowed_fixmes) in if hh_fixme_files + decl_hh_fixme_files + disallowed_fixme_files = 0 then telemetry else let sub_telemetry = Telemetry.create () |> Telemetry.int_ ~key:"hh_fixme_files" ~value:hh_fixme_files |> Telemetry.int_ ~key:"decl_hh_fixme_files" ~value:decl_hh_fixme_files |> Telemetry.int_ ~key:"disallowed_fixme_files" ~value:disallowed_fixme_files in Telemetry.object_ telemetry ~key ~value:sub_telemetry end let empty_fixmes = Fixmes. { hh_fixmes = Fixme_store.empty (); decl_hh_fixmes = Fixme_store.empty (); disallowed_fixmes = Fixme_store.empty (); } module Reverse_naming_table_delta = struct type pos = FileInfo.name_type * Relative_path.t type pos_or_deleted = | Pos of pos * pos list | Deleted type t = { consts: pos_or_deleted SMap.t ref; funs: pos_or_deleted SMap.t ref; types: pos_or_deleted SMap.t ref; modules: pos_or_deleted SMap.t ref; funs_canon_key: pos_or_deleted SMap.t ref; types_canon_key: pos_or_deleted SMap.t ref; } let make () : t = { consts = ref SMap.empty; funs = ref SMap.empty; types = ref SMap.empty; modules = ref SMap.empty; funs_canon_key = ref SMap.empty; types_canon_key = ref SMap.empty; } let get_telemetry ~(key : string) (t : t) (telemetry : Telemetry.t) : Telemetry.t = let consts = SMap.cardinal !(t.consts) in let funs = SMap.cardinal !(t.funs) in let types = SMap.cardinal !(t.types) in let modules = SMap.cardinal !(t.modules) in if consts + funs + types + modules = 0 then telemetry else let sub_telemetry = Telemetry.create () |> Telemetry.int_ ~key:"consts" ~value:consts |> Telemetry.int_ ~key:"funs" ~value:funs |> Telemetry.int_ ~key:"types" ~value:types |> Telemetry.int_ ~key:"modules" ~value:modules in Telemetry.object_ telemetry ~key ~value:sub_telemetry end type local_memory = { shallow_decl_cache: Shallow_decl_cache.t; folded_class_cache: Folded_class_cache.t; decl_cache: Decl_cache.t; reverse_naming_table_delta: Reverse_naming_table_delta.t; fixmes: Fixmes.t; naming_db_path_ref: Naming_sqlite.db_path option ref; } type t = | Shared_memory | Pessimised_shared_memory of pessimisation_info | Local_memory of local_memory | Decl_service of { decl: Decl_service_client.t; fixmes: Fixmes.t; } | Rust_provider_backend of Rust_provider_backend.t | Analysis let t_to_string (t : t) : string = match t with | Shared_memory -> "Shared_memory" | Pessimised_shared_memory _ -> "Pessimised_shared_memory" | Local_memory _ -> "Local_memory" | Decl_service _ -> "Decl_service" | Rust_provider_backend _ -> 
"Rust_provider_backend" | Analysis -> "Analysis" let backend_ref = ref Shared_memory let set_analysis_backend () : unit = backend_ref := Analysis let set_shared_memory_backend () : unit = backend_ref := Shared_memory; Decl_store.set Decl_store.shared_memory_store; () let set_pessimised_shared_memory_backend info : unit = backend_ref := Pessimised_shared_memory info; Decl_store.set Decl_store.shared_memory_store; () let set_rust_backend popt : unit = backend_ref := Rust_provider_backend (Rust_provider_backend.make popt) let set_custom_rust_backend backend : unit = Rust_provider_backend.set backend; backend_ref := Rust_provider_backend backend let make_decl_store_from_local_memory ({ decl_cache; folded_class_cache; _ } : local_memory) : Decl_store.decl_store = { Decl_store.add_class = (fun k v -> Folded_class_cache.add folded_class_cache ~key:(Folded_class_cache_entry.Folded_class_decl k) ~value:v); get_class = (fun k : Decl_defs.decl_class_type option -> Folded_class_cache.find_or_add folded_class_cache ~key:(Folded_class_cache_entry.Folded_class_decl k) ~default:(fun _ -> None)); add_typedef = (fun k v -> Decl_cache.add decl_cache ~key:(Decl_cache_entry.Typedef_decl k) ~value:v); get_typedef = (fun k -> Decl_cache.find_or_add decl_cache ~key:(Decl_cache_entry.Typedef_decl k) ~default:(fun _ -> None)); add_module = (fun k v -> Decl_cache.add decl_cache ~key:(Decl_cache_entry.Module_decl k) ~value:v); get_module = (fun k -> Decl_cache.find_or_add decl_cache ~key:(Decl_cache_entry.Module_decl k) ~default:(fun _ -> None)); add_fun = (fun k v -> Decl_cache.add decl_cache ~key:(Decl_cache_entry.Fun_decl k) ~value:v); get_fun = (fun k -> Decl_cache.find_or_add decl_cache ~key:(Decl_cache_entry.Fun_decl k) ~default:(fun _ -> None)); add_gconst = (fun k v -> Decl_cache.add decl_cache ~key:(Decl_cache_entry.Gconst_decl k) ~value:v); get_gconst = (fun k -> Decl_cache.find_or_add decl_cache ~key:(Decl_cache_entry.Gconst_decl k) ~default:(fun _ -> None)); add_method = (fun _ _ -> ()); get_method = (fun _ -> None); add_static_method = (fun _ _ -> ()); get_static_method = (fun _ -> None); add_prop = (fun _ _ -> ()); get_prop = (fun _ -> None); add_static_prop = (fun _ _ -> ()); get_static_prop = (fun _ -> None); add_constructor = (fun _ _ -> ()); get_constructor = (fun _ -> None); push_local_changes = (fun () -> ()); pop_local_changes = (fun () -> ()); } let set_local_memory_backend_internal ~(max_num_decls : int) ~(max_num_shallow_decls : int) : unit = let local_memory = { decl_cache = Decl_cache.make ~max_size:max_num_decls; folded_class_cache = Folded_class_cache.make ~max_size:max_num_decls; shallow_decl_cache = Shallow_decl_cache.make ~max_size:max_num_shallow_decls; reverse_naming_table_delta = Reverse_naming_table_delta.make (); fixmes = empty_fixmes; naming_db_path_ref = ref None; } in backend_ref := Local_memory local_memory; Decl_store.set @@ make_decl_store_from_local_memory local_memory; () let set_local_memory_backend ~(max_num_decls : int) ~(max_num_shallow_decls : int) = Hh_logger.log "Provider_backend.Local_memory cache sizes: max_num_decls=%d max_num_shallow_decls=%d" max_num_decls max_num_shallow_decls; set_local_memory_backend_internal ~max_num_decls ~max_num_shallow_decls let set_local_memory_backend_with_defaults_for_test () : unit = (* These are all arbitrary, so that test can spin up the backend easily; they haven't been tuned and shouldn't be used in production. 
*) set_local_memory_backend_internal ~max_num_decls:5000 ~max_num_shallow_decls:(140 * 1024 * 1024) let make_decl_store_from_decl_service (decl : Decl_service_client.t) (tcopts : TypecheckerOptions.t) : Decl_store.decl_store = assert (not (TypecheckerOptions.populate_member_heaps tcopts)); Decl_store. { (* Decl_service must only be used without member heaps. Therefore, add_prop,static_prop,method,static_method_constructor will never be called. And the corresponding get_ methods will always return None, to indicate that the caller in decl_folded_class.ml will have to retrieve the information from a shallow decl. *) add_prop = (fun _ _ -> failwith "decl_service.add_prop"); get_prop = (fun _ -> None); add_static_prop = (fun _ _ -> failwith "decl_service.add_static_prop"); get_static_prop = (fun _ -> None); add_method = (fun _ _ -> failwith "decl_service.add_method"); get_method = (fun _ -> None); add_static_method = (fun _ _ -> failwith "decl_service.add_static_method"); get_static_method = (fun _ -> None); add_constructor = (fun _ _ -> failwith "decl_service.add_constructor"); get_constructor = (fun _ -> None); (* Local_changes is a concept used by hh_server to deliver typechecks for unsaved files. It is not supported by the decl-service. *) pop_local_changes = (fun () -> failwith "decl_service.pop_local_changes"); push_local_changes = (fun () -> failwith "push_local_changes"); (* The {add,get}_{fun,typedef,gconst,module} part of Decl_store API aren't used at all. Instead, when Decl_provider wants one of these things, it calls directly into other APIs in this module. In case you're wondering why "get_shallow_class" isn't also here? Well, it's called by Shallow_classes_provider and it doesn't use the Decl_store API at all; it calls other functions in this current module too. *) get_fun = (fun _ -> failwith "decl_service.get_fun"); add_fun = (fun _ _ -> failwith "decl_service.add_fun"); get_typedef = (fun _ -> failwith "decl_service.get_typedef"); add_typedef = (fun _ _ -> failwith "decl_service.add_typedef"); get_gconst = (fun _ -> failwith "decl_service.get_gconst"); add_gconst = (fun _ _ -> failwith "decl_service.add_gconst"); get_module = (fun _ -> failwith "decl_service.get_module"); add_module = (fun _ _ -> failwith "decl_service.add_module"); (* The {get,add}_class API is used by decl_folded_class.ml in its folding work, invoked by Decl_provider. These two functions deal with folded decls by the way, not shallow. Anyway, our implementation of "get_class" and "add_class" is to use our process-local cache, and also pass through to hh_decl where needed. 
*) get_class = Decl_service_client.rpc_get_folded_class decl; add_class = Decl_service_client.rpc_store_folded_class decl; } let set_decl_service_backend (decl : Decl_service_client.t) (tcopt : TypecheckerOptions.t) : unit = backend_ref := Decl_service { decl; fixmes = empty_fixmes }; Decl_store.set (make_decl_store_from_decl_service decl tcopt); () let get () : t = !backend_ref let supports_eviction (t : t) : bool = match t with | Pessimised_shared_memory _ | Analysis -> false | Local_memory _ | Decl_service _ | Rust_provider_backend _ | Shared_memory -> true let noop_pessimisation_info = { pessimise_shallow_class = (fun _path ~name:_ x -> x); pessimise_fun = (fun _path ~name:_ x -> x); pessimise_gconst = (fun _path ~name:_ x -> x); pessimise_typedef = (fun _path ~name:_ x -> x); allow_ast_caching = false; store_pessimised_result = false; } let is_pessimised_shared_memory_backend = function | Pessimised_shared_memory _ -> true | _ -> false let get_pessimised_shared_memory_backend_info = function | Pessimised_shared_memory info -> Some info | _ -> None
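A minimal usage sketch (not part of provider_backend.ml): it selects the local-memory backend and then dispatches on the result of `get ()`. The cache sizes and the `_example_select_backend` caller are arbitrary illustration values, not tuned recommendations.

let _example_select_backend () : unit =
  (* Wires up the Local_memory caches and installs the matching Decl_store. *)
  Provider_backend.set_local_memory_backend
    ~max_num_decls:5000
    ~max_num_shallow_decls:10000;
  match Provider_backend.get () with
  | Provider_backend.Local_memory _ -> () (* serverless-IDE style caches *)
  | other ->
    failwith ("unexpected backend: " ^ Provider_backend.t_to_string other)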
OCaml Interface
hhvm/hphp/hack/src/providers/provider_backend.mli
(* * Copyright (c) 2019, Facebook, Inc. * All rights reserved. * * This source code is licensed under the MIT license found in the * LICENSE file in the "hack" directory of this source tree. * *) (** Used by the Pessimised_shared_memory backend to decl-pessimise definitions not already found in the decl heap on-the-fly. *) type pessimisation_info = { pessimise_shallow_class: Relative_path.t -> name:string -> Shallow_decl_defs.shallow_class -> Shallow_decl_defs.shallow_class; pessimise_fun: Relative_path.t -> name:string -> Typing_defs.fun_elt -> Typing_defs.fun_elt; pessimise_gconst: Relative_path.t -> name:string -> Typing_defs.const_decl -> Typing_defs.const_decl; pessimise_typedef: Relative_path.t -> name:string -> Typing_defs.typedef_type -> Typing_defs.typedef_type; allow_ast_caching: bool; (** The [AST_provider] must not use any caches (neither for ASTs nor files) if this is unset *) store_pessimised_result: bool; (** Indicates whether or not decl-based pessimisation results (as determined by the functions above) should immediately be stored in the corresponding heaps. *) } module Cache (Entry : Lfu_cache.Entry) : module type of Lfu_cache.Cache (Entry) module Decl_cache_entry : sig type _ t = | Fun_decl : string -> Typing_defs.fun_elt t | Class_decl : string -> Typing_class_types.class_t t | Typedef_decl : string -> Typing_defs.typedef_type t | Gconst_decl : string -> Typing_defs.const_decl t | Module_decl : string -> Typing_defs.module_def_type t type 'a key = 'a t type 'a value = 'a val get_size : key:'a key -> value:'a value -> int val key_to_log_string : 'a key -> string end module Decl_cache : module type of Cache (Decl_cache_entry) module Shallow_decl_cache_entry : sig type _ t = Shallow_class_decl : string -> Shallow_decl_defs.shallow_class t type 'a key = 'a t type 'a value = 'a val get_size : key:'a key -> value:'a value -> int val key_to_log_string : 'a key -> string end module Shallow_decl_cache : module type of Cache (Shallow_decl_cache_entry) module Folded_class_cache_entry : sig type _ t = Folded_class_decl : string -> Decl_defs.decl_class_type t type 'a key = 'a t type 'a value = 'a val get_size : key:'a key -> value:'a value -> int val key_to_log_string : 'a key -> string end module Folded_class_cache : module type of Cache (Folded_class_cache_entry) (** A `fixme_map` associates: line number guarded by HH_FIXME => error_node_number => position of HH_FIXME comment *) type fixme_map = Pos.t IMap.t IMap.t [@@deriving show] module Fixme_store : sig (** a mutable store *) type t val empty : unit -> t val get : t -> Relative_path.t -> fixme_map option val add : t -> Relative_path.t -> fixme_map -> unit val remove : t -> Relative_path.t -> unit val remove_batch : t -> Relative_path.Set.t -> unit end module Fixmes : sig (** The `hh_fixmes` and `decl_hh_fixmes` fields represent the HH_FIXMEs we detected in each file in a full-parse and a decl-parse, respectively. The former will be a superset of the latter, when both are populated and up-to-date. We have both because in some scenarios, we will have only performed a decl-parse on a given file, but wouldn't want to use the FIXMEs we collected during a decl-parse (i.e., one in which we throw away function bodies and any FIXMEs they might contain) during a future full-typecheck of that file. When performing a lookup (in `Fixme_provider`), we check `hh_fixmes` first, and use `decl_hh_fixmes` only if there is no entry in `hh_fixmes`.
*) type t = private { hh_fixmes: Fixme_store.t; decl_hh_fixmes: Fixme_store.t; disallowed_fixmes: Fixme_store.t; } val get_telemetry : key:string -> t -> Telemetry.t -> Telemetry.t end module Reverse_naming_table_delta : sig type pos = FileInfo.name_type * Relative_path.t type pos_or_deleted = | Pos of pos * pos list (** Pos(first,rest) is a multiset "first::rest" of positions. An arbitrary one of the positions is stored as 'first', and the rest are stored in 'rest'. This structure represents in OCaml's type system that the 'Pos' case has at least one element in its multiset. Also, the first position is the one returned when a caller asks what is "the" position for a given symbol -- many callers aren't even aware that there may be multiple positions, and will happily do something reasonable when given an arbitrary one. Our current implementation happens to leave 'first' unchanged until such time as it's removed, at which point it's arbitrary which of 'rest' (if there are any) will be promoted to 'first'. *) | Deleted (** This stores a multimap from symbol name to the position(s) where it's defined. It also stores a lower-case version of the multimap. *) type t = { consts: pos_or_deleted SMap.t ref; funs: pos_or_deleted SMap.t ref; types: pos_or_deleted SMap.t ref; modules: pos_or_deleted SMap.t ref; funs_canon_key: pos_or_deleted SMap.t ref; types_canon_key: pos_or_deleted SMap.t ref; } val get_telemetry : key:string -> t -> Telemetry.t -> Telemetry.t end type local_memory = { shallow_decl_cache: Shallow_decl_cache.t; (** A cache for shallow classes. This corresponds to Shallow_class_heap.Classes used when we use the shared memory backend. *) folded_class_cache: Folded_class_cache.t; (** A cache for folded classes. This corresponds to Decl_heap.Classes used when we use the shared memory backend. *) decl_cache: Decl_cache.t; (** This contains top-level decls: functions, classish types, type aliases, global constants, etc. The classes in this cache correspond to Decl_provider.Cache used when we use the shared memory backend. The other top-level definitions correspond to Decl_heap.Typedefs/GConsts/Modules/etc. *) reverse_naming_table_delta: Reverse_naming_table_delta.t; (** A map from symbol-name to pos. (1) It's used as a slowly updated authoritative place to look for symbols that have changed on disk since the naming-table sqlite. "Slow" means we might be asked to compute TASTs even before reverse_naming_table_delta has been updated to reflect all the changed files on disk. It stores 'Deleted' for symbols which have been deleted since the saved-state; once a symbol is in the delta, it never leaves. (2) It's used as a cache of naming-table-sqlite lookups, to speed them up on subsequent queries, since sqlite is slow. (3) If a symbol is defined in two files, the delta will only point to an arbitrary one of those files. (4) It stores "FileInfo.pos" positions. These can be either filename-only or filename-line-col positions. There's no particular invariant enforced about this. We happen to store filename-only for file changes. (5) It stores names, and also canon_key (lowercase) names. For authoritative names, it always stores both. For cached names, it might store one or both. If two symbols have the same canon_key, then the canon_key points to an arbitrary one of them.
*) fixmes: Fixmes.t; naming_db_path_ref: Naming_sqlite.db_path option ref; } type t = | Shared_memory (** Used by hh_server and hh_single_type_check *) | Pessimised_shared_memory of pessimisation_info | Local_memory of local_memory (** Used by serverless IDE *) | Decl_service of { decl: Decl_service_client.t; fixmes: Fixmes.t; } (** Used by the hh_server rearchitecture (hh_decl/hh_worker) *) | Rust_provider_backend of Rust_provider_backend.t (** For the Rust port of Provider_backend and decl-folding logic (rupro/hackrs) *) | Analysis val t_to_string : t -> string val set_analysis_backend : unit -> unit val set_shared_memory_backend : unit -> unit val set_pessimised_shared_memory_backend : pessimisation_info -> unit val set_rust_backend : ParserOptions.t -> unit val set_custom_rust_backend : Rust_provider_backend.t -> unit val set_local_memory_backend_with_defaults_for_test : unit -> unit (** TODO(ljw): for now, max_num_shallow_decls accepts a special value "-1" which reflects the status quo ante, a max size of 140mb in bytes rather than a max number. This will be removed shortly. *) val set_local_memory_backend : max_num_decls:int -> max_num_shallow_decls:int -> unit val set_decl_service_backend : Decl_service_client.t -> TypecheckerOptions.t -> unit val get : unit -> t val supports_eviction : t -> bool val get_pessimised_shared_memory_backend_info : t -> pessimisation_info option val is_pessimised_shared_memory_backend : t -> bool val noop_pessimisation_info : pessimisation_info
OCaml
hhvm/hphp/hack/src/providers/provider_context.ml
(* * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the "hack" directory of this source tree. * *) open Hh_prelude module PositionedSyntaxTree = Full_fidelity_syntax_tree.WithSyntax (Full_fidelity_positioned_syntax) type entry_contents = | Not_yet_read_from_disk | Contents_from_disk of string | Provided_contents of string | Read_contents_from_disk_failed of Exception.t | Raise_exn_on_attempt_to_read type entry = { path: Relative_path.t; mutable contents: entry_contents; mutable source_text: Full_fidelity_source_text.t option; mutable parser_return: Parser_return.t option; mutable ast_errors: Errors.t option; mutable cst: PositionedSyntaxTree.t option; mutable tast: Tast.program Tast_with_dynamic.t option; mutable all_errors: Errors.t option; mutable symbols: Relative_path.t SymbolOccurrence.t list option; } type entries = entry Relative_path.Map.t type t = { popt: ParserOptions.t; tcopt: TypecheckerOptions.t; backend: Provider_backend.t; deps_mode: Typing_deps_mode.t; entries: entries; } let empty_for_tool ~popt ~tcopt ~backend ~deps_mode = { popt; tcopt; backend; deps_mode; entries = Relative_path.Map.empty } let empty_for_worker ~popt ~tcopt ~deps_mode = { popt; tcopt; backend = Provider_backend.Shared_memory; deps_mode; entries = Relative_path.Map.empty; } let empty_for_test ~popt ~tcopt ~deps_mode = { popt; tcopt; backend = Provider_backend.Shared_memory; deps_mode; entries = Relative_path.Map.empty; } let empty_for_debugging ~popt ~tcopt ~deps_mode = { popt; tcopt; backend = Provider_backend.Shared_memory; deps_mode; entries = Relative_path.Map.empty; } let make_entry ~(path : Relative_path.t) ~(contents : entry_contents) : entry = { path; contents; source_text = None; parser_return = None; ast_errors = None; cst = None; tast = None; all_errors = None; symbols = None; } let add_or_overwrite_entry ~(ctx : t) (entry : entry) : t = { ctx with entries = Relative_path.Map.add ctx.entries ~key:entry.path ~data:entry; } let add_or_overwrite_entry_contents ~(ctx : t) ~(path : Relative_path.t) ~(contents : string) : t * entry = let entry = make_entry ~path ~contents:(Provided_contents contents) in (add_or_overwrite_entry ~ctx entry, entry) let add_entry_if_missing ~(ctx : t) ~(path : Relative_path.t) : t * entry = match Relative_path.Map.find_opt ctx.entries path with | Some entry -> (ctx, entry) | None -> let entry = make_entry ~path ~contents:Not_yet_read_from_disk in (add_or_overwrite_entry ~ctx entry, entry) let get_popt (t : t) : ParserOptions.t = t.popt let get_tcopt (t : t) : TypecheckerOptions.t = t.tcopt let get_package_info (t : t) : PackageInfo.t = t.tcopt.GlobalOptions.tco_package_info let map_tcopt (t : t) ~(f : TypecheckerOptions.t -> TypecheckerOptions.t) : t = { t with tcopt = f t.tcopt } let get_backend (t : t) : Provider_backend.t = t.backend let get_deps_mode (t : t) : Typing_deps_mode.t = t.deps_mode let map_deps_mode (t : t) ~(f : Typing_deps_mode.t -> Typing_deps_mode.t) : t = { t with deps_mode = f t.deps_mode } let get_entries (t : t) : entries = t.entries let read_file_contents_exn (entry : entry) : string = match entry.contents with | Provided_contents contents | Contents_from_disk contents -> contents | Not_yet_read_from_disk -> (try let contents = Sys_utils.cat (Relative_path.to_absolute entry.path) in entry.contents <- Contents_from_disk contents; contents with | e -> (* Be sure to capture the exception and mark the entry contents as [Read_contents_from_disk_failed]. 
Otherwise, reading the contents may not be idempotent: 1) We attempt to read the file from disk, but it doesn't exist, so we raise an exception. 2) The file is created on disk. 3) We attempt to read the file from disk again. Now it exists, and we return a different value. *) let e = Exception.wrap e in entry.contents <- Read_contents_from_disk_failed e; Exception.reraise e) | Raise_exn_on_attempt_to_read -> failwith (Printf.sprintf "Entry %s was marked as Raise_exn_on_attempt_to_read, but an attempt was made to read its contents" (Relative_path.to_absolute entry.path)) | Read_contents_from_disk_failed e -> Exception.reraise e let read_file_contents (entry : entry) : string option = try Some (read_file_contents_exn entry) with | _ -> None let get_file_contents_if_present (entry : entry) : string option = match entry.contents with | Provided_contents contents | Contents_from_disk contents -> Some contents | Not_yet_read_from_disk | Raise_exn_on_attempt_to_read | Read_contents_from_disk_failed _ -> None (** ref_is_quarantined stores the stack at which it was last changed, so we can give better failwith error messages where appropriate. *) let ref_is_quarantined : (bool * Utils.callstack) ref = ref (false, Utils.Callstack "init") let is_quarantined () : bool = !ref_is_quarantined |> fst let set_is_quarantined_internal () : unit = match !ref_is_quarantined with | (true, Utils.Callstack stack) -> failwith ("set_is_quarantined: was already quarantined at\n" ^ stack) | (false, _) -> ref_is_quarantined := (true, Utils.Callstack (Exception.get_current_callstack_string 99)) let unset_is_quarantined_internal () : unit = match !ref_is_quarantined with | (true, _) -> ref_is_quarantined := (false, Utils.Callstack (Exception.get_current_callstack_string 99)) | (false, Utils.Callstack stack) -> failwith ("unset_is_quarantined: but quarantine had already been released at\n" ^ stack) let get_telemetry (t : t) : Telemetry.t = let telemetry = Telemetry.create () |> Telemetry.object_ ~key:"entries" ~value: (Telemetry.create () |> Telemetry.int_ ~key:"count" ~value:(Relative_path.Map.cardinal t.entries) |> Telemetry.int_ ~key:"size" ~value: (Relative_path.Map.fold t.entries ~init:0 ~f:(fun _path entry acc -> let contents = get_file_contents_if_present entry |> Option.value ~default:"" in acc + String.length contents))) |> Telemetry.string_ ~key:"backend" ~value:(t.backend |> Provider_backend.t_to_string) |> Telemetry.object_ ~key:"SharedMem" ~value:(SharedMem.SMTelemetry.get_telemetry ()) (* We get SharedMem telemetry for all providers, not just the SharedMem provider, just in case there are code paths which use SharedMem despite it not being the intended provider. 
*) in match t.backend with | Provider_backend.Local_memory { Provider_backend.shallow_decl_cache; decl_cache; folded_class_cache; reverse_naming_table_delta; fixmes; naming_db_path_ref = _; } -> let open Provider_backend in telemetry |> Decl_cache.get_telemetry decl_cache ~key:"decl_cache" |> Shallow_decl_cache.get_telemetry shallow_decl_cache ~key:"shallow_decl_cache" |> Folded_class_cache.get_telemetry folded_class_cache ~key:"folded_class_cache" |> Reverse_naming_table_delta.get_telemetry reverse_naming_table_delta ~key:"reverse_naming_table_delta" |> Fixmes.get_telemetry fixmes ~key:"fixmes" | _ -> telemetry let reset_telemetry (t : t) : unit = match t.backend with | Provider_backend.Local_memory { Provider_backend.shallow_decl_cache; decl_cache; folded_class_cache; reverse_naming_table_delta = _; fixmes = _; naming_db_path_ref = _; } -> Provider_backend.Decl_cache.reset_telemetry decl_cache; Provider_backend.Shallow_decl_cache.reset_telemetry shallow_decl_cache; Provider_backend.Folded_class_cache.reset_telemetry folded_class_cache; () | _ -> () let ctx_with_pessimisation_info_exn ctx info = match ctx.backend with | Provider_backend.Pessimised_shared_memory _ -> { ctx with backend = Provider_backend.Pessimised_shared_memory info } | _ -> failwith "This operation is only supported on contexts with a Provider_backend.Pessimised_shared_memory backend." let noautodynamic this_class = match this_class with | None -> false | Some sc -> Typing_defs.Attributes.mem Naming_special_names.UserAttributes.uaNoAutoDynamic sc.Shallow_decl_defs.sc_user_attributes let implicit_sdt_for_class ctx this_class = TypecheckerOptions.everything_sdt (get_tcopt ctx) && not (noautodynamic this_class) let implicit_sdt_for_fun ctx fe = TypecheckerOptions.everything_sdt (get_tcopt ctx) && not fe.Typing_defs.fe_no_auto_dynamic let no_auto_likes_for_fun fe = fe.Typing_defs.fe_no_auto_likes let set_autocomplete_mode t = let tcopt = TypecheckerOptions.set_tco_autocomplete_mode t.tcopt in { t with tcopt }
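A short sketch (not part of this file) of the entry lifecycle implemented above, assuming the caller already holds a `ctx` and a `path`; it shows why repeat reads are idempotent.

let _example_entry_contents
    (ctx : Provider_context.t) (path : Relative_path.t) : string option =
  (* Reuses an existing entry, or creates one in Not_yet_read_from_disk. *)
  let (_ctx, entry) = Provider_context.add_entry_if_missing ~ctx ~path in
  (* The first call may hit the disk; the result (or the exception) is then
     memoized on the entry, so subsequent calls return the same value. *)
  Provider_context.read_file_contents entry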
OCaml Interface
hhvm/hphp/hack/src/providers/provider_context.mli
(** * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the "hack" directory of this source tree. * *) module PositionedSyntaxTree : sig include module type of Full_fidelity_syntax_tree.WithSyntax (Full_fidelity_positioned_syntax) end (** Represents the file contents for a given entry. We may lazily load the file contents from disk, depending on how the entry is constructed. *) type entry_contents = | Not_yet_read_from_disk (** We've been provided a file path, but have not yet requested the contents to be read from disk. They will be read from disk the next time it is requested. *) | Contents_from_disk of string (** Terminal state. We transitioned from [Not_yet_read_from_disk] into this state after reading the file from disk. Do not construct directly; use [Provided_contents] instead. *) | Provided_contents of string (** Terminal state. The file contents were provided explicitly when this entry was constructed. We will not read from disk. *) | Read_contents_from_disk_failed of Exception.t (** Terminal state. We transitioned from [Not_yet_read_from_disk] into this state after attempting to read the file from disk, but encountering an exception. *) | Raise_exn_on_attempt_to_read (** Terminal state. Raise an exception on an attempt to read the file contents from this file. *) (** Various information associated with a given file. It's important to create an [entry] when processing data about a single file for two reasons: - To ensure that subsequent operations on the same file have a consistent view of the same file. That is, they won't read the file from disk twice and potentially introduce a race condition. - To ensure that subsequent operations on the same file don't recalculate the same data (such as an AST). This is important for performance, particularly for IDE operation latency. To create a new entry for a file, use [Provider_context.add_entry]. There should generally be no more than one or two entries inside the [Provider_context.t] at a given time. Be careful not to try to store every single file's data in memory at once. Once you're done processing a file (e.g. you have the TAST and don't need to access further data), then you should discard the [entry] and the [Provider_context.t] that it came from. All of these fields are monotonic, unless otherwise noted. They only transition forward through states, never backwards (e.g. from None -> Some), and don't lose information in doing so. Monotonic fields don't need to be invalidated. *) type entry = { path: Relative_path.t; mutable contents: entry_contents; (** Derived from file contents of [path], which was possibly read from disk. *) mutable source_text: Full_fidelity_source_text.t option; (** Derived from [contents]; contains additional preprocessing. *) mutable parser_return: Parser_return.t option; (** this parser_return, if present, came from source_text via Ast_provider.parse under ~full:true *) mutable ast_errors: Errors.t option; (** same invariant as parser_return *) mutable cst: PositionedSyntaxTree.t option; mutable tast: Tast.program Tast_with_dynamic.t option; (** NOT monotonic: depends on the decls of other files. *) mutable all_errors: Errors.t option; (** NOT monotonic for the same reason as [tast]. *) mutable symbols: Relative_path.t SymbolOccurrence.t list option; } (** We often operate on collection of entries. 
*) type entries = entry Relative_path.Map.t (** A context allowing the caller access to data for files and symbols in the codebase. In particular, this is used as a parameter to [Decl_provider] functions to access the decl for a given symbol. Depending on the [backend] setting, data may be cached in local memory, in shared memory, out of process, etc. You can examine an individual file in the codebase by constructing an [entry] for it. For example, you can call [Provider_context.add_entry] to create a new [entry], and then [Tast_provider.compute_tast_and_errors_unquarantined]. Some operations may make changes to global state (e.g. write to shared memory heaps). To ensure that no changes escape the scope of your operation, use [Provider_utils.respect_but_quarantine_unsaved_changes]. *) type t (** The empty context, for use at the top-level of stand-alone tools which don't have a [ServerEnv.env]. If you have a [ServerEnv.env], you probably want to use [Provider_utils.ctx_from_server_env] instead. *) val empty_for_tool : popt:ParserOptions.t -> tcopt:TypecheckerOptions.t -> backend:Provider_backend.t -> deps_mode:Typing_deps_mode.t -> t (** The empty context, for use with Multiworker workers. This assumes that the backend is shared memory. We don't want to serialize and send the entire [ServerEnv.env] to these workers because a [ServerEnv.env] contains large data objects (such as the forward naming table). *) val empty_for_worker : popt:ParserOptions.t -> tcopt:TypecheckerOptions.t -> deps_mode:Typing_deps_mode.t -> t (** The empty context, for use in tests, where there may not be a [ServerEnv.env] available. *) val empty_for_test : popt:ParserOptions.t -> tcopt:TypecheckerOptions.t -> deps_mode:Typing_deps_mode.t -> t (** The empty context, for use in debugging aides in production code, where there may not be a [ServerEnv.env] available. *) val empty_for_debugging : popt:ParserOptions.t -> tcopt:TypecheckerOptions.t -> deps_mode:Typing_deps_mode.t -> t (** Creates an entry. *) val make_entry : path:Relative_path.t -> contents:entry_contents -> entry (** Adds the entry into the supplied [Provider_context.t], overwriting if the context already had an entry of the same path, and returns a new [Provider_context.t] which includes that entry. Note: for most callers, [add_entry_if_missing] is more appropriate. *) val add_or_overwrite_entry : ctx:t -> entry -> t (** Similar to [add_or_overwrite_entry], but makes a new entry with contents. Also returns the new entry for convenience. It's important that callers use the resulting [Provider_context.t]. That way, if a subsequent operation tries to access data about the same file, it will get the [entry] we just added rather than reading from disk. *) val add_or_overwrite_entry_contents : ctx:t -> path:Relative_path.t -> contents:string -> t * entry (** Similar to [add_entry], but (1) returns the existing entry if one was already there, (2) if one wasn't there, then adds a new entry by reading the contents of the path from disk; may throw if those contents can't be read. *) val add_entry_if_missing : ctx:t -> path:Relative_path.t -> t * entry (** Get the [ParserOptions.t] contained within the [t]. *) val get_popt : t -> ParserOptions.t (** Get the [TypecheckerOptions.t] contained within the [t]. *) val get_tcopt : t -> TypecheckerOptions.t (** Update the [TypecheckerOptions.t] contained within the [t]. *) val map_tcopt : t -> f:(TypecheckerOptions.t -> TypecheckerOptions.t) -> t (** Get the [Provider_backend.t] that backs this [t]. 
*) val get_backend : t -> Provider_backend.t (** Get the [Typing_deps_mode.t] that backs this [t]. *) val get_deps_mode : t -> Typing_deps_mode.t (** Update the [Typing_deps_mode.t] that backs this [t]. *) val map_deps_mode : t -> f:(Typing_deps_mode.t -> Typing_deps_mode.t) -> t (** Get the entries currently contained in this [t]. *) val get_entries : t -> entries (** Return the contents for the file backing this entry. This may involve a disk read if the [entry_contents] are backed by disk. Consequently, this function may fail and return [None]. Idempotent: future calls to this function will return the same value. *) val read_file_contents : entry -> string option (** Same as [read_file_contents], but raises an exception if the file contents could not be read. Idempotent: future calls to this function will return the same value or raise the same exception. *) val read_file_contents_exn : entry -> string (** Get the file contents from this entry if they've already been computed, otherwise return [None]. This is mostly useful for telemetry, which doesn't want to trigger a file-read event. *) val get_file_contents_if_present : entry -> string option (** Are we within [Provider_utils.respect_but_quarantine_unsaved_changes] ? *) val is_quarantined : unit -> bool (** Internal functions **) (** Called by [Provider_utils.respect_but_quarantine_unsaved_changes] upon entry. Don't call it directly yourself. *) val set_is_quarantined_internal : unit -> unit (** Called by [Provider_utils.respect_but_quarantine_unsaved_changes] upon exit. Don't call it directly yourself. *) val unset_is_quarantined_internal : unit -> unit (** Telemetry for a provider_context includes the current cache state of its backend, plus 'counters' like how many times cache has been read or evicted. *) val get_telemetry : t -> Telemetry.t (** This function resets the 'counters' associated with telemetry. *) val reset_telemetry : t -> unit (** Given a context that uses [Provider_backend.Pessimised_shared_memory] as its backend, return a context with the backend updated to use the given [pessimisation_info] instead. Due to the stateful nature of setting backends in general (see the Provider_backend.set_* functions), we make no attempt to support situations where the original backend isn't [Provider_backend.Pessimised_shared_memory] already and fail instead. *) val ctx_with_pessimisation_info_exn : t -> Provider_backend.pessimisation_info -> t val implicit_sdt_for_class : t -> Shallow_decl_defs.shallow_class option -> bool val implicit_sdt_for_fun : t -> Shallow_decl_defs.fun_decl -> bool val no_auto_likes_for_fun : Shallow_decl_defs.fun_decl -> bool val get_package_info : t -> PackageInfo.t (** Set the type checker options to track autocomplete mode *) val set_autocomplete_mode : t -> t
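A sketch of the unsaved-buffer use case this interface supports: overlaying editor contents so that later consumers of the returned context see them instead of whatever is on disk. The Hack snippet in `~contents` is purely illustrative.

let _example_unsaved_buffer
    (ctx : Provider_context.t) (path : Relative_path.t) :
    Provider_context.t * Provider_context.entry =
  (* Consumers of the returned ctx will see these contents for [path]. *)
  Provider_context.add_or_overwrite_entry_contents
    ~ctx
    ~path
    ~contents:"<?hh\nfunction f(): int { return 1; }\n"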
OCaml
hhvm/hphp/hack/src/providers/provider_utils.ml
(* * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the "hack" directory of this source tree. * *) open Hh_prelude let invalidate_tast_cache_of_entry (entry : Provider_context.entry) : unit = entry.Provider_context.tast <- None; entry.Provider_context.all_errors <- None; () let invalidate_local_decl_caches_for_file (local_memory : Provider_backend.local_memory) (file_info : FileInfo.t) : unit = let { Provider_backend.shallow_decl_cache; folded_class_cache; decl_cache; reverse_naming_table_delta = _; fixmes = _; naming_db_path_ref = _; } = local_memory in let open FileInfo in let open Provider_backend in (* Consideration: would it have been better to decl-diff, detect when the shallow-decls are unchanged, and if so avoid invalidating all the folded-decls and TASTs? Maybe. That would be better in the case of one file change, but worse in the case of 5000 file changes. *) (* Shallow decl cache: we only need to clear the ones affected *) List.iter file_info.classes ~f:(fun (_, name, _) -> Shallow_decl_cache.remove shallow_decl_cache ~key:(Shallow_decl_cache_entry.Shallow_class_decl name)); (* Decl cache: we don't track fine-grained dependencies, and therefore we should be evicting everything. It might be possible to do decl-diffing on shallow-decls and if they're unchanged, then avoid invalidating the folded decls. That would be better in the case of just one disk file change notification, but worse in the case of 5000 since it'd require getting shallow-decls on all of them just to compare, even if they weren't actually needed. I tried evicting everything but it was far too slow. That will need to be fixed. But for now, let's settle for evicting decls which we know are affected. This way at least the user has a fallback of opening relevant files in the IDE to get their relevant decls correct. *) let open Provider_backend.Decl_cache_entry in List.iter file_info.consts ~f:(fun (_, name, _) -> Decl_cache.remove decl_cache ~key:(Gconst_decl name)); List.iter file_info.funs ~f:(fun (_, name, _) -> Decl_cache.remove decl_cache ~key:(Fun_decl name)); List.iter file_info.typedefs ~f:(fun (_, name, _) -> Decl_cache.remove decl_cache ~key:(Typedef_decl name)); List.iter file_info.classes ~f:(fun (_, name, _) -> Decl_cache.remove decl_cache ~key:(Class_decl name); Folded_class_cache.remove folded_class_cache ~key:(Folded_class_cache_entry.Folded_class_decl name)); () let invalidate_local_decl_caches_for_entries (local_memory : Provider_backend.local_memory) (entries : Provider_context.entries) : unit = let invalidate_for_entry _path entry = match entry.Provider_context.parser_return with | None -> () (* hasn't been parsed, hence nothing to invalidate *) | Some { Parser_return.ast; _ } -> let file_info = Nast.get_def_names ast in invalidate_local_decl_caches_for_file local_memory file_info in Relative_path.Map.iter entries ~f:invalidate_for_entry let ctx_from_server_env (env : ServerEnv.env) : Provider_context.t = (* TODO: backend should be stored in [env]. *) Provider_context.empty_for_tool ~popt:env.ServerEnv.popt ~tcopt:env.ServerEnv.tcopt ~backend:(Provider_backend.get ()) ~deps_mode:env.ServerEnv.deps_mode let respect_but_quarantine_unsaved_changes ~(ctx : Provider_context.t) ~(f : unit -> 'a) : 'a = let backend_pushed = ref false in let quarantine_set = ref false in (* This function will (1) enter quarantine, (2) do the callback "f", (3) leave quarantine.
If an exception arises during step (1,2) then nevertheless we guarantee that quarantine is safely left. If an exception arises during step (3) then we'll raise an exception but the program state has become unstable... *) let enter_quarantine_exn () = begin match Provider_context.get_backend ctx with | Provider_backend.Shared_memory -> Ast_provider.local_changes_push_sharedmem_stack (); Decl_provider.local_changes_push_sharedmem_stack (); Shallow_classes_provider.local_changes_push_sharedmem_stack (); File_provider.local_changes_push_sharedmem_stack (); Fixme_provider.local_changes_push_sharedmem_stack (); Naming_provider.local_changes_push_sharedmem_stack (); SharedMem.set_allow_hashtable_writes_by_current_process false | Provider_backend.Rust_provider_backend backend -> Rust_provider_backend.push_local_changes backend; Ast_provider.local_changes_push_sharedmem_stack (); (* Shallow classes are stored in Rust when we're using Rust_provider_backend, but member filters are not, so we still need to push/pop the sharedmem stack for member filters. *) Shallow_classes_provider.local_changes_push_sharedmem_stack (); Fixme_provider.local_changes_push_sharedmem_stack (); SharedMem.set_allow_hashtable_writes_by_current_process false | Provider_backend.Local_memory local -> Relative_path.Map.iter (Provider_context.get_entries ctx) ~f:(fun _path entry -> let (_ : Nast.program) = Ast_provider.compute_ast ~popt:(Provider_context.get_popt ctx) ~entry in ()); invalidate_local_decl_caches_for_entries local (Provider_context.get_entries ctx) | _ -> () end; backend_pushed := true; Ide_parser_cache.activate (); Errors.set_allow_errors_in_default_path true; Provider_context.set_is_quarantined_internal (); quarantine_set := true; () in let leave_quarantine_exn () = if !quarantine_set then Provider_context.unset_is_quarantined_internal (); Errors.set_allow_errors_in_default_path false; Ide_parser_cache.deactivate (); if !backend_pushed then match Provider_context.get_backend ctx with | Provider_backend.Shared_memory -> Ast_provider.local_changes_pop_sharedmem_stack (); Decl_provider.local_changes_pop_sharedmem_stack (); Shallow_classes_provider.local_changes_pop_sharedmem_stack (); File_provider.local_changes_pop_sharedmem_stack (); Fixme_provider.local_changes_pop_sharedmem_stack (); Naming_provider.local_changes_pop_sharedmem_stack (); SharedMem.set_allow_hashtable_writes_by_current_process true; SharedMem.invalidate_local_caches () | Provider_backend.Rust_provider_backend backend -> Rust_provider_backend.pop_local_changes backend; Ast_provider.local_changes_pop_sharedmem_stack (); Shallow_classes_provider.local_changes_pop_sharedmem_stack (); Fixme_provider.local_changes_pop_sharedmem_stack (); SharedMem.set_allow_hashtable_writes_by_current_process true; SharedMem.invalidate_local_caches () | Provider_backend.Local_memory local -> invalidate_local_decl_caches_for_entries local (Provider_context.get_entries ctx) | _ -> () in let (_errors, result) = Errors.do_ (fun () -> Utils.try_finally ~f:(fun () -> enter_quarantine_exn (); f ()) ~finally:leave_quarantine_exn) in result
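A usage sketch for the quarantine wrapper above, not part of the original file: the `Tast_provider.compute_tast_and_errors_unquarantined` call follows the usage suggested in provider_context.mli, and error handling is elided.

let _example_quarantined_tast
    (ctx : Provider_context.t) (entry : Provider_context.entry) =
  Provider_utils.respect_but_quarantine_unsaved_changes ~ctx ~f:(fun () ->
      (* Inside [f], unsaved-file changes in [ctx] are visible to providers;
         on exit, the pushed sharedmem stacks and local caches are popped. *)
      Tast_provider.compute_tast_and_errors_unquarantined ~ctx ~entry)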
OCaml Interface
hhvm/hphp/hack/src/providers/provider_utils.mli
(* * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the "hack" directory of this source tree. * *) (** Construct a [Provider_context.t] from the configuration information contained within a [ServerEnv.env]. *) val ctx_from_server_env : ServerEnv.env -> Provider_context.t (** Load the declarations of [t] into any global-memory storage, then call [f], then unload those declarations. Quarantine is REQUIRED in clientIdeDaemon and hh_server scenarios because it embodies local-file-changes and the naming-table updates therein, and if you try to typecheck local files without those updates then it will often fail. Quarantine is INAPPROPRIATE in mapreduce or other bulk-checking scenarios which operate solely off files-on-disk and have no concept of unsaved-file-changes. TODO: It's a bit confusing that quarantining is predicated upon ctx, and hopefully we'll remove that dependency in future. *) val respect_but_quarantine_unsaved_changes : ctx:Provider_context.t -> f:(unit -> 'a) -> 'a val invalidate_tast_cache_of_entry : Provider_context.entry -> unit val invalidate_local_decl_caches_for_file : Provider_backend.local_memory -> FileInfo.t -> unit val invalidate_local_decl_caches_for_entries : Provider_backend.local_memory -> Provider_context.entries -> unit
OCaml
hhvm/hphp/hack/src/providers/rust_provider_backend.ml
(* * Copyright (c) Meta Platforms, Inc. and affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the "hack" directory of this source tree. * *) open Hh_prelude type t external register_custom_types : unit -> unit = "hh_rust_provider_backend_register_custom_types" let () = register_custom_types () external make_ffi : root:string -> hhi_root:string -> tmp:string -> ParserOptions.t -> t = "hh_rust_provider_backend_make" external push_local_changes_ffi : t -> unit = "hh_rust_provider_backend_push_local_changes" external pop_local_changes_ffi : t -> unit = "hh_rust_provider_backend_pop_local_changes" external set_ctx_empty : t -> bool -> unit = "hh_rust_provider_backend_set_ctx_empty" [@@noalloc] type find_symbol_fn = string -> (FileInfo.pos * FileInfo.name_type) option type ctx_proxy = { get_entry_contents: Relative_path.t -> string option; is_pos_in_ctx: FileInfo.pos -> bool; find_fun_canon_name_in_context: string -> string option; find_type_canon_name_in_context: string -> string option; find_const_in_context: find_symbol_fn; find_fun_in_context: find_symbol_fn; find_type_in_context: find_symbol_fn; find_module_in_context: find_symbol_fn; } let ctx_proxy_ref : ctx_proxy option ref = ref None let with_ctx_proxy_opt t ctx_proxy_opt f = assert (Option.is_none !ctx_proxy_ref); ctx_proxy_ref := ctx_proxy_opt; set_ctx_empty t (Option.is_none ctx_proxy_opt); try let result = f () in ctx_proxy_ref := None; set_ctx_empty t true; result with | e -> ctx_proxy_ref := None; set_ctx_empty t true; raise e let get_entry_contents x = match !ctx_proxy_ref with | None -> None | Some { get_entry_contents; _ } -> get_entry_contents x let is_pos_in_ctx x = match !ctx_proxy_ref with | None -> false | Some { is_pos_in_ctx; _ } -> is_pos_in_ctx x let find_fun_canon_name_in_context x = match !ctx_proxy_ref with | None -> None | Some { find_fun_canon_name_in_context; _ } -> find_fun_canon_name_in_context x let find_type_canon_name_in_context x = match !ctx_proxy_ref with | None -> None | Some { find_type_canon_name_in_context; _ } -> find_type_canon_name_in_context x let find_const_in_context x = match !ctx_proxy_ref with | None -> None | Some { find_const_in_context; _ } -> find_const_in_context x let find_fun_in_context x = match !ctx_proxy_ref with | None -> None | Some { find_fun_in_context; _ } -> find_fun_in_context x let find_type_in_context x = match !ctx_proxy_ref with | None -> None | Some { find_type_in_context; _ } -> find_type_in_context x let find_module_in_context x = match !ctx_proxy_ref with | None -> None | Some { find_module_in_context; _ } -> find_module_in_context x let () = Callback.register "hh_rust_provider_backend_get_entry_contents" get_entry_contents; Callback.register "hh_rust_provider_backend_is_pos_in_ctx" is_pos_in_ctx; Callback.register "hh_rust_provider_backend_find_fun_canon_name_in_context" find_fun_canon_name_in_context; Callback.register "hh_rust_provider_backend_find_type_canon_name_in_context" find_type_canon_name_in_context; Callback.register "hh_rust_provider_backend_find_const_in_context" find_const_in_context; Callback.register "hh_rust_provider_backend_find_fun_in_context" find_fun_in_context; Callback.register "hh_rust_provider_backend_find_type_in_context" find_type_in_context; Callback.register "hh_rust_provider_backend_find_module_in_context" find_module_in_context; () module Decl = struct module type Store = sig type key type value val get : t -> key -> value option val clear_cache : unit -> unit end module 
StoreWithLocalCache (Key : SharedMem.Key) (Value : SharedMem.Value) (Ffi : sig val get : t -> Key.t -> Value.t option end) : Store with type key = Key.t and type value = Value.t = struct type key = Key.t type value = Value.t module Cache = SharedMem.FreqCache (Key) (Value) (struct let capacity = 1000 end) let clear_cache = Cache.clear let log_hit_rate ~hit = let hit = if hit then 1. else 0. in Measure.sample (Value.description ^ " (ffi cache hit rate)") hit; Measure.sample "ALL ffi cache hit rate" hit let get t key = let v = Cache.get key in if SharedMem.SMTelemetry.hh_log_level () > 0 then log_hit_rate ~hit:(Option.is_some v); match v with | Some _ -> v | None -> let value_opt = Ffi.get t key in (match value_opt with | Some value -> Cache.add key value | None -> ()); value_opt end module Funs = StoreWithLocalCache (StringKey) (struct type t = Shallow_decl_defs.fun_decl let description = "Decl_Fun" end) (struct external get : t -> string -> Shallow_decl_defs.fun_decl option = "hh_rust_provider_backend_get_fun" end) module ShallowClasses = StoreWithLocalCache (StringKey) (struct type t = Shallow_decl_defs.class_decl let description = "Decl_ShallowClass" end) (struct external get : t -> string -> Shallow_decl_defs.class_decl option = "hh_rust_provider_backend_get_shallow_class" end) module Typedefs = StoreWithLocalCache (StringKey) (struct type t = Shallow_decl_defs.typedef_decl let description = "Decl_Typedef" end) (struct external get : t -> string -> Shallow_decl_defs.typedef_decl option = "hh_rust_provider_backend_get_typedef" end) module GConsts = StoreWithLocalCache (StringKey) (struct type t = Shallow_decl_defs.const_decl let description = "Decl_GConst" end) (struct external get : t -> string -> Shallow_decl_defs.const_decl option = "hh_rust_provider_backend_get_gconst" end) module Modules = StoreWithLocalCache (StringKey) (struct type t = Shallow_decl_defs.module_decl let description = "Decl_Module" end) (struct external get : t -> string -> Shallow_decl_defs.module_decl option = "hh_rust_provider_backend_get_module" end) module FoldedClasses = StoreWithLocalCache (StringKey) (struct type t = Decl_defs.decl_class_type let description = "Decl_Class" end) (struct external get : t -> string -> Decl_defs.decl_class_type option = "hh_rust_provider_backend_get_folded_class" end) let decl_store t = let noop_add _ _ = () in let noop () = () in (* Rely on lazy member lookup. *) let get_none _ = None in Decl_store. 
{ add_prop = noop_add; get_prop = get_none; add_static_prop = noop_add; get_static_prop = get_none; add_method = noop_add; get_method = get_none; add_static_method = noop_add; get_static_method = get_none; add_constructor = noop_add; get_constructor = get_none; add_class = noop_add; get_class = FoldedClasses.get t; add_fun = noop_add; get_fun = Funs.get t; add_typedef = noop_add; get_typedef = Typedefs.get t; add_gconst = noop_add; get_gconst = GConsts.get t; add_module = noop_add; get_module = Modules.get t; pop_local_changes = noop; push_local_changes = noop; } let did_set_decl_store = ref false let set_decl_store t = if not !did_set_decl_store then ( did_set_decl_store := true; Decl_store.set (decl_store t) ) external direct_decl_parse_and_cache : t -> Relative_path.t -> string -> Direct_decl_parser.parsed_file_with_hashes = "hh_rust_provider_backend_direct_decl_parse_and_cache" let direct_decl_parse_and_cache t = set_decl_store t; direct_decl_parse_and_cache t external add_shallow_decls : t -> (string * Shallow_decl_defs.decl) list -> unit = "hh_rust_provider_backend_add_shallow_decls" let add_shallow_decls t = set_decl_store t; add_shallow_decls t let get_fun t ctx name = set_decl_store t; with_ctx_proxy_opt t ctx @@ fun () -> Funs.get t name let get_shallow_class t ctx name = set_decl_store t; with_ctx_proxy_opt t ctx @@ fun () -> ShallowClasses.get t name let get_typedef t ctx name = set_decl_store t; with_ctx_proxy_opt t ctx @@ fun () -> Typedefs.get t name let get_gconst t ctx name = set_decl_store t; with_ctx_proxy_opt t ctx @@ fun () -> GConsts.get t name let get_module t ctx name = set_decl_store t; with_ctx_proxy_opt t ctx @@ fun () -> Modules.get t name let get_folded_class t ctx name = set_decl_store t; with_ctx_proxy_opt t ctx @@ fun () -> FoldedClasses.get t name external oldify_defs_ffi : t -> FileInfo.names -> unit = "hh_rust_provider_backend_oldify_defs" external remove_old_defs_ffi : t -> FileInfo.names -> unit = "hh_rust_provider_backend_remove_old_defs" external remove_defs_ffi : t -> FileInfo.names -> unit = "hh_rust_provider_backend_remove_defs" external get_old_defs_ffi : t -> FileInfo.names -> Shallow_decl_defs.class_decl option SMap.t * Shallow_decl_defs.fun_decl option SMap.t * Shallow_decl_defs.typedef_decl option SMap.t * Shallow_decl_defs.const_decl option SMap.t * Shallow_decl_defs.module_decl option SMap.t = "hh_rust_provider_backend_get_old_defs" let clear_caches () = Funs.clear_cache (); ShallowClasses.clear_cache (); FoldedClasses.clear_cache (); Typedefs.clear_cache (); GConsts.clear_cache (); Modules.clear_cache (); () let oldify_defs t names = set_decl_store t; oldify_defs_ffi t names; clear_caches (); () let remove_old_defs t names = set_decl_store t; remove_old_defs_ffi t names; clear_caches (); () let remove_defs t names = set_decl_store t; remove_defs_ffi t names; clear_caches (); () let get_old_defs t names = set_decl_store t; get_old_defs_ffi t names external declare_folded_class : t -> string -> unit = "hh_rust_provider_backend_declare_folded_class" let declare_folded_class t ctx name = set_decl_store t; with_ctx_proxy_opt t ctx @@ fun () -> declare_folded_class t name end let make popt = let backend = make_ffi ~root:Relative_path.(path_of_prefix Root) ~hhi_root:Relative_path.(path_of_prefix Hhi) ~tmp:Relative_path.(path_of_prefix Tmp) popt in Decl.set_decl_store backend; backend let set backend = Decl.set_decl_store backend let push_local_changes t = Decl.clear_caches (); push_local_changes_ffi t let pop_local_changes t = Decl.clear_caches 
(); pop_local_changes_ffi t module File = struct type file_type = | Disk of string | Ide of string external get : t -> Relative_path.t -> file_type option = "hh_rust_provider_backend_file_provider_get" external get_contents : t -> Relative_path.t -> string = "hh_rust_provider_backend_file_provider_get_contents" external provide_file_for_tests : t -> Relative_path.t -> string -> unit = "hh_rust_provider_backend_file_provider_provide_file_for_tests" external provide_file_for_ide : t -> Relative_path.t -> string -> unit = "hh_rust_provider_backend_file_provider_provide_file_for_ide" external provide_file_hint : t -> Relative_path.t -> file_type -> unit = "hh_rust_provider_backend_file_provider_provide_file_hint" external remove_batch : t -> Relative_path.Set.t -> unit = "hh_rust_provider_backend_file_provider_remove_batch" end module Naming = struct module type ReverseNamingTable = sig type pos val add : t -> string -> pos -> unit val get_pos : t -> ctx_proxy option -> string -> pos option val remove_batch : t -> string list -> unit end module Types = struct type pos = FileInfo.pos * Naming_types.kind_of_type external add : t -> string -> pos -> unit = "hh_rust_provider_backend_naming_types_add" external get_pos : t -> string -> pos option = "hh_rust_provider_backend_naming_types_get_pos" external remove_batch : t -> string list -> unit = "hh_rust_provider_backend_naming_types_remove_batch" external get_canon_name : t -> string -> string option = "hh_rust_provider_backend_naming_types_get_canon_name" let get_pos t ctx name = with_ctx_proxy_opt t ctx @@ fun () -> get_pos t name let get_canon_name t ctx name = with_ctx_proxy_opt t ctx @@ fun () -> get_canon_name t name end module Funs = struct type pos = FileInfo.pos external add : t -> string -> pos -> unit = "hh_rust_provider_backend_naming_funs_add" external get_pos : t -> string -> pos option = "hh_rust_provider_backend_naming_funs_get_pos" external remove_batch : t -> string list -> unit = "hh_rust_provider_backend_naming_funs_remove_batch" external get_canon_name : t -> string -> string option = "hh_rust_provider_backend_naming_funs_get_canon_name" let get_pos t ctx name = with_ctx_proxy_opt t ctx @@ fun () -> get_pos t name let get_canon_name t ctx name = with_ctx_proxy_opt t ctx @@ fun () -> get_canon_name t name end module Consts = struct type pos = FileInfo.pos external add : t -> string -> pos -> unit = "hh_rust_provider_backend_naming_consts_add" external get_pos : t -> string -> pos option = "hh_rust_provider_backend_naming_consts_get_pos" external remove_batch : t -> string list -> unit = "hh_rust_provider_backend_naming_consts_remove_batch" let get_pos t ctx name = with_ctx_proxy_opt t ctx @@ fun () -> get_pos t name end module Modules = struct type pos = FileInfo.pos external add : t -> string -> pos -> unit = "hh_rust_provider_backend_naming_modules_add" external get_pos : t -> string -> pos option = "hh_rust_provider_backend_naming_modules_get_pos" external remove_batch : t -> string list -> unit = "hh_rust_provider_backend_naming_modules_remove_batch" let get_pos t ctx name = with_ctx_proxy_opt t ctx @@ fun () -> get_pos t name end external get_db_path_ffi : t -> string option = "hh_rust_provider_backend_naming_get_db_path" let get_db_path t = get_db_path_ffi t |> Option.map ~f:(fun path -> Naming_sqlite.Db_path path) external set_db_path_ffi : t -> string -> unit = "hh_rust_provider_backend_naming_set_db_path" let set_db_path t (Naming_sqlite.Db_path path) = set_db_path_ffi t path external get_filenames_by_hash : t -> 
Typing_deps.DepSet.t -> Relative_path.Set.t = "hh_rust_provider_backend_naming_get_filenames_by_hash" end
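A sketch of a decl lookup through this module, assuming an already-constructed backend. Passing `None` for the ctx_proxy means the lookup consults only the backend's own tables, with no overlay of unsaved IDE entries; `\\foo` is a made-up symbol name (Hack names carry a leading backslash).

let _example_rust_get_fun (backend : Rust_provider_backend.t) :
    Shallow_decl_defs.fun_decl option =
  (* Internally this installs the (absent) ctx_proxy for the duration of the
     FFI call, per with_ctx_proxy_opt above. *)
  Rust_provider_backend.Decl.get_fun backend None "\\foo"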
OCaml Interface
hhvm/hphp/hack/src/providers/rust_provider_backend.mli
(* * Copyright (c) Meta Platforms, Inc. and affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the "hack" directory of this source tree. * *) type t val make : ParserOptions.t -> t (** Initialize with a given [Rust_provider_backend.t] value (constructed on the Rust side) instead of the default backend constructed by [make]. *) val set : t -> unit val push_local_changes : t -> unit val pop_local_changes : t -> unit type find_symbol_fn = string -> (FileInfo.pos * FileInfo.name_type) option type ctx_proxy = { get_entry_contents: Relative_path.t -> string option; is_pos_in_ctx: FileInfo.pos -> bool; find_fun_canon_name_in_context: string -> string option; find_type_canon_name_in_context: string -> string option; find_const_in_context: find_symbol_fn; find_fun_in_context: find_symbol_fn; find_type_in_context: find_symbol_fn; find_module_in_context: find_symbol_fn; } module Decl : sig val direct_decl_parse_and_cache : t -> Relative_path.t -> string -> Direct_decl_parser.parsed_file_with_hashes (** Directly add decls to the underlying store without processing them (no removing php_stdlib decls, deduping, or removing naming conflict losers) *) val add_shallow_decls : t -> (string * Shallow_decl_defs.decl) list -> unit val get_fun : t -> ctx_proxy option -> string -> Shallow_decl_defs.fun_decl option val get_shallow_class : t -> ctx_proxy option -> string -> Shallow_decl_defs.class_decl option val get_typedef : t -> ctx_proxy option -> string -> Shallow_decl_defs.typedef_decl option val get_gconst : t -> ctx_proxy option -> string -> Shallow_decl_defs.const_decl option val get_module : t -> ctx_proxy option -> string -> Shallow_decl_defs.module_decl option val get_folded_class : t -> ctx_proxy option -> string -> Decl_defs.decl_class_type option val declare_folded_class : t -> ctx_proxy option -> string -> unit val get_old_defs : t -> FileInfo.names -> Shallow_decl_defs.class_decl option SMap.t * Shallow_decl_defs.fun_decl option SMap.t * Shallow_decl_defs.typedef_decl option SMap.t * Shallow_decl_defs.const_decl option SMap.t * Shallow_decl_defs.module_decl option SMap.t val oldify_defs : t -> FileInfo.names -> unit val remove_defs : t -> FileInfo.names -> unit val remove_old_defs : t -> FileInfo.names -> unit end module File : sig type file_type = | Disk of string | Ide of string val get : t -> Relative_path.t -> file_type option val get_contents : t -> Relative_path.t -> string val provide_file_for_tests : t -> Relative_path.t -> string -> unit val provide_file_for_ide : t -> Relative_path.t -> string -> unit val provide_file_hint : t -> Relative_path.t -> file_type -> unit val remove_batch : t -> Relative_path.Set.t -> unit end module Naming : sig module type ReverseNamingTable = sig type pos val add : t -> string -> pos -> unit val get_pos : t -> ctx_proxy option -> string -> pos option val remove_batch : t -> string list -> unit end module Types : sig include ReverseNamingTable with type pos = FileInfo.pos * Naming_types.kind_of_type val get_canon_name : t -> ctx_proxy option -> string -> string option end module Funs : sig include ReverseNamingTable with type pos = FileInfo.pos val get_canon_name : t -> ctx_proxy option -> string -> string option end module Consts : sig include ReverseNamingTable with type pos = FileInfo.pos end module Modules : sig include ReverseNamingTable with type pos = FileInfo.pos end val get_db_path : t -> Naming_sqlite.db_path option val set_db_path : t -> Naming_sqlite.db_path -> unit (** This function searches all 
three namespaces (types, funs, consts) to find which one contains each Dep.t. The earlier functions in this module only search one specified namespace. Note: this function doesn't use the sharedmem cache of names - doesn't benefit from it, doesn't write into it. *) val get_filenames_by_hash : t -> Typing_deps.DepSet.t -> Relative_path.Set.t end
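A sketch of a reverse-naming-table lookup through this interface: resolving a type name to the file that defines it. `\\MyClass` is a made-up name, and the `FileInfo.get_pos_filename` projection is assumed from the wider codebase.

let _example_rust_naming (backend : Rust_provider_backend.t) :
    Relative_path.t option =
  match
    Rust_provider_backend.Naming.Types.get_pos backend None "\\MyClass"
  with
  | Some (pos, Naming_types.TClass) -> Some (FileInfo.get_pos_filename pos)
  | Some _
  | None ->
    None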
Rust
hhvm/hphp/hack/src/providers/rust_provider_backend_api.rs
// Copyright (c) Meta Platforms, Inc. and affiliates. // // This source code is licensed under the MIT license found in the // LICENSE file in the "hack" directory of this source tree. use ty::reason::Reason; /// A trait which includes only the ProviderBackend functionality necessary to /// typecheck a file. pub trait RustProviderBackend<R: Reason> { fn file_provider(&self) -> &dyn file_provider::FileProvider; fn naming_provider(&self) -> &dyn naming_provider::NamingProvider; fn shallow_decl_provider(&self) -> &dyn shallow_decl_provider::ShallowDeclProvider<R>; fn folded_decl_provider(&self) -> &dyn folded_decl_provider::FoldedDeclProvider<R>; fn as_any(&self) -> &dyn std::any::Any; }
Rust
hhvm/hphp/hack/src/providers/rust_provider_backend_ffi.rs
// Copyright (c) Meta Platforms, Inc. and affiliates. // // This source code is licensed under the MIT license found in the // LICENSE file in the "hack" directory of this source tree. use std::collections::BTreeMap; use std::collections::BTreeSet; use std::path::PathBuf; use std::sync::Arc; use hackrs_provider_backend::Config; use hackrs_provider_backend::FileType; use hackrs_provider_backend::HhServerProviderBackend; use ocamlrep::ptr::UnsafeOcamlPtr; use ocamlrep::FromOcamlRep; use ocamlrep::ToOcamlRep; use ocamlrep_custom::Custom; use ocamlrep_ocamlpool::ocaml_ffi; use ocamlrep_ocamlpool::ocaml_ffi_with_arena; use ocamlrep_ocamlpool::Bump; use oxidized::global_options::GlobalOptions; use oxidized::naming_types; use oxidized_by_ref::shallow_decl_defs; use pos::RelativePath; use pos::RelativePathCtx; use rust_provider_backend_api::RustProviderBackend; use ty::decl; use ty::reason::BReason; use ty::reason::NReason; pub enum BackendWrapper { Positioned(Arc<dyn RustProviderBackend<BReason>>), PositionFree(Arc<dyn RustProviderBackend<NReason>>), } impl BackendWrapper { pub fn positioned(backend: Arc<dyn RustProviderBackend<BReason>>) -> Custom<Self> { Custom::from(Self::Positioned(backend)) } pub fn position_free(backend: Arc<dyn RustProviderBackend<NReason>>) -> Custom<Self> { Custom::from(Self::PositionFree(backend)) } fn as_hh_server_backend(&self) -> Option<&HhServerProviderBackend> { match self { Self::Positioned(backend) => backend.as_any().downcast_ref(), Self::PositionFree(..) => None, } } pub fn file_provider(&self) -> &dyn file_provider::FileProvider { match self { Self::Positioned(backend) => backend.file_provider(), Self::PositionFree(backend) => backend.file_provider(), } } pub fn naming_provider(&self) -> &dyn naming_provider::NamingProvider { match self { Self::Positioned(backend) => backend.naming_provider(), Self::PositionFree(backend) => backend.naming_provider(), } } } impl ocamlrep_custom::CamlSerialize for BackendWrapper { ocamlrep_custom::caml_serialize_default_impls!(); fn serialize(&self) -> Vec<u8> { let backend = self .as_hh_server_backend() .expect("only HhServerProviderBackend can be serialized"); let config: Config = backend.config(); bincode::serialize(&config).unwrap() } fn deserialize(data: &[u8]) -> Self { let config: Config = bincode::deserialize(data).unwrap(); BackendWrapper::Positioned(Arc::new(HhServerProviderBackend::new(config).unwrap())) } } type Backend = Custom<BackendWrapper>; ocaml_ffi! { fn hh_rust_provider_backend_register_custom_types() { use ocamlrep_custom::CamlSerialize; // Safety: The OCaml runtime is currently interrupted by a call into // this function, so it's safe to interact with it. unsafe { BackendWrapper::register(); } } fn hh_rust_provider_backend_make( root: PathBuf, hhi_root: PathBuf, tmp: PathBuf, opts: GlobalOptions, ) -> Backend { let path_ctx = RelativePathCtx { root, hhi: hhi_root, tmp, ..Default::default() }; let backend = Arc::new(HhServerProviderBackend::new(Config { path_ctx, opts, db_path: None, }).unwrap()); BackendWrapper::positioned(backend) } } const UNIMPLEMENTED_MESSAGE: &str = "RustProviderBackend impls other than HhServerProviderBackend \ only support the minimum functionality necessary for typechecking a file. \ This API is not supported."; ocaml_ffi! 
{ fn hh_rust_provider_backend_push_local_changes(backend: Backend) { if let Some(backend) = backend.as_hh_server_backend() { backend.push_local_changes(); } else { unimplemented!("push_local_changes: {UNIMPLEMENTED_MESSAGE}"); } } fn hh_rust_provider_backend_pop_local_changes(backend: Backend) { if let Some(backend) = backend.as_hh_server_backend() { backend.pop_local_changes(); } else { unimplemented!("pop_local_changes: {UNIMPLEMENTED_MESSAGE}"); } } fn hh_rust_provider_backend_set_ctx_empty(backend: Backend, is_empty: bool) { if let Some(backend) = backend.as_hh_server_backend() { backend.set_ctx_empty(is_empty); } } } // Decl_provider //////////////////////////////////////////////////////////// ocaml_ffi_with_arena! { fn hh_rust_provider_backend_direct_decl_parse_and_cache<'a>( arena: &'a Bump, backend: UnsafeOcamlPtr, path: RelativePath, text: UnsafeOcamlPtr, ) -> rust_decl_ffi::OcamlParsedFileWithHashes<'a> { let backend = unsafe { get_backend(backend) }; let backend = match backend.as_hh_server_backend() { Some(backend) => backend, None => unimplemented!("direct_decl_parse_and_cache: {UNIMPLEMENTED_MESSAGE}"), }; // SAFETY: Borrow the contents of the source file from the value on the // OCaml heap rather than copying it over. This is safe as long as we // don't call into OCaml within this function scope. let text_value: ocamlrep::Value<'a> = unsafe { text.as_value() }; let text = ocamlrep::bytes_from_ocamlrep(text_value).expect("expected string"); backend.parse_and_cache_decls(path, text, arena).unwrap().into() } fn hh_rust_provider_backend_add_shallow_decls<'a>( arena: &'a Bump, backend: UnsafeOcamlPtr, decls: &[&(&'a str, shallow_decl_defs::Decl<'a>)], ) { let backend = unsafe { get_backend(backend) }; let backend = match backend.as_hh_server_backend() { Some(backend) => backend, None => unimplemented!("add_shallow_decls: {UNIMPLEMENTED_MESSAGE}"), }; backend.add_decls(decls).unwrap(); } } // UnsafeOcamlPtr is used because ocamlrep_custom::Custom cannot be used with // ocaml_ffi_with_arena (it does not implement FromOcamlRepIn, and shouldn't, // since arena-allocating a Custom would result in failing to decrement the // inner Rc and leaking memory). unsafe fn get_backend(ptr: UnsafeOcamlPtr) -> Backend { Backend::from_ocamlrep(ptr.as_value()).unwrap() } // NB: this function interacts with the OCaml runtime (but won't trigger a GC). fn to_ocaml<T: ToOcamlRep + ?Sized>(value: &T) -> UnsafeOcamlPtr { // SAFETY: this module doesn't do any concurrent interaction with the OCaml // runtime while invoking this function unsafe { UnsafeOcamlPtr::new(ocamlrep_ocamlpool::to_ocaml(value)) } } ocaml_ffi! { fn hh_rust_provider_backend_get_fun( backend: Backend, name: UnsafeOcamlPtr, ) -> UnsafeOcamlPtr { // SAFETY: We have to make sure not to use this value after calling into // the OCaml runtime (e.g. after invoking `backend.get_ocaml_*`). 
let name = unsafe { name.as_value().as_byte_string().unwrap() }; if let Some(backend) = backend.as_hh_server_backend() { if let opt @ Some(_) = unsafe { backend.get_ocaml_fun(name) } { return to_ocaml(&opt); } } let name = pos::FunName::from(std::str::from_utf8(name).unwrap()); match &*backend { BackendWrapper::Positioned(backend) => { let res: Option<Arc<decl::FunDecl<BReason>>> = backend.shallow_decl_provider() .get_fun(name) .unwrap(); to_ocaml(&res) } BackendWrapper::PositionFree(backend) => { let res: Option<Arc<decl::FunDecl<NReason>>> = backend.shallow_decl_provider() .get_fun(name) .unwrap(); to_ocaml(&res) } } } fn hh_rust_provider_backend_get_shallow_class( backend: Backend, name: UnsafeOcamlPtr, ) -> UnsafeOcamlPtr { // SAFETY: We have to make sure not to use this value after calling into // the OCaml runtime (e.g. after invoking `backend.get_ocaml_*`). let name = unsafe { name.as_value().as_byte_string().unwrap() }; if let Some(backend) = backend.as_hh_server_backend() { if let opt @ Some(_) = unsafe { backend.get_ocaml_shallow_class(name) } { return to_ocaml(&opt); } } let name = pos::TypeName::from(std::str::from_utf8(name).unwrap()); match &*backend { BackendWrapper::Positioned(backend) => { let res: Option<Arc<decl::ShallowClass<BReason>>> = backend.shallow_decl_provider() .get_class(name) .unwrap(); to_ocaml(&res) } BackendWrapper::PositionFree(backend) => { let res: Option<Arc<decl::ShallowClass<NReason>>> = backend.shallow_decl_provider() .get_class(name) .unwrap(); to_ocaml(&res) } } } fn hh_rust_provider_backend_get_typedef( backend: Backend, name: UnsafeOcamlPtr, ) -> UnsafeOcamlPtr { // SAFETY: We have to make sure not to use this value after calling into // the OCaml runtime (e.g. after invoking `backend.get_ocaml_*`). let name = unsafe { name.as_value().as_byte_string().unwrap() }; if let Some(backend) = backend.as_hh_server_backend() { if let opt @ Some(_) = unsafe { backend.get_ocaml_typedef(name) } { return to_ocaml(&opt); } } let name = pos::TypeName::from(std::str::from_utf8(name).unwrap()); match &*backend { BackendWrapper::Positioned(backend) => { let res: Option<Arc<decl::TypedefDecl<BReason>>> = backend.folded_decl_provider() .get_typedef(name) .unwrap(); to_ocaml(&res) } BackendWrapper::PositionFree(backend) => { let res: Option<Arc<decl::TypedefDecl<NReason>>> = backend.folded_decl_provider() .get_typedef(name) .unwrap(); to_ocaml(&res) } } } fn hh_rust_provider_backend_get_gconst( backend: Backend, name: UnsafeOcamlPtr, ) -> UnsafeOcamlPtr { // SAFETY: We have to make sure not to use this value after calling into // the OCaml runtime (e.g. after invoking `backend.get_ocaml_*`). let name = unsafe { name.as_value().as_byte_string().unwrap() }; if let Some(backend) = backend.as_hh_server_backend() { if let opt @ Some(_) = unsafe { backend.get_ocaml_const(name) } { return to_ocaml(&opt); } } let name = pos::ConstName::from(std::str::from_utf8(name).unwrap()); match &*backend { BackendWrapper::Positioned(backend) => { let res: Option<Arc<decl::ConstDecl<BReason>>> = backend.shallow_decl_provider() .get_const(name) .unwrap(); to_ocaml(&res) } BackendWrapper::PositionFree(backend) => { let res: Option<Arc<decl::ConstDecl<NReason>>> = backend.shallow_decl_provider() .get_const(name) .unwrap(); to_ocaml(&res) } } } fn hh_rust_provider_backend_get_module( backend: Backend, name: UnsafeOcamlPtr, ) -> UnsafeOcamlPtr { // SAFETY: We have to make sure not to use this value after calling into // the OCaml runtime (e.g. after invoking `backend.get_ocaml_*`). 
let name = unsafe { name.as_value().as_byte_string().unwrap() }; if let Some(backend) = backend.as_hh_server_backend() { if let opt @ Some(_) = unsafe { backend.get_ocaml_module(name) } { return to_ocaml(&opt); } } let name = pos::ModuleName::from(std::str::from_utf8(name).unwrap()); match &*backend { BackendWrapper::Positioned(backend) => { let res: Option<Arc<decl::ModuleDecl<BReason>>> = backend.shallow_decl_provider() .get_module(name) .unwrap(); to_ocaml(&res) } BackendWrapper::PositionFree(backend) => { let res: Option<Arc<decl::ModuleDecl<NReason>>> = backend.shallow_decl_provider() .get_module(name) .unwrap(); to_ocaml(&res) } } } fn hh_rust_provider_backend_get_folded_class( backend: Backend, name: UnsafeOcamlPtr, ) -> UnsafeOcamlPtr { // SAFETY: We have to make sure not to use this value after calling into // the OCaml runtime (e.g. after invoking `backend.get_ocaml_*`). let name = unsafe { name.as_value().as_byte_string().unwrap() }; if let Some(backend) = backend.as_hh_server_backend() { if let opt @ Some(_) = unsafe { backend.get_ocaml_folded_class(name) } { return to_ocaml(&opt); } } let name = pos::TypeName::from(std::str::from_utf8(name).unwrap()); match &*backend { BackendWrapper::Positioned(backend) => { let res: Option<Arc<decl::FoldedClass<BReason>>> = backend.folded_decl_provider() .get_class(name) .unwrap(); to_ocaml(&res) } BackendWrapper::PositionFree(backend) => { let res: Option<Arc<decl::FoldedClass<NReason>>> = backend.folded_decl_provider() .get_class(name) .unwrap(); to_ocaml(&res) } } } fn hh_rust_provider_backend_declare_folded_class( backend: Backend, name: pos::TypeName, ) { match &*backend { BackendWrapper::Positioned(backend) => { backend.folded_decl_provider().get_class(name).unwrap(); } BackendWrapper::PositionFree(backend) => { backend.folded_decl_provider().get_class(name).unwrap(); } } } fn hh_rust_provider_backend_oldify_defs( backend: Backend, names: file_info::Names, ) { if let Some(backend) = backend.as_hh_server_backend() { backend.oldify_defs(&names).unwrap() } else { unimplemented!("oldify_defs: {UNIMPLEMENTED_MESSAGE}") } } fn hh_rust_provider_backend_remove_old_defs( backend: Backend, names: file_info::Names, ) { if let Some(backend) = backend.as_hh_server_backend() { backend.remove_old_defs(&names).unwrap() } else { unimplemented!("remove_old_defs: {UNIMPLEMENTED_MESSAGE}") } } fn hh_rust_provider_backend_remove_defs( backend: Backend, names: file_info::Names, ) { if let Some(backend) = backend.as_hh_server_backend() { backend.remove_defs(&names).unwrap() } else { unimplemented!("remove_defs: {UNIMPLEMENTED_MESSAGE}") } } fn hh_rust_provider_backend_get_old_defs( backend: Backend, names: file_info::Names, ) -> ( BTreeMap<pos::TypeName, Option<Arc<decl::ShallowClass<BReason>>>>, BTreeMap<pos::FunName, Option<Arc<decl::FunDecl<BReason>>>>, BTreeMap<pos::TypeName, Option<Arc<decl::TypedefDecl<BReason>>>>, BTreeMap<pos::ConstName, Option<Arc<decl::ConstDecl<BReason>>>>, BTreeMap<pos::ModuleName, Option<Arc<decl::ModuleDecl<BReason>>>>, ) { if let Some(backend) = backend.as_hh_server_backend() { backend.get_old_defs(&names).unwrap() } else { unimplemented!("get_old_defs: {UNIMPLEMENTED_MESSAGE}") } } } // File_provider //////////////////////////////////////////////////////////// ocaml_ffi! 
{ fn hh_rust_provider_backend_file_provider_get( backend: Backend, path: RelativePath, ) -> Option<FileType> { if let Some(backend) = backend.as_hh_server_backend() { backend.file_store().get(path).unwrap() } else { // NB: This is semantically different than the above. We'll read // from disk instead of returning None for files that aren't already // present in our file store (i.e., the in-memory cache). We'll // return Some("") instead of None for files that aren't present on // disk. Some(FileType::Disk(backend.file_provider().get(path).unwrap())) } } fn hh_rust_provider_backend_file_provider_get_contents( backend: Backend, path: RelativePath, ) -> bstr::BString { backend.file_provider().get(path).unwrap() } fn hh_rust_provider_backend_file_provider_provide_file_for_tests( backend: Backend, path: RelativePath, contents: bstr::BString, ) { if let Some(backend) = backend.as_hh_server_backend() { backend.file_store().insert(path, FileType::Disk(contents)).unwrap(); } else { unimplemented!("provide_file_for_tests: {UNIMPLEMENTED_MESSAGE}") } } fn hh_rust_provider_backend_file_provider_provide_file_for_ide( backend: Backend, path: RelativePath, contents: bstr::BString, ) { if let Some(backend) = backend.as_hh_server_backend() { backend.file_store().insert(path, FileType::Ide(contents)).unwrap(); } else { unimplemented!("provide_file_for_ide: {UNIMPLEMENTED_MESSAGE}") } } fn hh_rust_provider_backend_file_provider_provide_file_hint( backend: Backend, path: RelativePath, file: FileType, ) { if let Some(backend) = backend.as_hh_server_backend() { if let FileType::Ide(_) = file { backend.file_store().insert(path, file).unwrap(); } } else { unimplemented!("provide_file_hint: {UNIMPLEMENTED_MESSAGE}") } } fn hh_rust_provider_backend_file_provider_remove_batch( backend: Backend, paths: BTreeSet<RelativePath>, ) { if let Some(backend) = backend.as_hh_server_backend() { backend.file_store().remove_batch(&mut paths.into_iter()).unwrap(); } else { unimplemented!("file_provider_remove_batch: {UNIMPLEMENTED_MESSAGE}") } } } // Naming_provider ////////////////////////////////////////////////////////// ocaml_ffi! { fn hh_rust_provider_backend_naming_types_add( backend: Backend, name: pos::TypeName, pos: (file_info::Pos, naming_types::KindOfType), ) { if let Some(backend) = backend.as_hh_server_backend() { backend.naming_table().add_type(name, &pos).unwrap(); } else { unimplemented!("naming_types_add: {UNIMPLEMENTED_MESSAGE}") } } fn hh_rust_provider_backend_naming_types_get_pos( backend: Backend, name: UnsafeOcamlPtr, ) -> UnsafeOcamlPtr { // SAFETY: We have to make sure not to use this value after calling into // the OCaml runtime (e.g. after invoking `backend.get_ocaml_*`). let name = unsafe { name.as_value().as_byte_string().unwrap() }; if let Some(backend) = backend.as_hh_server_backend() { let ocaml_value = if let Some(opt) = unsafe { backend.get_ocaml_type_pos(name) } { // Subtle: `get_ocaml_*_pos` returns `Option<UnsafeOcamlPtr>` where // the `UnsafeOcamlPtr` is a value of OCaml type `FileInfo.pos option`. // We want to just convert `opt` to an OCaml value here, not // `Some(opt)` (as we do for the decl getter FFIs). 
to_ocaml(&opt) } else { let name = pos::TypeName::from(std::str::from_utf8(name).unwrap()); to_ocaml(&backend.naming_table_with_context().get_type_pos(name).unwrap()) }; return ocaml_value; } let name = pos::TypeName::from(std::str::from_utf8(name).unwrap()); let res: Option<(file_info::Pos, naming_types::KindOfType)> = backend.naming_provider() .get_type_path_and_kind(name).unwrap() .map(|(path, kind)| { ( file_info::Pos::File(kind.into(), Arc::new(path.into())), kind, ) }); to_ocaml(&res) } fn hh_rust_provider_backend_naming_types_remove_batch( backend: Backend, names: Vec<pos::TypeName>, ) { if let Some(backend) = backend.as_hh_server_backend() { backend.naming_table().remove_type_batch(&names).unwrap(); } else { unimplemented!("naming_types_remove_batch: {UNIMPLEMENTED_MESSAGE}") } } fn hh_rust_provider_backend_naming_types_get_canon_name( backend: Backend, name: pos::TypeName, ) -> Option<pos::TypeName> { backend.naming_provider().get_canon_type_name(name).unwrap() } fn hh_rust_provider_backend_naming_funs_add( backend: Backend, name: pos::FunName, pos: file_info::Pos, ) { if let Some(backend) = backend.as_hh_server_backend() { backend.naming_table().add_fun(name, &pos).unwrap(); } else { unimplemented!("naming_funs_add: {UNIMPLEMENTED_MESSAGE}") } } fn hh_rust_provider_backend_naming_funs_get_pos( backend: Backend, name: UnsafeOcamlPtr, ) -> UnsafeOcamlPtr { // SAFETY: We have to make sure not to use this value after calling into // the OCaml runtime (e.g. after invoking `backend.get_ocaml_*`). let name = unsafe { name.as_value().as_byte_string().unwrap() }; if let Some(backend) = backend.as_hh_server_backend() { let ocaml_value = if let Some(opt) = unsafe { backend.get_ocaml_fun_pos(name) } { to_ocaml(&opt) } else { let name = pos::FunName::from(std::str::from_utf8(name).unwrap()); to_ocaml(&backend.naming_table_with_context().get_fun_pos(name).unwrap()) }; return ocaml_value; } let name = pos::FunName::from(std::str::from_utf8(name).unwrap()); let res: Option<file_info::Pos> = backend.naming_provider() .get_fun_path(name).unwrap() .map(|path| file_info::Pos::File(file_info::NameType::Fun, Arc::new(path.into()))); to_ocaml(&res) } fn hh_rust_provider_backend_naming_funs_remove_batch( backend: Backend, names: Vec<pos::FunName>, ) { if let Some(backend) = backend.as_hh_server_backend() { backend.naming_table().remove_fun_batch(&names).unwrap(); } else { unimplemented!("naming_funs_remove_batch: {UNIMPLEMENTED_MESSAGE}") } } fn hh_rust_provider_backend_naming_funs_get_canon_name( backend: Backend, name: pos::FunName, ) -> Option<pos::FunName> { backend.naming_provider().get_canon_fun_name(name).unwrap() } fn hh_rust_provider_backend_naming_consts_add( backend: Backend, name: pos::ConstName, pos: file_info::Pos, ) { if let Some(backend) = backend.as_hh_server_backend() { backend.naming_table().add_const(name, &pos).unwrap(); } else { unimplemented!("naming_consts_add: {UNIMPLEMENTED_MESSAGE}") } } fn hh_rust_provider_backend_naming_consts_get_pos( backend: Backend, name: UnsafeOcamlPtr, ) -> UnsafeOcamlPtr { // SAFETY: We have to make sure not to use this value after calling into // the OCaml runtime (e.g. after invoking `backend.get_ocaml_*`). 
let name = unsafe { name.as_value().as_byte_string().unwrap() }; if let Some(backend) = backend.as_hh_server_backend() { let ocaml_value = if let Some(opt) = unsafe { backend.get_ocaml_const_pos(name) } { to_ocaml(&opt) } else { let name = pos::ConstName::from(std::str::from_utf8(name).unwrap()); to_ocaml(&backend.naming_table_with_context().get_const_pos(name).unwrap()) }; return ocaml_value; } let name = pos::ConstName::from(std::str::from_utf8(name).unwrap()); let res: Option<file_info::Pos> = backend.naming_provider() .get_const_path(name).unwrap() .map(|path| file_info::Pos::File(file_info::NameType::Const, Arc::new(path.into()))); to_ocaml(&res) } fn hh_rust_provider_backend_naming_consts_remove_batch( backend: Backend, names: Vec<pos::ConstName>, ) { if let Some(backend) = backend.as_hh_server_backend() { backend.naming_table().remove_const_batch(&names).unwrap(); } else { unimplemented!("naming_consts_remove_batch: {UNIMPLEMENTED_MESSAGE}") } } fn hh_rust_provider_backend_naming_modules_add( backend: Backend, name: pos::ModuleName, pos: file_info::Pos, ) { if let Some(backend) = backend.as_hh_server_backend() { backend.naming_table().add_module(name, &pos).unwrap(); } else { unimplemented!("naming_modules_add: {UNIMPLEMENTED_MESSAGE}") } } fn hh_rust_provider_backend_naming_modules_get_pos( backend: Backend, name: UnsafeOcamlPtr, ) -> UnsafeOcamlPtr { // SAFETY: We have to make sure not to use this value after calling into // the OCaml runtime (e.g. after invoking `backend.get_ocaml_*`). let name = unsafe { name.as_value().as_byte_string().unwrap() }; if let Some(backend) = backend.as_hh_server_backend() { let ocaml_value = if let Some(opt) = unsafe { backend.get_ocaml_module_pos(name) } { to_ocaml(&opt) } else { let name = pos::ModuleName::from(std::str::from_utf8(name).unwrap()); to_ocaml(&backend.naming_table_with_context().get_module_pos(name).unwrap()) }; return ocaml_value; } let name = pos::ModuleName::from(std::str::from_utf8(name).unwrap()); let res: Option<file_info::Pos> = backend.naming_provider() .get_module_path(name).unwrap() .map(|path| file_info::Pos::File(file_info::NameType::Module, Arc::new(path.into()))); to_ocaml(&res) } fn hh_rust_provider_backend_naming_modules_remove_batch( backend: Backend, names: Vec<pos::ModuleName>, ) { if let Some(backend) = backend.as_hh_server_backend() { backend.naming_table().remove_module_batch(&names).unwrap(); } else { unimplemented!("naming_modules_remove_batch: {UNIMPLEMENTED_MESSAGE}") } } fn hh_rust_provider_backend_naming_get_db_path( backend: Backend, ) -> Option<PathBuf> { if let Some(backend) = backend.as_hh_server_backend() { backend.naming_table().db_path() } else { unimplemented!("naming_get_db_path: {UNIMPLEMENTED_MESSAGE}") } } fn hh_rust_provider_backend_naming_set_db_path( backend: Backend, db_path: PathBuf, ) { if let Some(backend) = backend.as_hh_server_backend() { backend.naming_table().set_db_path(db_path).unwrap() } else { unimplemented!("naming_set_db_path: {UNIMPLEMENTED_MESSAGE}") } } fn hh_rust_provider_backend_naming_get_filenames_by_hash( backend: Backend, deps: Custom<deps_rust::DepSet>, ) -> std::collections::BTreeSet<RelativePath> { if let Some(backend) = backend.as_hh_server_backend() { backend.naming_table().get_filenames_by_hash(&deps).unwrap() } else { unimplemented!("naming_get_filenames_by_hash: {UNIMPLEMENTED_MESSAGE}") } } }
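Every decl getter in this file follows the same two-step shape: first try the fast path on `HhServerProviderBackend` (`get_ocaml_*` returns an already-marshaled OCaml value when one is cached), then fall back to the typed provider for whichever reason flavor the wrapper holds and marshal the result with `to_ocaml`. The fallback half of that pattern could be factored out; the following is a hypothetical helper in the style of this file, not code that exists in it:

// Sketch of the per-reason dispatch the getters above repeat: run one
// closure per reason flavor of the wrapped backend and marshal the
// result for OCaml. (Hypothetical; uses only items already imported in
// this file.)
fn dispatch<T: ToOcamlRep>(
    backend: &BackendWrapper,
    pos: impl FnOnce(&dyn RustProviderBackend<BReason>) -> T,
    npos: impl FnOnce(&dyn RustProviderBackend<NReason>) -> T,
) -> UnsafeOcamlPtr {
    match backend {
        BackendWrapper::Positioned(b) => to_ocaml(&pos(&**b)),
        BackendWrapper::PositionFree(b) => to_ocaml(&npos(&**b)),
    }
}

// Hypothetical usage inside a getter, once `name` has been parsed:
// dispatch(
//     &backend,
//     |b| b.shallow_decl_provider().get_fun(name).unwrap(),
//     |b| b.shallow_decl_provider().get_fun(name).unwrap(),
// )

The two closures must still be written twice because the positioned and position-free decl types differ; the original spells out the full match per getter, which keeps each FFI entry self-contained at the cost of repetition.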
OCaml
hhvm/hphp/hack/src/providers/shallow_classes_provider.ml
(* * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the "hack" directory of this source tree. * *) open Hh_prelude open Shallow_decl_defs let direct_decl_parse ctx filename name = match Direct_decl_utils.direct_decl_parse ctx filename with | None -> Decl_defs.raise_decl_not_found (Some filename) name | Some parsed_file -> parsed_file.Direct_decl_utils.pfh_decls let direct_decl_parse_and_cache ctx filename name = match Direct_decl_utils.direct_decl_parse_and_cache ctx filename with | None -> Decl_defs.raise_decl_not_found (Some filename) name | Some parsed_file -> parsed_file.Direct_decl_utils.pfh_decls let fetch_remote_old_decl_flag (ctx : Provider_context.t) = TypecheckerOptions.fetch_remote_old_decls (Provider_context.get_tcopt ctx) let fetch_remote_old_decls ctx ~during_init = fetch_remote_old_decl_flag ctx && during_init let get (ctx : Provider_context.t) (name : string) : shallow_class option = let find_in_direct_decl_parse ~fill_caches path = let f = if fill_caches then direct_decl_parse_and_cache else direct_decl_parse in f ctx path name |> List.find_map ~f:(function | (n, Shallow_decl_defs.Class decl, _) when String.equal name n -> Some decl | _ -> None) in match Provider_context.get_backend ctx with | Provider_backend.Analysis -> (match Shallow_classes_heap.Classes.get name with | Some _ as decl_opt -> decl_opt | None -> failwith (Printf.sprintf "failed to get shallow class %S" name)) | Provider_backend.Rust_provider_backend backend -> Rust_provider_backend.Decl.get_shallow_class backend (Naming_provider.rust_backend_ctx_proxy ctx) name | Provider_backend.Pessimised_shared_memory info -> (match Shallow_classes_heap.Classes.get name with | Some _ as decl_opt -> decl_opt | None -> (match Naming_provider.get_class_path ctx name with | None -> None | Some path -> let open Option.Let_syntax in let* original_sc = find_in_direct_decl_parse ~fill_caches:false path in let sc = info.Provider_backend.pessimise_shallow_class path ~name original_sc in if info.Provider_backend.store_pessimised_result then Shallow_classes_heap.Classes.add name sc; Some sc)) | Provider_backend.Shared_memory -> (match Shallow_classes_heap.Classes.get name with | Some _ as decl_opt -> decl_opt | None -> (match Naming_provider.get_class_path ctx name with | None -> None | Some path -> find_in_direct_decl_parse ~fill_caches:true path)) | Provider_backend.Local_memory { Provider_backend.shallow_decl_cache; _ } -> Provider_backend.Shallow_decl_cache.find_or_add shallow_decl_cache ~key:(Provider_backend.Shallow_decl_cache_entry.Shallow_class_decl name) ~default:(fun () -> match Naming_provider.get_class_path ctx name with | None -> None | Some path -> find_in_direct_decl_parse ~fill_caches:true path) | Provider_backend.Decl_service { decl; _ } -> Decl_service_client.rpc_get_class decl name let get_batch (ctx : Provider_context.t) (names : SSet.t) : shallow_class option SMap.t = match Provider_context.get_backend ctx with | Provider_backend.Pessimised_shared_memory _ | Provider_backend.Analysis -> failwith "invalid" | Provider_backend.Rust_provider_backend be -> SSet.fold (fun name acc -> SMap.add name (Rust_provider_backend.Decl.get_shallow_class be (Naming_provider.rust_backend_ctx_proxy ctx) name) acc) names SMap.empty | Provider_backend.Shared_memory -> Shallow_classes_heap.Classes.get_batch names | Provider_backend.Local_memory _ -> failwith "get_batch not implemented for Local_memory" | Provider_backend.Decl_service _ -> 
failwith "get_batch not implemented for Decl_service" let fetch_missing_old_classes_remotely ctx ~during_init old_classes = if fetch_remote_old_decls ctx ~during_init then let missing_old_classes = SMap.filter (fun _key -> Option.is_none) old_classes |> SMap.keys in let remote_old_classes = Remote_old_decl_client.fetch_old_decls ~ctx missing_old_classes in SMap.union old_classes remote_old_classes ~combine:(fun _key decl1 decl2 -> Some (Option.first_some decl1 decl2)) else old_classes let get_old_batch (ctx : Provider_context.t) ~during_init (names : SSet.t) : shallow_class option SMap.t = match Provider_context.get_backend ctx with | Provider_backend.Pessimised_shared_memory _ | Provider_backend.Analysis -> failwith "invalid" (* TODO(sf, 2022-10-20): Reduce duplication between the following two cases. *) | Provider_backend.Rust_provider_backend be -> let (old_classes, _funs, _typedefs, _consts, _modules) = Rust_provider_backend.Decl.get_old_defs be FileInfo.{ empty_names with n_classes = names } in fetch_missing_old_classes_remotely ctx ~during_init old_classes | Provider_backend.Shared_memory -> let old_classes = Shallow_classes_heap.Classes.get_old_batch names in fetch_missing_old_classes_remotely ctx ~during_init old_classes | Provider_backend.Local_memory _ -> failwith "get_old_batch not implemented for Local_memory" | Provider_backend.Decl_service _ -> failwith "get_old_batch not implemented for Decl_service" let oldify_batch (ctx : Provider_context.t) (names : SSet.t) : unit = match Provider_context.get_backend ctx with | Provider_backend.Pessimised_shared_memory _ | Provider_backend.Analysis -> failwith "invalid" | Provider_backend.Rust_provider_backend _ | Provider_backend.Shared_memory -> Shallow_classes_heap.Classes.oldify_batch names | Provider_backend.Local_memory _ -> failwith "oldify_batch not implemented for Local_memory" | Provider_backend.Decl_service _ -> failwith "oldify_batch not implemented for Decl_service" let remove_old_batch (ctx : Provider_context.t) (names : SSet.t) : unit = match Provider_context.get_backend ctx with | Provider_backend.Pessimised_shared_memory _ | Provider_backend.Analysis -> failwith "invalid" | Provider_backend.Rust_provider_backend _ | Provider_backend.Shared_memory -> Shallow_classes_heap.Classes.remove_old_batch names | Provider_backend.Local_memory _ -> failwith "remove_old_batch not implemented for Local_memory" | Provider_backend.Decl_service _ -> failwith "remove_old_batch not implemented for Decl_service" let remove_batch (ctx : Provider_context.t) (names : SSet.t) : unit = match Provider_context.get_backend ctx with | Provider_backend.Pessimised_shared_memory _ -> failwith "invalid" | Provider_backend.Analysis | Provider_backend.Rust_provider_backend _ | Provider_backend.Shared_memory -> Shallow_classes_heap.Classes.remove_batch names | Provider_backend.Local_memory _ -> failwith "remove_batch not implemented for Local_memory" | Provider_backend.Decl_service _ -> failwith "remove_batch not implemented for Decl_service" let local_changes_push_sharedmem_stack () : unit = Shallow_classes_heap.Classes.LocalChanges.push_stack () let local_changes_pop_sharedmem_stack () : unit = Shallow_classes_heap.Classes.LocalChanges.pop_stack ()
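The `get` function above layers its lookup per backend: an in-memory heap or cache first, then the naming table to find the defining file, then a direct decl parse that may also fill the cache. A minimal usage sketch (hypothetical caller; with a `Shared_memory` backend this walks exactly that chain):

(* Sketch: resolve a class through the layered lookup in
   Shallow_classes_provider.get. *)
let describe_class ctx name =
  match Shallow_classes_provider.get ctx name with
  | None -> Printf.sprintf "%s: no such class" name
  | Some sc ->
    Printf.sprintf "%s: %d methods" name
      (List.length sc.Shallow_decl_defs.sc_methods)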
OCaml Interface
hhvm/hphp/hack/src/providers/shallow_classes_provider.mli
(*
 * Copyright (c) Facebook, Inc. and its affiliates.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the "hack" directory of this source tree.
 *
 *)

open Shallow_decl_defs

(** Return the shallow declaration of the class with the given name if it is
    present in the cache. Otherwise, compute it, store it in the cache, and
    return it. *)
val get : Provider_context.t -> string -> shallow_class option

val get_batch : Provider_context.t -> SSet.t -> shallow_class option SMap.t

val get_old_batch :
  Provider_context.t ->
  during_init:bool ->
  SSet.t ->
  shallow_class option SMap.t

val oldify_batch : Provider_context.t -> SSet.t -> unit

val remove_old_batch : Provider_context.t -> SSet.t -> unit

val remove_batch : Provider_context.t -> SSet.t -> unit

val local_changes_push_sharedmem_stack : unit -> unit

val local_changes_pop_sharedmem_stack : unit -> unit
OCaml
hhvm/hphp/hack/src/providers/tast_provider.ml
(* * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the "hack" directory of this source tree. * *) open Hh_prelude module Compute_tast = struct type t = { tast: Tast.program Tast_with_dynamic.t; telemetry: Telemetry.t; } end module Compute_tast_and_errors = struct type t = { tast: Tast.program Tast_with_dynamic.t; errors: Errors.t; telemetry: Telemetry.t; } end type _ compute_tast_mode = | Compute_tast_only : Compute_tast.t compute_tast_mode | Compute_tast_and_errors : Compute_tast_and_errors.t compute_tast_mode let compute_tast_and_errors_unquarantined_internal (type a) ~(ctx : Provider_context.t) ~(entry : Provider_context.entry) ~(mode : a compute_tast_mode) : a = match (mode, entry.Provider_context.tast, entry.Provider_context.all_errors) with | (Compute_tast_only, Some tast, _) -> { Compute_tast.tast; telemetry = Telemetry.create () } | (Compute_tast_and_errors, Some tast, Some errors) -> { Compute_tast_and_errors.tast; errors; telemetry = Telemetry.create () } | (_, _, _) -> (* prepare logging *) Provider_context.reset_telemetry ctx; let prev_ctx_telemetry = Provider_context.get_telemetry ctx in let prev_gc_telemetry = Telemetry.quick_gc_stat () in Decl_counters.set_mode HackEventLogger.PerFileProfilingConfig.DeclingTopCounts; let prev_tally_state = Counters.reset () in let start_time = Unix.gettimeofday () in (* do the work *) let ({ Parser_return.ast; _ }, ast_errors) = Ast_provider.compute_parser_return_and_ast_errors ~popt:(Provider_context.get_popt ctx) ~entry in let (naming_errors, nast) = Errors.do_with_context entry.Provider_context.path (fun () -> Naming.program ctx ast) in let (typing_errors, tast) = let do_tast_checks = match mode with | Compute_tast_only -> false | Compute_tast_and_errors -> true in Errors.do_with_context entry.Provider_context.path (fun () -> Typing_toplevel.nast_to_tast ~do_tast_checks ctx nast) in (* Logging... *) let ctx_telemetry = if Hh_logger.Level.passes_min_level Hh_logger.Level.Debug then Provider_context.get_telemetry ctx |> Telemetry.diff ~all:true ~prev:prev_ctx_telemetry else Provider_context.get_telemetry ctx in let gc_telemetry = if Hh_logger.Level.passes_min_level Hh_logger.Level.Debug then Telemetry.quick_gc_stat () |> Telemetry.diff ~all:true ~prev:prev_gc_telemetry else Telemetry.quick_gc_stat () in let telemetry = Counters.get_counters () in Counters.restore_state prev_tally_state; let telemetry = telemetry |> Telemetry.object_ ~key:"ctx" ~value:ctx_telemetry |> Telemetry.object_ ~key:"gc" ~value:gc_telemetry |> Telemetry.int_ ~key:"errors.ast" ~value:(Errors.count ast_errors) |> Telemetry.int_ ~key:"errors.nast" ~value:(Errors.count naming_errors) |> Telemetry.int_ ~key:"errors.tast" ~value:(Errors.count typing_errors) |> Telemetry.float_ ~key:"duration_decl_and_typecheck" ~value:(Unix.gettimeofday () -. 
start_time) |> Telemetry.int_ ~key:"filesize" ~value: (String.length (Provider_context.get_file_contents_if_present entry |> Option.value ~default:"")) in Hh_logger.debug "compute_tast: %s\n%s" (Relative_path.suffix entry.Provider_context.path) (Telemetry.to_string telemetry); HackEventLogger.ProfileTypeCheck.compute_tast ~telemetry ~path:entry.Provider_context.path ~start_time; (match mode with | Compute_tast_and_errors -> let errors = naming_errors |> Errors.merge typing_errors |> Errors.merge ast_errors in entry.Provider_context.tast <- Some tast; entry.Provider_context.all_errors <- Some errors; { Compute_tast_and_errors.tast; errors; telemetry } | Compute_tast_only -> entry.Provider_context.tast <- Some tast; { Compute_tast.tast; telemetry }) let compute_tast_and_errors_unquarantined ~(ctx : Provider_context.t) ~(entry : Provider_context.entry) : Compute_tast_and_errors.t = compute_tast_and_errors_unquarantined_internal ~ctx ~entry ~mode:Compute_tast_and_errors let compute_tast_unquarantined ~(ctx : Provider_context.t) ~(entry : Provider_context.entry) : Compute_tast.t = compute_tast_and_errors_unquarantined_internal ~ctx ~entry ~mode:Compute_tast_only let compute_tast_and_errors_quarantined ~(ctx : Provider_context.t) ~(entry : Provider_context.entry) : Compute_tast_and_errors.t = (* If results have already been memoized, don't bother quarantining anything *) match (entry.Provider_context.tast, entry.Provider_context.all_errors) with | (Some tast, Some errors) -> { Compute_tast_and_errors.tast; errors; telemetry = Telemetry.create () } (* Okay, we don't have memoized results, let's ensure we are quarantined before computing *) | _ -> let f () = compute_tast_and_errors_unquarantined ~ctx ~entry in (match Provider_context.is_quarantined () with | false -> Provider_utils.respect_but_quarantine_unsaved_changes ~ctx ~f | true -> f ()) let compute_tast_quarantined ~(ctx : Provider_context.t) ~(entry : Provider_context.entry) : Compute_tast.t = (* If results have already been memoized, don't bother quarantining anything *) match entry.Provider_context.tast with | Some tast -> { Compute_tast.tast; telemetry = Telemetry.create () } (* Okay, we don't have memoized results, let's ensure we are quarantined before computing *) | None -> let f () = compute_tast_and_errors_unquarantined_internal ~ctx ~entry ~mode:Compute_tast_only in (match Provider_context.is_quarantined () with | false -> Provider_utils.respect_but_quarantine_unsaved_changes ~ctx ~f | true -> f ())
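The quarantined entry points above are the ones appropriate when unsaved editor buffers may differ from disk; both check the memoized result on the entry before doing any work. A minimal IDE-side usage sketch (hypothetical caller, assuming `ctx` and `entry` were built with the usual Provider_context helpers):

(* Sketch: fetch a TAST for hover/completion-style queries, reusing the
   memoized result on the entry when present. *)
let hover_tast ~ctx ~entry =
  let { Tast_provider.Compute_tast.tast; telemetry = _ } =
    Tast_provider.compute_tast_quarantined ~ctx ~entry
  in
  tast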
OCaml Interface
hhvm/hphp/hack/src/providers/tast_provider.mli
(*
 * Copyright (c) Facebook, Inc. and its affiliates.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the "hack" directory of this source tree.
 *
 *)

module Compute_tast : sig
  type t = {
    tast: Tast.program Tast_with_dynamic.t;
    telemetry: Telemetry.t;
  }
end

module Compute_tast_and_errors : sig
  type t = {
    tast: Tast.program Tast_with_dynamic.t;
    errors: Errors.t;
    telemetry: Telemetry.t;
  }
end

(** Computes TAST and error-list (other than "name already bound" errors) by
    taking the AST in a context entry, and typechecking it, and memoizing the
    result (caching the results in the context entry). CAUTION: this function
    doesn't use a quarantine, and so is inappropriate for IDE scenarios. *)
val compute_tast_and_errors_unquarantined :
  ctx:Provider_context.t ->
  entry:Provider_context.entry ->
  Compute_tast_and_errors.t

(** Same as [compute_tast_and_errors_unquarantined], but skips computing the
    full error list. If the errors are needed at a later time, you'll have to
    incur the full cost of recomputing the entire TAST and errors. *)
val compute_tast_unquarantined :
  ctx:Provider_context.t ->
  entry:Provider_context.entry ->
  Compute_tast.t

(** This function computes TAST and error-list. At the moment, the suffix
    "quarantined" means that this function enforces a quarantine in case one
    isn't yet in force. In future, it might mean that we assert that a
    quarantine is already in force. CAUTION: this function is only appropriate
    for IDE scenarios. *)
val compute_tast_and_errors_quarantined :
  ctx:Provider_context.t ->
  entry:Provider_context.entry ->
  Compute_tast_and_errors.t

(** Same as [compute_tast_and_errors_quarantined], but skips computing the
    full error list. If the errors are needed at a later time, you'll have to
    incur the full cost of recomputing the entire TAST and errors. *)
val compute_tast_quarantined :
  ctx:Provider_context.t ->
  entry:Provider_context.entry ->
  Compute_tast.t
OCaml
hhvm/hphp/hack/src/providers/typedef_provider.ml
(*
 * Copyright (c) Meta Platforms, Inc. and affiliates.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the "hack" directory of this source tree.
 *
 *)

open Hh_prelude

type type_key = string

type typedef_decl = Typing_defs.typedef_type

let find_in_direct_decl_parse ~cache_results ctx filename name extract_decl_opt
    =
  let parse_result =
    if cache_results then
      Direct_decl_utils.direct_decl_parse_and_cache ctx filename
    else
      Direct_decl_utils.direct_decl_parse ctx filename
  in
  match parse_result with
  | None -> Decl_defs.raise_decl_not_found (Some filename) name
  | Some parsed_file ->
    let decls = parsed_file.Direct_decl_utils.pfh_decls in
    List.find_map decls ~f:(function
        | (decl_name, decl, _) when String.equal decl_name name ->
          extract_decl_opt decl
        | _ -> None)

let get_typedef (ctx : Provider_context.t) (typedef_name : type_key) :
    typedef_decl option =
  match Decl_store.((get ()).get_typedef typedef_name) with
  | Some c -> Some c
  | None ->
    (match Naming_provider.get_typedef_path ctx typedef_name with
    | Some filename ->
      find_in_direct_decl_parse
        ~cache_results:true
        ctx
        filename
        typedef_name
        Shallow_decl_defs.to_typedef_decl_opt
    | None -> None)
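Because `find_in_direct_decl_parse` is exposed in the interface below and is parametric in its extractor, it can in principle serve other decl kinds. A hedged sketch follows; the `to_const_decl_opt` helper is an assumption about Shallow_decl_defs, not something this file confirms, and note that the function raises `Decl_not_found` when the file yields no decls:

(* Sketch: reuse the generic lookup with a different extractor. *)
let get_const_decl ctx filename name =
  Typedef_provider.find_in_direct_decl_parse
    ~cache_results:false
    ctx
    filename
    name
    Shallow_decl_defs.to_const_decl_opt (* assumed helper *)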
OCaml Interface
hhvm/hphp/hack/src/providers/typedef_provider.mli
(*
 * Copyright (c) Meta Platforms, Inc. and affiliates.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the "hack" directory of this source tree.
 *
 *)

type type_key = string

type typedef_decl = Typing_defs.typedef_type

val get_typedef : Provider_context.t -> type_key -> typedef_decl option

val find_in_direct_decl_parse :
  cache_results:bool ->
  Provider_context.t ->
  Relative_path.t ->
  type_key ->
  (Shallow_decl_defs.decl -> 'a option) ->
  'a option
TOML
hhvm/hphp/hack/src/providers/cargo/decl_provider_rust/Cargo.toml
# @generated by autocargo

[package]
name = "decl_provider_rust"
version = "0.0.0"
edition = "2021"

[lib]
path = "../../decl_provider.rs"

[dependencies]
oxidized_by_ref = { version = "0.0.0", path = "../../../oxidized_by_ref" }
TOML
hhvm/hphp/hack/src/providers/cargo/rust_provider_backend_ffi/Cargo.toml
# @generated by autocargo

[package]
name = "rust_provider_backend_ffi"
version = "0.0.0"
edition = "2021"

[lib]
path = "../../rust_provider_backend_ffi.rs"
test = false
doctest = false
crate-type = ["lib", "staticlib"]

[dependencies]
bincode = "1.3.3"
bstr = { version = "1.4.0", features = ["serde", "std", "unicode"] }
deps_rust = { version = "0.0.0", path = "../../../deps/cargo/deps_rust" }
file_info = { version = "0.0.0", path = "../../../deps/rust/file_info" }
file_provider = { version = "0.0.0", path = "../../../hackrs/file_provider/cargo/file_provider" }
hackrs_provider_backend = { version = "0.0.0", path = "../../hackrs_provider_backend" }
naming_provider = { version = "0.0.0", path = "../../../hackrs/naming_provider/cargo/naming_provider" }
ocamlrep = { version = "0.1.0", git = "https://github.com/facebook/ocamlrep/", branch = "main" }
ocamlrep_custom = { version = "0.1.0", git = "https://github.com/facebook/ocamlrep/", branch = "main" }
ocamlrep_ocamlpool = { version = "0.1.0", git = "https://github.com/facebook/ocamlrep/", branch = "main" }
oxidized = { version = "0.0.0", path = "../../../oxidized" }
oxidized_by_ref = { version = "0.0.0", path = "../../../oxidized_by_ref" }
pos = { version = "0.0.0", path = "../../../hackrs/pos/cargo/pos" }
rust_decl_ffi = { version = "0.0.0", path = "../../../decl/cargo/rust_decl_ffi" }
rust_provider_backend_api = { version = "0.0.0", path = "../.." }
ty = { version = "0.0.0", path = "../../../hackrs/ty/cargo/ty" }
TOML
hhvm/hphp/hack/src/providers/hackrs_provider_backend/Cargo.toml
# @generated by autocargo

[package]
name = "hackrs_provider_backend"
version = "0.0.0"
edition = "2021"

[lib]
path = "hackrs_provider_backend.rs"

[dependencies]
anyhow = "1.0.71"
bstr = { version = "1.4.0", features = ["serde", "std", "unicode"] }
bumpalo = { version = "3.11.1", features = ["collections"] }
datastore = { version = "0.0.0", path = "../../hackrs/datastore" }
decl_parser = { version = "0.0.0", path = "../../hackrs/decl_parser/cargo/decl_parser" }
deps_rust = { version = "0.0.0", path = "../../deps/cargo/deps_rust" }
file_info = { version = "0.0.0", path = "../../deps/rust/file_info" }
file_provider = { version = "0.0.0", path = "../../hackrs/file_provider/cargo/file_provider" }
folded_decl_provider = { version = "0.0.0", path = "../../hackrs/folded_decl_provider/cargo/folded_decl_provider" }
hh24_types = { version = "0.0.0", path = "../../utils/hh24_types" }
names = { version = "0.0.0", path = "../../naming/names_rust" }
naming_provider = { version = "0.0.0", path = "../../hackrs/naming_provider/cargo/naming_provider" }
ocaml_runtime = { version = "0.0.0", path = "../../utils/ocaml_runtime" }
ocamlrep = { version = "0.1.0", git = "https://github.com/facebook/ocamlrep/", branch = "main" }
ocamlrep_caml_builtins = { version = "0.1.0", git = "https://github.com/facebook/ocamlrep/", branch = "main" }
ocamlrep_ocamlpool = { version = "0.1.0", git = "https://github.com/facebook/ocamlrep/", branch = "main" }
oxidized = { version = "0.0.0", path = "../../oxidized" }
oxidized_by_ref = { version = "0.0.0", path = "../../oxidized_by_ref" }
parking_lot = { version = "0.12.1", features = ["send_guard"] }
pos = { version = "0.0.0", path = "../../hackrs/pos/cargo/pos" }
rust_provider_backend_api = { version = "0.0.0", path = ".." }
serde = { version = "1.0.176", features = ["derive", "rc"] }
shallow_decl_provider = { version = "0.0.0", path = "../../hackrs/shallow_decl_provider/cargo/shallow_decl_provider" }
shm_store = { version = "0.0.0", path = "../../shmffi/cargo/shm_store" }
ty = { version = "0.0.0", path = "../../hackrs/ty/cargo/ty" }

[dev-dependencies]
hh24_test = { version = "0.0.0", path = "../../utils/cargo/hh24_test" }
libc = "0.2.139"
maplit = "1.0"
rpds = "0.11.0"
shmffi = { version = "0.0.0", path = "../../shmffi/cargo/shmffi" }