naga/front/wgsl/parse/mod.rs

use alloc::{boxed::Box, vec::Vec};
use directive::enable_extension::ImplementedEnableExtension;

use crate::diagnostic_filter::{
    self, DiagnosticFilter, DiagnosticFilterMap, DiagnosticFilterNode, FilterableTriggeringRule,
    ShouldConflictOnFullDuplicate, StandardFilterableTriggeringRule,
};
use crate::front::wgsl::error::{DiagnosticAttributeNotSupportedPosition, Error, ExpectedToken};
use crate::front::wgsl::parse::directive::enable_extension::{EnableExtension, EnableExtensions};
use crate::front::wgsl::parse::directive::language_extension::LanguageExtension;
use crate::front::wgsl::parse::directive::DirectiveKind;
use crate::front::wgsl::parse::lexer::{Lexer, Token};
use crate::front::wgsl::parse::number::Number;
use crate::front::wgsl::{Result, Scalar};
use crate::front::SymbolTable;
use crate::{Arena, FastHashSet, FastIndexSet, Handle, ShaderStage, Span};

pub mod ast;
pub mod conv;
pub mod directive;
pub mod lexer;
pub mod number;

/// State for constructing an AST expression.
///
/// Not to be confused with [`lower::ExpressionContext`], which is for producing
/// Naga IR from the AST we produce here.
///
/// [`lower::ExpressionContext`]: super::lower::ExpressionContext
struct ExpressionContext<'input, 'temp, 'out> {
    /// The [`TranslationUnit::expressions`] arena to which we should contribute
    /// expressions.
    ///
    /// [`TranslationUnit::expressions`]: ast::TranslationUnit::expressions
    expressions: &'out mut Arena<ast::Expression<'input>>,

    /// The [`TranslationUnit::types`] arena to which we should contribute new
    /// types.
    ///
    /// [`TranslationUnit::types`]: ast::TranslationUnit::types
    types: &'out mut Arena<ast::Type<'input>>,

    /// A map from identifiers in scope to the locals/arguments they represent.
    ///
    /// The handles refer to the [`locals`] arena; see that field's
    /// documentation for details.
    ///
    /// [`locals`]: ExpressionContext::locals
    local_table: &'temp mut SymbolTable<&'input str, Handle<ast::Local>>,

    /// Local variable and function argument arena for the function we're building.
    ///
    /// Note that the [`ast::Local`] here is actually a zero-sized type. This
    /// `Arena`'s only role is to assign a unique `Handle` to each local
    /// identifier, and track its definition's span for use in diagnostics. All
    /// the detailed information about locals - names, types, etc. - is kept in
    /// the [`LocalDecl`] statements we parsed from their declarations. For
    /// arguments, that information is kept in [`arguments`].
    ///
    /// In the AST, when an [`Ident`] expression refers to a local variable or
    /// argument, its [`IdentExpr`] holds the referent's `Handle<Local>` in this
    /// arena.
    ///
    /// During lowering, [`LocalDecl`] statements add entries to a per-function
    /// table that maps `Handle<Local>` values to their Naga representations,
    /// accessed via [`StatementContext::local_table`] and
    /// [`LocalExpressionContext::local_table`]. This table is then consulted when
    /// lowering subsequent [`Ident`] expressions.
    ///
    /// [`LocalDecl`]: ast::StatementKind::LocalDecl
    /// [`arguments`]: ast::Function::arguments
    /// [`Ident`]: ast::Expression::Ident
    /// [`IdentExpr`]: ast::IdentExpr
    /// [`StatementContext::local_table`]: super::lower::StatementContext::local_table
    /// [`LocalExpressionContext::local_table`]: super::lower::LocalExpressionContext::local_table
    locals: &'out mut Arena<ast::Local>,

    /// Identifiers used by the current global declaration that have no local definition.
    ///
    /// This becomes the [`GlobalDecl`]'s [`dependencies`] set.
    ///
    /// Note that we don't know at parse time what kind of [`GlobalDecl`] the
    /// name refers to. We can't look up names until we've seen the entire
    /// translation unit.
    ///
    /// [`GlobalDecl`]: ast::GlobalDecl
    /// [`dependencies`]: ast::GlobalDecl::dependencies
    unresolved: &'out mut FastIndexSet<ast::Dependency<'input>>,
}

impl<'a> ExpressionContext<'a, '_, '_> {
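    /// Parse a left-associative chain of binary operators.
    ///
    /// `parser` parses each operand, and `classifier` maps a peeked token to
    /// the operator it denotes (or `None` to stop). Operands are folded left
    /// to right, so `a - b - c` parses as `(a - b) - c`.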
    fn parse_binary_op(
        &mut self,
        lexer: &mut Lexer<'a>,
        classifier: impl Fn(Token<'a>) -> Option<crate::BinaryOperator>,
        mut parser: impl FnMut(&mut Lexer<'a>, &mut Self) -> Result<'a, Handle<ast::Expression<'a>>>,
    ) -> Result<'a, Handle<ast::Expression<'a>>> {
        let start = lexer.start_byte_offset();
        let mut accumulator = parser(lexer, self)?;
        while let Some(op) = classifier(lexer.peek().0) {
            let _ = lexer.next();
            let left = accumulator;
            let right = parser(lexer, self)?;
            accumulator = self.expressions.append(
                ast::Expression::Binary { op, left, right },
                lexer.span_from(start),
            );
        }
        Ok(accumulator)
    }

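    /// Add a new entry to [`locals`] and register `name` in the symbol table.
    ///
    /// Returns an error if `name` is already defined.
    ///
    /// [`locals`]: ExpressionContext::locals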
    fn declare_local(&mut self, name: ast::Ident<'a>) -> Result<'a, Handle<ast::Local>> {
        let handle = self.locals.append(ast::Local, name.span);
        if let Some(old) = self.local_table.add(name.name, handle) {
            Err(Box::new(Error::Redefinition {
                previous: self.locals.get_span(old),
                current: name.span,
            }))
        } else {
            Ok(handle)
        }
    }

    fn new_scalar(&mut self, scalar: Scalar) -> Handle<ast::Type<'a>> {
        self.types
            .append(ast::Type::Scalar(scalar), Span::UNDEFINED)
    }
}

/// Which grammar rule we are in the midst of parsing.
///
/// This is used for error checking. `Parser` maintains a stack of
/// these and (occasionally) checks that it is being pushed and popped
/// as expected.
#[derive(Copy, Clone, Debug, PartialEq)]
enum Rule {
    Attribute,
    VariableDecl,
    TypeDecl,
    FunctionDecl,
    Block,
    Statement,
    PrimaryExpr,
    SingularExpr,
    UnaryExpr,
    GeneralExpr,
    Directive,
    GenericExpr,
    EnclosedExpr,
    LhsExpr,
}

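/// Tracker for a single attribute value during parsing.
///
/// Remembers whether the attribute has already been set, so that a repeated
/// attribute can be reported as an error by [`set`](Self::set).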
struct ParsedAttribute<T> {
    value: Option<T>,
}

impl<T> Default for ParsedAttribute<T> {
    fn default() -> Self {
        Self { value: None }
    }
}

impl<T> ParsedAttribute<T> {
    fn set(&mut self, value: T, name_span: Span) -> Result<'static, ()> {
        if self.value.is_some() {
            return Err(Box::new(Error::RepeatedAttribute(name_span)));
        }
        self.value = Some(value);
        Ok(())
    }
}

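/// Accumulator for the binding-related attributes on a declaration.
///
/// [`parse`](Self::parse) records each `@location`, `@builtin`, `@interpolate`,
/// `@invariant`, or `@blend_src` attribute it sees; [`finish`](Self::finish)
/// checks that the combination is consistent and produces a single
/// [`ast::Binding`], if any.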
#[derive(Default)]
struct BindingParser<'a> {
    location: ParsedAttribute<Handle<ast::Expression<'a>>>,
    built_in: ParsedAttribute<crate::BuiltIn>,
    interpolation: ParsedAttribute<crate::Interpolation>,
    sampling: ParsedAttribute<crate::Sampling>,
    invariant: ParsedAttribute<bool>,
    blend_src: ParsedAttribute<Handle<ast::Expression<'a>>>,
}

impl<'a> BindingParser<'a> {
    fn parse(
        &mut self,
        parser: &mut Parser,
        lexer: &mut Lexer<'a>,
        name: &'a str,
        name_span: Span,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, ()> {
        match name {
            "location" => {
                lexer.expect(Token::Paren('('))?;
                self.location
                    .set(parser.general_expression(lexer, ctx)?, name_span)?;
                lexer.expect(Token::Paren(')'))?;
            }
            "builtin" => {
                lexer.expect(Token::Paren('('))?;
                let (raw, span) = lexer.next_ident_with_span()?;
                self.built_in
                    .set(conv::map_built_in(raw, span)?, name_span)?;
                lexer.expect(Token::Paren(')'))?;
            }
            "interpolate" => {
                lexer.expect(Token::Paren('('))?;
                let (raw, span) = lexer.next_ident_with_span()?;
                self.interpolation
                    .set(conv::map_interpolation(raw, span)?, name_span)?;
                if lexer.skip(Token::Separator(',')) {
                    let (raw, span) = lexer.next_ident_with_span()?;
                    self.sampling
                        .set(conv::map_sampling(raw, span)?, name_span)?;
                }
                lexer.expect(Token::Paren(')'))?;
            }

            "invariant" => {
                self.invariant.set(true, name_span)?;
            }
            "blend_src" => {
                if !lexer
                    .enable_extensions
                    .contains(ImplementedEnableExtension::DualSourceBlending)
                {
                    return Err(Box::new(Error::EnableExtensionNotEnabled {
                        span: name_span,
                        kind: ImplementedEnableExtension::DualSourceBlending.into(),
                    }));
                }

                lexer.expect(Token::Paren('('))?;
                self.blend_src
                    .set(parser.general_expression(lexer, ctx)?, name_span)?;
                lexer.expect(Token::Paren(')'))?;
            }
            _ => return Err(Box::new(Error::UnknownAttribute(name_span))),
        }
        Ok(())
    }

    fn finish(self, span: Span) -> Result<'a, Option<ast::Binding<'a>>> {
        match (
            self.location.value,
            self.built_in.value,
            self.interpolation.value,
            self.sampling.value,
            self.invariant.value.unwrap_or_default(),
            self.blend_src.value,
        ) {
            (None, None, None, None, false, None) => Ok(None),
            (Some(location), None, interpolation, sampling, false, blend_src) => {
                // Before handing over the completed `Module`, we call
                // `apply_default_interpolation` to ensure that the interpolation and
                // sampling have been explicitly specified on all vertex shader output and fragment
                // shader input user bindings, so leaving them potentially `None` here is fine.
                Ok(Some(ast::Binding::Location {
                    location,
                    interpolation,
                    sampling,
                    blend_src,
                }))
            }
            (None, Some(crate::BuiltIn::Position { .. }), None, None, invariant, None) => {
                Ok(Some(ast::Binding::BuiltIn(crate::BuiltIn::Position {
                    invariant,
                })))
            }
            (None, Some(built_in), None, None, false, None) => {
                Ok(Some(ast::Binding::BuiltIn(built_in)))
            }
            (_, _, _, _, _, _) => Err(Box::new(Error::InconsistentBinding(span))),
        }
    }
}

pub struct Parser {
    rules: Vec<(Rule, usize)>,
    recursion_depth: u32,
}

impl Parser {
    pub const fn new() -> Self {
        Parser {
            rules: Vec::new(),
            recursion_depth: 0,
        }
    }

    fn reset(&mut self) {
        self.rules.clear();
        self.recursion_depth = 0;
    }

    fn push_rule_span(&mut self, rule: Rule, lexer: &mut Lexer<'_>) {
        self.rules.push((rule, lexer.start_byte_offset()));
    }

    fn pop_rule_span(&mut self, lexer: &Lexer<'_>) -> Span {
        let (_, initial) = self.rules.pop().unwrap();
        lexer.span_from(initial)
    }

    fn peek_rule_span(&mut self, lexer: &Lexer<'_>) -> Span {
        let &(_, initial) = self.rules.last().unwrap();
        lexer.span_from(initial)
    }

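    /// Return whichever of `rule0` or `rule1` is nearest the top of the rule
    /// stack, or `None` if neither is currently being parsed.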
    fn race_rules(&self, rule0: Rule, rule1: Rule) -> Option<Rule> {
        Some(
            self.rules
                .iter()
                .rev()
                .find(|&x| x.0 == rule0 || x.0 == rule1)?
                .0,
        )
    }

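    /// Run `f`, tracking the parser's recursion depth.
    ///
    /// Fails with an internal error once the depth reaches 256, so deeply
    /// nested input is rejected with a diagnostic rather than recursing
    /// without bound.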
    fn track_recursion<'a, F, R>(&mut self, f: F) -> Result<'a, R>
    where
        F: FnOnce(&mut Self) -> Result<'a, R>,
    {
        self.recursion_depth += 1;
        if self.recursion_depth >= 256 {
            return Err(Box::new(Error::Internal("Parser recursion limit exceeded")));
        }
        let ret = f(self);
        self.recursion_depth -= 1;
        ret
    }

    fn switch_value<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, ast::SwitchValue<'a>> {
        if let Token::Word("default") = lexer.peek().0 {
            let _ = lexer.next();
            return Ok(ast::SwitchValue::Default);
        }

        let expr = self.general_expression(lexer, ctx)?;
        Ok(ast::SwitchValue::Expr(expr))
    }

    /// Decide if we're looking at a construction expression, and return its
    /// type if so.
    ///
    /// If the identifier `word` is a [type-defining keyword], then return a
    /// [`ConstructorType`] value describing the type to build. Return an error
    /// if the type is not constructible (like `sampler`).
    ///
    /// If `word` isn't a type name, then return `None`.
    ///
    /// [type-defining keyword]: https://gpuweb.github.io/gpuweb/wgsl/#type-defining-keywords
    /// [`ConstructorType`]: ast::ConstructorType
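    ///
    /// For example, `vec3f` yields a fully-specified [`Vector`] constructor,
    /// while `vec3` yields [`PartialVector`], whose component type is filled
    /// in from a trailing `<T>` if one is present.
    ///
    /// [`Vector`]: ast::ConstructorType::Vector
    /// [`PartialVector`]: ast::ConstructorType::PartialVector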
    fn constructor_type<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        word: &'a str,
        span: Span,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, Option<ast::ConstructorType<'a>>> {
        if let Some(scalar) = conv::get_scalar_type(&lexer.enable_extensions, span, word)? {
            return Ok(Some(ast::ConstructorType::Scalar(scalar)));
        }

        let partial = match word {
            "vec2" => ast::ConstructorType::PartialVector {
                size: crate::VectorSize::Bi,
            },
            "vec2i" => {
                return Ok(Some(ast::ConstructorType::Vector {
                    size: crate::VectorSize::Bi,
                    ty: ctx.new_scalar(Scalar::I32),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "vec2u" => {
                return Ok(Some(ast::ConstructorType::Vector {
                    size: crate::VectorSize::Bi,
                    ty: ctx.new_scalar(Scalar::U32),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "vec2f" => {
                return Ok(Some(ast::ConstructorType::Vector {
                    size: crate::VectorSize::Bi,
                    ty: ctx.new_scalar(Scalar::F32),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "vec2h" => {
                return Ok(Some(ast::ConstructorType::Vector {
                    size: crate::VectorSize::Bi,
                    ty: ctx.new_scalar(Scalar::F16),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "vec3" => ast::ConstructorType::PartialVector {
                size: crate::VectorSize::Tri,
            },
            "vec3i" => {
                return Ok(Some(ast::ConstructorType::Vector {
                    size: crate::VectorSize::Tri,
                    ty: ctx.new_scalar(Scalar::I32),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "vec3u" => {
                return Ok(Some(ast::ConstructorType::Vector {
                    size: crate::VectorSize::Tri,
                    ty: ctx.new_scalar(Scalar::U32),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "vec3f" => {
                return Ok(Some(ast::ConstructorType::Vector {
                    size: crate::VectorSize::Tri,
                    ty: ctx.new_scalar(Scalar::F32),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "vec3h" => {
                return Ok(Some(ast::ConstructorType::Vector {
                    size: crate::VectorSize::Tri,
                    ty: ctx.new_scalar(Scalar::F16),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "vec4" => ast::ConstructorType::PartialVector {
                size: crate::VectorSize::Quad,
            },
            "vec4i" => {
                return Ok(Some(ast::ConstructorType::Vector {
                    size: crate::VectorSize::Quad,
                    ty: ctx.new_scalar(Scalar::I32),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "vec4u" => {
                return Ok(Some(ast::ConstructorType::Vector {
                    size: crate::VectorSize::Quad,
                    ty: ctx.new_scalar(Scalar::U32),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "vec4f" => {
                return Ok(Some(ast::ConstructorType::Vector {
                    size: crate::VectorSize::Quad,
                    ty: ctx.new_scalar(Scalar::F32),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "vec4h" => {
                return Ok(Some(ast::ConstructorType::Vector {
                    size: crate::VectorSize::Quad,
                    ty: ctx.new_scalar(Scalar::F16),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "mat2x2" => ast::ConstructorType::PartialMatrix {
                columns: crate::VectorSize::Bi,
                rows: crate::VectorSize::Bi,
            },
467            "mat2x2f" => {
468                return Ok(Some(ast::ConstructorType::Matrix {
469                    columns: crate::VectorSize::Bi,
470                    rows: crate::VectorSize::Bi,
471                    ty: ctx.new_scalar(Scalar::F32),
472                    ty_span: Span::UNDEFINED,
473                }))
474            }
475            "mat2x2h" => {
476                return Ok(Some(ast::ConstructorType::Matrix {
477                    columns: crate::VectorSize::Bi,
478                    rows: crate::VectorSize::Bi,
479                    ty: ctx.new_scalar(Scalar::F16),
480                    ty_span: Span::UNDEFINED,
481                }))
482            }
483            "mat2x3" => ast::ConstructorType::PartialMatrix {
484                columns: crate::VectorSize::Bi,
485                rows: crate::VectorSize::Tri,
486            },
487            "mat2x3f" => {
488                return Ok(Some(ast::ConstructorType::Matrix {
489                    columns: crate::VectorSize::Bi,
490                    rows: crate::VectorSize::Tri,
491                    ty: ctx.new_scalar(Scalar::F32),
492                    ty_span: Span::UNDEFINED,
493                }))
494            }
495            "mat2x3h" => {
496                return Ok(Some(ast::ConstructorType::Matrix {
497                    columns: crate::VectorSize::Bi,
498                    rows: crate::VectorSize::Tri,
499                    ty: ctx.new_scalar(Scalar::F16),
500                    ty_span: Span::UNDEFINED,
501                }))
502            }
503            "mat2x4" => ast::ConstructorType::PartialMatrix {
504                columns: crate::VectorSize::Bi,
505                rows: crate::VectorSize::Quad,
506            },
507            "mat2x4f" => {
508                return Ok(Some(ast::ConstructorType::Matrix {
509                    columns: crate::VectorSize::Bi,
510                    rows: crate::VectorSize::Quad,
511                    ty: ctx.new_scalar(Scalar::F32),
512                    ty_span: Span::UNDEFINED,
513                }))
514            }
515            "mat2x4h" => {
516                return Ok(Some(ast::ConstructorType::Matrix {
517                    columns: crate::VectorSize::Bi,
518                    rows: crate::VectorSize::Quad,
519                    ty: ctx.new_scalar(Scalar::F16),
520                    ty_span: Span::UNDEFINED,
521                }))
522            }
523            "mat3x2" => ast::ConstructorType::PartialMatrix {
524                columns: crate::VectorSize::Tri,
525                rows: crate::VectorSize::Bi,
526            },
527            "mat3x2f" => {
528                return Ok(Some(ast::ConstructorType::Matrix {
529                    columns: crate::VectorSize::Tri,
530                    rows: crate::VectorSize::Bi,
531                    ty: ctx.new_scalar(Scalar::F32),
532                    ty_span: Span::UNDEFINED,
533                }))
534            }
535            "mat3x2h" => {
536                return Ok(Some(ast::ConstructorType::Matrix {
537                    columns: crate::VectorSize::Tri,
538                    rows: crate::VectorSize::Bi,
539                    ty: ctx.new_scalar(Scalar::F16),
540                    ty_span: Span::UNDEFINED,
541                }))
542            }
543            "mat3x3" => ast::ConstructorType::PartialMatrix {
544                columns: crate::VectorSize::Tri,
545                rows: crate::VectorSize::Tri,
546            },
547            "mat3x3f" => {
548                return Ok(Some(ast::ConstructorType::Matrix {
549                    columns: crate::VectorSize::Tri,
550                    rows: crate::VectorSize::Tri,
551                    ty: ctx.new_scalar(Scalar::F32),
552                    ty_span: Span::UNDEFINED,
553                }))
554            }
555            "mat3x3h" => {
556                return Ok(Some(ast::ConstructorType::Matrix {
557                    columns: crate::VectorSize::Tri,
558                    rows: crate::VectorSize::Tri,
559                    ty: ctx.new_scalar(Scalar::F16),
560                    ty_span: Span::UNDEFINED,
561                }))
562            }
563            "mat3x4" => ast::ConstructorType::PartialMatrix {
564                columns: crate::VectorSize::Tri,
565                rows: crate::VectorSize::Quad,
566            },
567            "mat3x4f" => {
568                return Ok(Some(ast::ConstructorType::Matrix {
569                    columns: crate::VectorSize::Tri,
570                    rows: crate::VectorSize::Quad,
571                    ty: ctx.new_scalar(Scalar::F32),
572                    ty_span: Span::UNDEFINED,
573                }))
574            }
575            "mat3x4h" => {
576                return Ok(Some(ast::ConstructorType::Matrix {
577                    columns: crate::VectorSize::Tri,
578                    rows: crate::VectorSize::Quad,
579                    ty: ctx.new_scalar(Scalar::F16),
580                    ty_span: Span::UNDEFINED,
581                }))
582            }
583            "mat4x2" => ast::ConstructorType::PartialMatrix {
584                columns: crate::VectorSize::Quad,
585                rows: crate::VectorSize::Bi,
586            },
587            "mat4x2f" => {
588                return Ok(Some(ast::ConstructorType::Matrix {
589                    columns: crate::VectorSize::Quad,
590                    rows: crate::VectorSize::Bi,
591                    ty: ctx.new_scalar(Scalar::F32),
592                    ty_span: Span::UNDEFINED,
593                }))
594            }
595            "mat4x2h" => {
596                return Ok(Some(ast::ConstructorType::Matrix {
597                    columns: crate::VectorSize::Quad,
598                    rows: crate::VectorSize::Bi,
599                    ty: ctx.new_scalar(Scalar::F16),
600                    ty_span: Span::UNDEFINED,
601                }))
602            }
603            "mat4x3" => ast::ConstructorType::PartialMatrix {
604                columns: crate::VectorSize::Quad,
605                rows: crate::VectorSize::Tri,
606            },
607            "mat4x3f" => {
608                return Ok(Some(ast::ConstructorType::Matrix {
609                    columns: crate::VectorSize::Quad,
610                    rows: crate::VectorSize::Tri,
611                    ty: ctx.new_scalar(Scalar::F32),
612                    ty_span: Span::UNDEFINED,
613                }))
614            }
615            "mat4x3h" => {
616                return Ok(Some(ast::ConstructorType::Matrix {
617                    columns: crate::VectorSize::Quad,
618                    rows: crate::VectorSize::Tri,
619                    ty: ctx.new_scalar(Scalar::F16),
620                    ty_span: Span::UNDEFINED,
621                }))
622            }
623            "mat4x4" => ast::ConstructorType::PartialMatrix {
624                columns: crate::VectorSize::Quad,
625                rows: crate::VectorSize::Quad,
626            },
627            "mat4x4f" => {
628                return Ok(Some(ast::ConstructorType::Matrix {
629                    columns: crate::VectorSize::Quad,
630                    rows: crate::VectorSize::Quad,
631                    ty: ctx.new_scalar(Scalar::F32),
632                    ty_span: Span::UNDEFINED,
633                }))
634            }
635            "mat4x4h" => {
636                return Ok(Some(ast::ConstructorType::Matrix {
637                    columns: crate::VectorSize::Quad,
638                    rows: crate::VectorSize::Quad,
639                    ty: ctx.new_scalar(Scalar::F16),
640                    ty_span: Span::UNDEFINED,
641                }))
642            }
643            "array" => ast::ConstructorType::PartialArray,
644            "atomic"
645            | "binding_array"
646            | "sampler"
647            | "sampler_comparison"
648            | "texture_1d"
649            | "texture_1d_array"
650            | "texture_2d"
651            | "texture_2d_array"
652            | "texture_3d"
653            | "texture_cube"
654            | "texture_cube_array"
655            | "texture_multisampled_2d"
656            | "texture_multisampled_2d_array"
657            | "texture_depth_2d"
658            | "texture_depth_2d_array"
659            | "texture_depth_cube"
660            | "texture_depth_cube_array"
661            | "texture_depth_multisampled_2d"
662            | "texture_storage_1d"
663            | "texture_storage_1d_array"
664            | "texture_storage_2d"
665            | "texture_storage_2d_array"
666            | "texture_storage_3d" => return Err(Box::new(Error::TypeNotConstructible(span))),
667            _ => return Ok(None),
668        };
669
670        // parse component type if present
671        match (lexer.peek().0, partial) {
672            (Token::Paren('<'), ast::ConstructorType::PartialVector { size }) => {
673                let (ty, ty_span) = self.singular_generic(lexer, ctx)?;
674                Ok(Some(ast::ConstructorType::Vector { size, ty, ty_span }))
675            }
676            (Token::Paren('<'), ast::ConstructorType::PartialMatrix { columns, rows }) => {
677                let (ty, ty_span) = self.singular_generic(lexer, ctx)?;
678                Ok(Some(ast::ConstructorType::Matrix {
679                    columns,
680                    rows,
681                    ty,
682                    ty_span,
683                }))
684            }
685            (Token::Paren('<'), ast::ConstructorType::PartialArray) => {
686                lexer.expect_generic_paren('<')?;
687                let base = self.type_decl(lexer, ctx)?;
688                let size = if lexer.end_of_generic_arguments() {
689                    let expr = self.const_generic_expression(lexer, ctx)?;
690                    lexer.skip(Token::Separator(','));
691                    ast::ArraySize::Constant(expr)
692                } else {
693                    ast::ArraySize::Dynamic
694                };
695                lexer.expect_generic_paren('>')?;
696
697                Ok(Some(ast::ConstructorType::Array { base, size }))
698            }
699            (_, partial) => Ok(Some(partial)),
700        }
701    }
702
703    /// Expects `name` to be consumed (not in lexer).
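    ///
    /// Parse the parenthesized, comma-separated list of arguments to a call.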
    fn arguments<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, Vec<Handle<ast::Expression<'a>>>> {
        self.push_rule_span(Rule::EnclosedExpr, lexer);
        lexer.open_arguments()?;
        let mut arguments = Vec::new();
        loop {
            if !arguments.is_empty() {
                if !lexer.next_argument()? {
                    break;
                }
            } else if lexer.skip(Token::Paren(')')) {
                break;
            }
            let arg = self.general_expression(lexer, ctx)?;
            arguments.push(arg);
        }

        self.pop_rule_span(lexer);
        Ok(arguments)
    }

    fn enclosed_expression<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, Handle<ast::Expression<'a>>> {
        self.push_rule_span(Rule::EnclosedExpr, lexer);
        let expr = self.general_expression(lexer, ctx)?;
        self.pop_rule_span(lexer);
        Ok(expr)
    }

    /// Expects [`Rule::PrimaryExpr`] or [`Rule::SingularExpr`] on top; does not pop it.
    /// Expects `name` to be consumed (not in lexer).
    fn function_call<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        name: &'a str,
        name_span: Span,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, Handle<ast::Expression<'a>>> {
        assert!(self.rules.last().is_some());

        let expr = match name {
            // bitcast looks like a function call, but it's an operator and must be handled differently.
            "bitcast" => {
                let (to, span) = self.singular_generic(lexer, ctx)?;

                lexer.open_arguments()?;
                let expr = self.general_expression(lexer, ctx)?;
                lexer.close_arguments()?;

                ast::Expression::Bitcast {
                    expr,
                    to,
                    ty_span: span,
                }
            }
            // everything else must be handled later, since they can be hidden by user-defined functions.
            _ => {
                let arguments = self.arguments(lexer, ctx)?;
                ctx.unresolved.insert(ast::Dependency {
                    ident: name,
                    usage: name_span,
                });
                ast::Expression::Call {
                    function: ast::Ident {
                        name,
                        span: name_span,
                    },
                    arguments,
                }
            }
        };

        let span = self.peek_rule_span(lexer);
        let expr = ctx.expressions.append(expr, span);
        Ok(expr)
    }

    fn ident_expr<'a>(
        &mut self,
        name: &'a str,
        name_span: Span,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> ast::IdentExpr<'a> {
        match ctx.local_table.lookup(name) {
            Some(&local) => ast::IdentExpr::Local(local),
            None => {
                ctx.unresolved.insert(ast::Dependency {
                    ident: name,
                    usage: name_span,
                });
                ast::IdentExpr::Unresolved(name)
            }
        }
    }

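    /// Parse a `primary_expression`: a parenthesized expression, a literal
    /// (including the `RAY_FLAG_*` and `RAY_QUERY_INTERSECTION_*` named
    /// constants), a construction expression, a function call, or an
    /// identifier.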
    fn primary_expression<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, Handle<ast::Expression<'a>>> {
        self.push_rule_span(Rule::PrimaryExpr, lexer);
        const fn literal_ray_flag<'b>(flag: crate::RayFlag) -> ast::Expression<'b> {
            ast::Expression::Literal(ast::Literal::Number(Number::U32(flag.bits())))
        }
        const fn literal_ray_intersection<'b>(
            intersection: crate::RayQueryIntersection,
        ) -> ast::Expression<'b> {
            ast::Expression::Literal(ast::Literal::Number(Number::U32(intersection as u32)))
        }

        let expr = match lexer.peek() {
            (Token::Paren('('), _) => {
                let _ = lexer.next();
                let expr = self.enclosed_expression(lexer, ctx)?;
                lexer.expect(Token::Paren(')'))?;
                self.pop_rule_span(lexer);
                return Ok(expr);
            }
            (Token::Word("true"), _) => {
                let _ = lexer.next();
                ast::Expression::Literal(ast::Literal::Bool(true))
            }
            (Token::Word("false"), _) => {
                let _ = lexer.next();
                ast::Expression::Literal(ast::Literal::Bool(false))
            }
            (Token::Number(res), span) => {
                let _ = lexer.next();
                let num = res.map_err(|err| Error::BadNumber(span, err))?;

                if let Some(enable_extension) = num.requires_enable_extension() {
                    if !lexer.enable_extensions.contains(enable_extension) {
                        return Err(Box::new(Error::EnableExtensionNotEnabled {
                            kind: enable_extension.into(),
                            span,
                        }));
                    }
                }

                ast::Expression::Literal(ast::Literal::Number(num))
            }
            (Token::Word("RAY_FLAG_NONE"), _) => {
                let _ = lexer.next();
                literal_ray_flag(crate::RayFlag::empty())
            }
            (Token::Word("RAY_FLAG_FORCE_OPAQUE"), _) => {
                let _ = lexer.next();
                literal_ray_flag(crate::RayFlag::FORCE_OPAQUE)
            }
            (Token::Word("RAY_FLAG_FORCE_NO_OPAQUE"), _) => {
                let _ = lexer.next();
                literal_ray_flag(crate::RayFlag::FORCE_NO_OPAQUE)
            }
            (Token::Word("RAY_FLAG_TERMINATE_ON_FIRST_HIT"), _) => {
                let _ = lexer.next();
                literal_ray_flag(crate::RayFlag::TERMINATE_ON_FIRST_HIT)
            }
            (Token::Word("RAY_FLAG_SKIP_CLOSEST_HIT_SHADER"), _) => {
                let _ = lexer.next();
                literal_ray_flag(crate::RayFlag::SKIP_CLOSEST_HIT_SHADER)
            }
            (Token::Word("RAY_FLAG_CULL_BACK_FACING"), _) => {
                let _ = lexer.next();
                literal_ray_flag(crate::RayFlag::CULL_BACK_FACING)
            }
            (Token::Word("RAY_FLAG_CULL_FRONT_FACING"), _) => {
                let _ = lexer.next();
                literal_ray_flag(crate::RayFlag::CULL_FRONT_FACING)
            }
            (Token::Word("RAY_FLAG_CULL_OPAQUE"), _) => {
                let _ = lexer.next();
                literal_ray_flag(crate::RayFlag::CULL_OPAQUE)
            }
            (Token::Word("RAY_FLAG_CULL_NO_OPAQUE"), _) => {
                let _ = lexer.next();
                literal_ray_flag(crate::RayFlag::CULL_NO_OPAQUE)
            }
            (Token::Word("RAY_FLAG_SKIP_TRIANGLES"), _) => {
                let _ = lexer.next();
                literal_ray_flag(crate::RayFlag::SKIP_TRIANGLES)
            }
            (Token::Word("RAY_FLAG_SKIP_AABBS"), _) => {
                let _ = lexer.next();
                literal_ray_flag(crate::RayFlag::SKIP_AABBS)
            }
            (Token::Word("RAY_QUERY_INTERSECTION_NONE"), _) => {
                let _ = lexer.next();
                literal_ray_intersection(crate::RayQueryIntersection::None)
            }
            (Token::Word("RAY_QUERY_INTERSECTION_TRIANGLE"), _) => {
                let _ = lexer.next();
                literal_ray_intersection(crate::RayQueryIntersection::Triangle)
            }
            (Token::Word("RAY_QUERY_INTERSECTION_GENERATED"), _) => {
                let _ = lexer.next();
                literal_ray_intersection(crate::RayQueryIntersection::Generated)
            }
            (Token::Word("RAY_QUERY_INTERSECTION_AABB"), _) => {
                let _ = lexer.next();
                literal_ray_intersection(crate::RayQueryIntersection::Aabb)
            }
            (Token::Word(word), span) => {
                let start = lexer.start_byte_offset();
                let _ = lexer.next();

                if let Some(ty) = self.constructor_type(lexer, word, span, ctx)? {
                    let ty_span = lexer.span_from(start);
                    let components = self.arguments(lexer, ctx)?;
                    ast::Expression::Construct {
                        ty,
                        ty_span,
                        components,
                    }
                } else if let Token::Paren('(') = lexer.peek().0 {
                    self.pop_rule_span(lexer);
                    return self.function_call(lexer, word, span, ctx);
                } else if word == "bitcast" {
                    self.pop_rule_span(lexer);
                    return self.function_call(lexer, word, span, ctx);
                } else {
                    let ident = self.ident_expr(word, span, ctx);
                    ast::Expression::Ident(ident)
                }
            }
            other => {
                return Err(Box::new(Error::Unexpected(
                    other.1,
                    ExpectedToken::PrimaryExpression,
                )))
            }
        };

        let span = self.pop_rule_span(lexer);
        let expr = ctx.expressions.append(expr, span);
        Ok(expr)
    }

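    /// Parse any postfix accessors following `expr`, i.e. `.member` and
    /// `[index]`, returning the resulting expression.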
    fn postfix<'a>(
        &mut self,
        span_start: usize,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
        expr: Handle<ast::Expression<'a>>,
    ) -> Result<'a, Handle<ast::Expression<'a>>> {
        let mut expr = expr;

        loop {
            let expression = match lexer.peek().0 {
                Token::Separator('.') => {
                    let _ = lexer.next();
                    let field = lexer.next_ident()?;

                    ast::Expression::Member { base: expr, field }
                }
                Token::Paren('[') => {
                    let _ = lexer.next();
                    let index = self.enclosed_expression(lexer, ctx)?;
                    lexer.expect(Token::Paren(']'))?;

                    ast::Expression::Index { base: expr, index }
                }
                _ => break,
            };

            let span = lexer.span_from(span_start);
            expr = ctx.expressions.append(expression, span);
        }

        Ok(expr)
    }

    fn const_generic_expression<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, Handle<ast::Expression<'a>>> {
        self.push_rule_span(Rule::GenericExpr, lexer);
        let expr = self.general_expression(lexer, ctx)?;
        self.pop_rule_span(lexer);
        Ok(expr)
    }

    /// Parse a `unary_expression`.
    fn unary_expression<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, Handle<ast::Expression<'a>>> {
        self.track_recursion(|this| {
            this.push_rule_span(Rule::UnaryExpr, lexer);
            //TODO: refactor this to avoid backing up
            let expr = match lexer.peek().0 {
                Token::Operation('-') => {
                    let _ = lexer.next();
                    let expr = this.unary_expression(lexer, ctx)?;
                    let expr = ast::Expression::Unary {
                        op: crate::UnaryOperator::Negate,
                        expr,
                    };
                    let span = this.peek_rule_span(lexer);
                    ctx.expressions.append(expr, span)
                }
                Token::Operation('!') => {
                    let _ = lexer.next();
                    let expr = this.unary_expression(lexer, ctx)?;
                    let expr = ast::Expression::Unary {
                        op: crate::UnaryOperator::LogicalNot,
                        expr,
                    };
                    let span = this.peek_rule_span(lexer);
                    ctx.expressions.append(expr, span)
                }
                Token::Operation('~') => {
                    let _ = lexer.next();
                    let expr = this.unary_expression(lexer, ctx)?;
                    let expr = ast::Expression::Unary {
                        op: crate::UnaryOperator::BitwiseNot,
                        expr,
                    };
                    let span = this.peek_rule_span(lexer);
                    ctx.expressions.append(expr, span)
                }
                Token::Operation('*') => {
                    let _ = lexer.next();
                    let expr = this.unary_expression(lexer, ctx)?;
                    let expr = ast::Expression::Deref(expr);
                    let span = this.peek_rule_span(lexer);
                    ctx.expressions.append(expr, span)
                }
                Token::Operation('&') => {
                    let _ = lexer.next();
                    let expr = this.unary_expression(lexer, ctx)?;
                    let expr = ast::Expression::AddrOf(expr);
                    let span = this.peek_rule_span(lexer);
                    ctx.expressions.append(expr, span)
                }
                _ => this.singular_expression(lexer, ctx)?,
            };

            this.pop_rule_span(lexer);
            Ok(expr)
        })
    }

    /// Parse a `lhs_expression`.
    ///
    /// LHS expressions only support the `&` and `*` operators and
    /// the `[]` and `.` postfix selectors.
    fn lhs_expression<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, Handle<ast::Expression<'a>>> {
        self.track_recursion(|this| {
            this.push_rule_span(Rule::LhsExpr, lexer);
            let start = lexer.start_byte_offset();
            let expr = match lexer.peek() {
                (Token::Operation('*'), _) => {
                    let _ = lexer.next();
                    let expr = this.lhs_expression(lexer, ctx)?;
                    let expr = ast::Expression::Deref(expr);
                    let span = this.peek_rule_span(lexer);
                    ctx.expressions.append(expr, span)
                }
                (Token::Operation('&'), _) => {
                    let _ = lexer.next();
                    let expr = this.lhs_expression(lexer, ctx)?;
                    let expr = ast::Expression::AddrOf(expr);
                    let span = this.peek_rule_span(lexer);
                    ctx.expressions.append(expr, span)
                }
                (Token::Paren('('), _) => {
                    let _ = lexer.next();
                    let primary_expr = this.lhs_expression(lexer, ctx)?;
                    lexer.expect(Token::Paren(')'))?;
                    this.postfix(start, lexer, ctx, primary_expr)?
                }
                (Token::Word(word), span) => {
                    let _ = lexer.next();
                    let ident = this.ident_expr(word, span, ctx);
                    let primary_expr = ctx.expressions.append(ast::Expression::Ident(ident), span);
                    this.postfix(start, lexer, ctx, primary_expr)?
                }
                _ => this.singular_expression(lexer, ctx)?,
            };

            this.pop_rule_span(lexer);
            Ok(expr)
        })
    }

    /// Parse a `singular_expression`.
    fn singular_expression<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, Handle<ast::Expression<'a>>> {
        let start = lexer.start_byte_offset();
        self.push_rule_span(Rule::SingularExpr, lexer);
        let primary_expr = self.primary_expression(lexer, ctx)?;
        let singular_expr = self.postfix(start, lexer, ctx, primary_expr)?;
        self.pop_rule_span(lexer);

        Ok(singular_expr)
    }

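    /// Parse an `equality_expression`, together with the relational, shift,
    /// additive, and multiplicative levels below it.
    ///
    /// The classifiers for the relational and shift operators depend on
    /// whether we are inside a generic argument list or an enclosed
    /// expression, so that a closing `>` is not consumed as an operator.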
    fn equality_expression<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        context: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, Handle<ast::Expression<'a>>> {
        // equality_expression
        context.parse_binary_op(
            lexer,
            |token| match token {
                Token::LogicalOperation('=') => Some(crate::BinaryOperator::Equal),
                Token::LogicalOperation('!') => Some(crate::BinaryOperator::NotEqual),
                _ => None,
            },
            // relational_expression
            |lexer, context| {
                let enclosing = self.race_rules(Rule::GenericExpr, Rule::EnclosedExpr);
                context.parse_binary_op(
                    lexer,
                    match enclosing {
                        Some(Rule::GenericExpr) => |token| match token {
                            Token::LogicalOperation('<') => Some(crate::BinaryOperator::LessEqual),
                            _ => None,
                        },
                        _ => |token| match token {
                            Token::Paren('<') => Some(crate::BinaryOperator::Less),
                            Token::Paren('>') => Some(crate::BinaryOperator::Greater),
                            Token::LogicalOperation('<') => Some(crate::BinaryOperator::LessEqual),
                            Token::LogicalOperation('>') => {
                                Some(crate::BinaryOperator::GreaterEqual)
                            }
                            _ => None,
                        },
                    },
                    // shift_expression
                    |lexer, context| {
                        context.parse_binary_op(
                            lexer,
                            match enclosing {
                                Some(Rule::GenericExpr) => |token| match token {
                                    Token::ShiftOperation('<') => {
                                        Some(crate::BinaryOperator::ShiftLeft)
                                    }
                                    _ => None,
                                },
                                _ => |token| match token {
                                    Token::ShiftOperation('<') => {
                                        Some(crate::BinaryOperator::ShiftLeft)
                                    }
                                    Token::ShiftOperation('>') => {
                                        Some(crate::BinaryOperator::ShiftRight)
                                    }
                                    _ => None,
                                },
                            },
                            // additive_expression
                            |lexer, context| {
                                context.parse_binary_op(
                                    lexer,
                                    |token| match token {
                                        Token::Operation('+') => Some(crate::BinaryOperator::Add),
                                        Token::Operation('-') => {
                                            Some(crate::BinaryOperator::Subtract)
                                        }
                                        _ => None,
                                    },
                                    // multiplicative_expression
                                    |lexer, context| {
                                        context.parse_binary_op(
                                            lexer,
                                            |token| match token {
                                                Token::Operation('*') => {
                                                    Some(crate::BinaryOperator::Multiply)
                                                }
                                                Token::Operation('/') => {
                                                    Some(crate::BinaryOperator::Divide)
                                                }
                                                Token::Operation('%') => {
                                                    Some(crate::BinaryOperator::Modulo)
                                                }
                                                _ => None,
                                            },
                                            |lexer, context| self.unary_expression(lexer, context),
                                        )
                                    },
                                )
                            },
                        )
                    },
                )
            },
        )
    }

    fn general_expression<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, Handle<ast::Expression<'a>>> {
        self.general_expression_with_span(lexer, ctx)
            .map(|(expr, _)| expr)
    }

    fn general_expression_with_span<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        context: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, (Handle<ast::Expression<'a>>, Span)> {
        self.push_rule_span(Rule::GeneralExpr, lexer);
        // logical_or_expression
        let handle = context.parse_binary_op(
            lexer,
            |token| match token {
                Token::LogicalOperation('|') => Some(crate::BinaryOperator::LogicalOr),
                _ => None,
            },
            // logical_and_expression
            |lexer, context| {
                context.parse_binary_op(
                    lexer,
                    |token| match token {
                        Token::LogicalOperation('&') => Some(crate::BinaryOperator::LogicalAnd),
                        _ => None,
                    },
                    // inclusive_or_expression
                    |lexer, context| {
                        context.parse_binary_op(
                            lexer,
                            |token| match token {
                                Token::Operation('|') => Some(crate::BinaryOperator::InclusiveOr),
                                _ => None,
                            },
                            // exclusive_or_expression
                            |lexer, context| {
                                context.parse_binary_op(
                                    lexer,
                                    |token| match token {
                                        Token::Operation('^') => {
                                            Some(crate::BinaryOperator::ExclusiveOr)
                                        }
                                        _ => None,
                                    },
                                    // and_expression
                                    |lexer, context| {
                                        context.parse_binary_op(
                                            lexer,
                                            |token| match token {
                                                Token::Operation('&') => {
                                                    Some(crate::BinaryOperator::And)
                                                }
                                                _ => None,
                                            },
                                            |lexer, context| {
                                                self.equality_expression(lexer, context)
                                            },
                                        )
                                    },
                                )
                            },
                        )
                    },
                )
            },
        )?;
        Ok((handle, self.pop_rule_span(lexer)))
    }

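    /// Parse a `var` declaration, after the `var` keyword has been consumed.
    ///
    /// Handles the optional `<address_space, access>` qualifier, the optional
    /// type annotation, the optional initializer, and the terminating `;`.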
    fn variable_decl<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, ast::GlobalVariable<'a>> {
        self.push_rule_span(Rule::VariableDecl, lexer);
        let mut space = crate::AddressSpace::Handle;

        if lexer.skip(Token::Paren('<')) {
            let (class_str, span) = lexer.next_ident_with_span()?;
            space = match class_str {
                "storage" => {
                    let access = if lexer.skip(Token::Separator(',')) {
                        lexer.next_storage_access()?
                    } else {
                        // defaulting to `read`
                        crate::StorageAccess::LOAD
                    };
                    crate::AddressSpace::Storage { access }
                }
                _ => conv::map_address_space(class_str, span)?,
            };
            lexer.expect(Token::Paren('>'))?;
        }
        let name = lexer.next_ident()?;

        let ty = if lexer.skip(Token::Separator(':')) {
            Some(self.type_decl(lexer, ctx)?)
        } else {
            None
        };

        let init = if lexer.skip(Token::Operation('=')) {
            let handle = self.general_expression(lexer, ctx)?;
            Some(handle)
        } else {
            None
        };
        lexer.expect(Token::Separator(';'))?;
        self.pop_rule_span(lexer);

        Ok(ast::GlobalVariable {
            name,
            space,
            binding: None,
            ty,
            init,
        })
    }

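    /// Parse the member list of a struct body, including per-member
    /// `@size`/`@align` and binding attributes, and report duplicate member
    /// names.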
    fn struct_body<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, Vec<ast::StructMember<'a>>> {
        let mut members = Vec::new();
        let mut member_names = FastHashSet::default();

        lexer.expect(Token::Paren('{'))?;
        let mut ready = true;
        while !lexer.skip(Token::Paren('}')) {
            if !ready {
                return Err(Box::new(Error::Unexpected(
                    lexer.next().1,
                    ExpectedToken::Token(Token::Separator(',')),
                )));
            }
            let (mut size, mut align) = (ParsedAttribute::default(), ParsedAttribute::default());
            self.push_rule_span(Rule::Attribute, lexer);
            let mut bind_parser = BindingParser::default();
            while lexer.skip(Token::Attribute) {
                match lexer.next_ident_with_span()? {
                    ("size", name_span) => {
                        lexer.expect(Token::Paren('('))?;
                        let expr = self.general_expression(lexer, ctx)?;
                        lexer.expect(Token::Paren(')'))?;
                        size.set(expr, name_span)?;
                    }
                    ("align", name_span) => {
                        lexer.expect(Token::Paren('('))?;
                        let expr = self.general_expression(lexer, ctx)?;
                        lexer.expect(Token::Paren(')'))?;
                        align.set(expr, name_span)?;
                    }
                    (word, word_span) => bind_parser.parse(self, lexer, word, word_span, ctx)?,
                }
            }

            let bind_span = self.pop_rule_span(lexer);
            let binding = bind_parser.finish(bind_span)?;

            let name = lexer.next_ident()?;
            lexer.expect(Token::Separator(':'))?;
            let ty = self.type_decl(lexer, ctx)?;
            ready = lexer.skip(Token::Separator(','));

            members.push(ast::StructMember {
                name,
                ty,
                binding,
                size: size.value,
                align: align.value,
            });

            if !member_names.insert(name.name) {
                return Err(Box::new(Error::Redefinition {
                    previous: members
                        .iter()
                        .find(|x| x.name.name == name.name)
                        .map(|x| x.name.span)
                        .unwrap(),
                    current: name.span,
                }));
            }
        }

        Ok(members)
    }

1401    /// Parses `<T>`, returning `T` and the span of `T`.
1402    fn singular_generic<'a>(
1403        &mut self,
1404        lexer: &mut Lexer<'a>,
1405        ctx: &mut ExpressionContext<'a, '_, '_>,
1406    ) -> Result<'a, (Handle<ast::Type<'a>>, Span)> {
1407        lexer.expect_generic_paren('<')?;
1408        let start = lexer.start_byte_offset();
1409        let ty = self.type_decl(lexer, ctx)?;
1410        let span = lexer.span_from(start);
1411        lexer.skip(Token::Separator(','));
1412        lexer.expect_generic_paren('>')?;
1413        Ok((ty, span))
1414    }
1415
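        /// Parse the `<T>` generic argument of a matrix type with the given
        /// number of columns and rows.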
1416    fn matrix_with_type<'a>(
1417        &mut self,
1418        lexer: &mut Lexer<'a>,
1419        ctx: &mut ExpressionContext<'a, '_, '_>,
1420        columns: crate::VectorSize,
1421        rows: crate::VectorSize,
1422    ) -> Result<'a, ast::Type<'a>> {
1423        let (ty, ty_span) = self.singular_generic(lexer, ctx)?;
1424        Ok(ast::Type::Matrix {
1425            columns,
1426            rows,
1427            ty,
1428            ty_span,
1429        })
1430    }
1431
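        /// Try to parse a predeclared type named `word`.
        ///
        /// Returns `Ok(None)` if `word` is not a predeclared type, so the caller
        /// can treat it as a reference to a user-defined type instead.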
1432    fn type_decl_impl<'a>(
1433        &mut self,
1434        lexer: &mut Lexer<'a>,
1435        word: &'a str,
1436        span: Span,
1437        ctx: &mut ExpressionContext<'a, '_, '_>,
1438    ) -> Result<'a, Option<ast::Type<'a>>> {
1439        if let Some(scalar) = conv::get_scalar_type(&lexer.enable_extensions, span, word)? {
1440            return Ok(Some(ast::Type::Scalar(scalar)));
1441        }
1442
1443        Ok(Some(match word {
1444            "vec2" => {
1445                let (ty, ty_span) = self.singular_generic(lexer, ctx)?;
1446                ast::Type::Vector {
1447                    size: crate::VectorSize::Bi,
1448                    ty,
1449                    ty_span,
1450                }
1451            }
1452            "vec2i" => ast::Type::Vector {
1453                size: crate::VectorSize::Bi,
1454                ty: ctx.new_scalar(Scalar::I32),
1455                ty_span: Span::UNDEFINED,
1456            },
1457            "vec2u" => ast::Type::Vector {
1458                size: crate::VectorSize::Bi,
1459                ty: ctx.new_scalar(Scalar::U32),
1460                ty_span: Span::UNDEFINED,
1461            },
1462            "vec2f" => ast::Type::Vector {
1463                size: crate::VectorSize::Bi,
1464                ty: ctx.new_scalar(Scalar::F32),
1465                ty_span: Span::UNDEFINED,
1466            },
1467            "vec2h" => ast::Type::Vector {
1468                size: crate::VectorSize::Bi,
1469                ty: ctx.new_scalar(Scalar::F16),
1470                ty_span: Span::UNDEFINED,
1471            },
1472            "vec3" => {
1473                let (ty, ty_span) = self.singular_generic(lexer, ctx)?;
1474                ast::Type::Vector {
1475                    size: crate::VectorSize::Tri,
1476                    ty,
1477                    ty_span,
1478                }
1479            }
1480            "vec3i" => ast::Type::Vector {
1481                size: crate::VectorSize::Tri,
1482                ty: ctx.new_scalar(Scalar::I32),
1483                ty_span: Span::UNDEFINED,
1484            },
1485            "vec3u" => ast::Type::Vector {
1486                size: crate::VectorSize::Tri,
1487                ty: ctx.new_scalar(Scalar::U32),
1488                ty_span: Span::UNDEFINED,
1489            },
1490            "vec3f" => ast::Type::Vector {
1491                size: crate::VectorSize::Tri,
1492                ty: ctx.new_scalar(Scalar::F32),
1493                ty_span: Span::UNDEFINED,
1494            },
1495            "vec3h" => ast::Type::Vector {
1496                size: crate::VectorSize::Tri,
1497                ty: ctx.new_scalar(Scalar::F16),
1498                ty_span: Span::UNDEFINED,
1499            },
1500            "vec4" => {
1501                let (ty, ty_span) = self.singular_generic(lexer, ctx)?;
1502                ast::Type::Vector {
1503                    size: crate::VectorSize::Quad,
1504                    ty,
1505                    ty_span,
1506                }
1507            }
1508            "vec4i" => ast::Type::Vector {
1509                size: crate::VectorSize::Quad,
1510                ty: ctx.new_scalar(Scalar::I32),
1511                ty_span: Span::UNDEFINED,
1512            },
1513            "vec4u" => ast::Type::Vector {
1514                size: crate::VectorSize::Quad,
1515                ty: ctx.new_scalar(Scalar::U32),
1516                ty_span: Span::UNDEFINED,
1517            },
1518            "vec4f" => ast::Type::Vector {
1519                size: crate::VectorSize::Quad,
1520                ty: ctx.new_scalar(Scalar::F32),
1521                ty_span: Span::UNDEFINED,
1522            },
1523            "vec4h" => ast::Type::Vector {
1524                size: crate::VectorSize::Quad,
1525                ty: ctx.new_scalar(Scalar::F16),
1526                ty_span: Span::UNDEFINED,
1527            },
1528            "mat2x2" => {
1529                self.matrix_with_type(lexer, ctx, crate::VectorSize::Bi, crate::VectorSize::Bi)?
1530            }
1531            "mat2x2f" => ast::Type::Matrix {
1532                columns: crate::VectorSize::Bi,
1533                rows: crate::VectorSize::Bi,
1534                ty: ctx.new_scalar(Scalar::F32),
1535                ty_span: Span::UNDEFINED,
1536            },
1537            "mat2x2h" => ast::Type::Matrix {
1538                columns: crate::VectorSize::Bi,
1539                rows: crate::VectorSize::Bi,
1540                ty: ctx.new_scalar(Scalar::F16),
1541                ty_span: Span::UNDEFINED,
1542            },
1543            "mat2x3" => {
1544                self.matrix_with_type(lexer, ctx, crate::VectorSize::Bi, crate::VectorSize::Tri)?
1545            }
1546            "mat2x3f" => ast::Type::Matrix {
1547                columns: crate::VectorSize::Bi,
1548                rows: crate::VectorSize::Tri,
1549                ty: ctx.new_scalar(Scalar::F32),
1550                ty_span: Span::UNDEFINED,
1551            },
1552            "mat2x3h" => ast::Type::Matrix {
1553                columns: crate::VectorSize::Bi,
1554                rows: crate::VectorSize::Tri,
1555                ty: ctx.new_scalar(Scalar::F16),
1556                ty_span: Span::UNDEFINED,
1557            },
1558            "mat2x4" => {
1559                self.matrix_with_type(lexer, ctx, crate::VectorSize::Bi, crate::VectorSize::Quad)?
1560            }
1561            "mat2x4f" => ast::Type::Matrix {
1562                columns: crate::VectorSize::Bi,
1563                rows: crate::VectorSize::Quad,
1564                ty: ctx.new_scalar(Scalar::F32),
1565                ty_span: Span::UNDEFINED,
1566            },
1567            "mat2x4h" => ast::Type::Matrix {
1568                columns: crate::VectorSize::Bi,
1569                rows: crate::VectorSize::Quad,
1570                ty: ctx.new_scalar(Scalar::F16),
1571                ty_span: Span::UNDEFINED,
1572            },
1573            "mat3x2" => {
1574                self.matrix_with_type(lexer, ctx, crate::VectorSize::Tri, crate::VectorSize::Bi)?
1575            }
1576            "mat3x2f" => ast::Type::Matrix {
1577                columns: crate::VectorSize::Tri,
1578                rows: crate::VectorSize::Bi,
1579                ty: ctx.new_scalar(Scalar::F32),
1580                ty_span: Span::UNDEFINED,
1581            },
1582            "mat3x2h" => ast::Type::Matrix {
1583                columns: crate::VectorSize::Tri,
1584                rows: crate::VectorSize::Bi,
1585                ty: ctx.new_scalar(Scalar::F16),
1586                ty_span: Span::UNDEFINED,
1587            },
1588            "mat3x3" => {
1589                self.matrix_with_type(lexer, ctx, crate::VectorSize::Tri, crate::VectorSize::Tri)?
1590            }
1591            "mat3x3f" => ast::Type::Matrix {
1592                columns: crate::VectorSize::Tri,
1593                rows: crate::VectorSize::Tri,
1594                ty: ctx.new_scalar(Scalar::F32),
1595                ty_span: Span::UNDEFINED,
1596            },
1597            "mat3x3h" => ast::Type::Matrix {
1598                columns: crate::VectorSize::Tri,
1599                rows: crate::VectorSize::Tri,
1600                ty: ctx.new_scalar(Scalar::F16),
1601                ty_span: Span::UNDEFINED,
1602            },
1603            "mat3x4" => {
1604                self.matrix_with_type(lexer, ctx, crate::VectorSize::Tri, crate::VectorSize::Quad)?
1605            }
1606            "mat3x4f" => ast::Type::Matrix {
1607                columns: crate::VectorSize::Tri,
1608                rows: crate::VectorSize::Quad,
1609                ty: ctx.new_scalar(Scalar::F32),
1610                ty_span: Span::UNDEFINED,
1611            },
1612            "mat3x4h" => ast::Type::Matrix {
1613                columns: crate::VectorSize::Tri,
1614                rows: crate::VectorSize::Quad,
1615                ty: ctx.new_scalar(Scalar::F16),
1616                ty_span: Span::UNDEFINED,
1617            },
1618            "mat4x2" => {
1619                self.matrix_with_type(lexer, ctx, crate::VectorSize::Quad, crate::VectorSize::Bi)?
1620            }
1621            "mat4x2f" => ast::Type::Matrix {
1622                columns: crate::VectorSize::Quad,
1623                rows: crate::VectorSize::Bi,
1624                ty: ctx.new_scalar(Scalar::F32),
1625                ty_span: Span::UNDEFINED,
1626            },
1627            "mat4x2h" => ast::Type::Matrix {
1628                columns: crate::VectorSize::Quad,
1629                rows: crate::VectorSize::Bi,
1630                ty: ctx.new_scalar(Scalar::F16),
1631                ty_span: Span::UNDEFINED,
1632            },
1633            "mat4x3" => {
1634                self.matrix_with_type(lexer, ctx, crate::VectorSize::Quad, crate::VectorSize::Tri)?
1635            }
1636            "mat4x3f" => ast::Type::Matrix {
1637                columns: crate::VectorSize::Quad,
1638                rows: crate::VectorSize::Tri,
1639                ty: ctx.new_scalar(Scalar::F32),
1640                ty_span: Span::UNDEFINED,
1641            },
1642            "mat4x3h" => ast::Type::Matrix {
1643                columns: crate::VectorSize::Quad,
1644                rows: crate::VectorSize::Tri,
1645                ty: ctx.new_scalar(Scalar::F16),
1646                ty_span: Span::UNDEFINED,
1647            },
1648            "mat4x4" => {
1649                self.matrix_with_type(lexer, ctx, crate::VectorSize::Quad, crate::VectorSize::Quad)?
1650            }
1651            "mat4x4f" => ast::Type::Matrix {
1652                columns: crate::VectorSize::Quad,
1653                rows: crate::VectorSize::Quad,
1654                ty: ctx.new_scalar(Scalar::F32),
1655                ty_span: Span::UNDEFINED,
1656            },
1657            "mat4x4h" => ast::Type::Matrix {
1658                columns: crate::VectorSize::Quad,
1659                rows: crate::VectorSize::Quad,
1660                ty: ctx.new_scalar(Scalar::F16),
1661                ty_span: Span::UNDEFINED,
1662            },
1663            "atomic" => {
1664                let scalar = lexer.next_scalar_generic()?;
1665                ast::Type::Atomic(scalar)
1666            }
1667            "ptr" => {
1668                lexer.expect_generic_paren('<')?;
1669                let (ident, span) = lexer.next_ident_with_span()?;
1670                let mut space = conv::map_address_space(ident, span)?;
1671                lexer.expect(Token::Separator(','))?;
1672                let base = self.type_decl(lexer, ctx)?;
1673                if let crate::AddressSpace::Storage { ref mut access } = space {
1674                    *access = if lexer.end_of_generic_arguments() {
1675                        let result = lexer.next_storage_access()?;
1676                        lexer.skip(Token::Separator(','));
1677                        result
1678                    } else {
1679                        crate::StorageAccess::LOAD
1680                    };
1681                }
1682                lexer.expect_generic_paren('>')?;
1683                ast::Type::Pointer { base, space }
1684            }
1685            "array" => {
1686                lexer.expect_generic_paren('<')?;
1687                let base = self.type_decl(lexer, ctx)?;
1688                let size = if lexer.end_of_generic_arguments() {
1689                    let size = self.const_generic_expression(lexer, ctx)?;
1690                    lexer.skip(Token::Separator(','));
1691                    ast::ArraySize::Constant(size)
1692                } else {
1693                    ast::ArraySize::Dynamic
1694                };
1695                lexer.expect_generic_paren('>')?;
1696
1697                ast::Type::Array { base, size }
1698            }
1699            "binding_array" => {
1700                lexer.expect_generic_paren('<')?;
1701                let base = self.type_decl(lexer, ctx)?;
1702                let size = if lexer.end_of_generic_arguments() {
1703                    let size = self.unary_expression(lexer, ctx)?;
1704                    lexer.skip(Token::Separator(','));
1705                    ast::ArraySize::Constant(size)
1706                } else {
1707                    ast::ArraySize::Dynamic
1708                };
1709                lexer.expect_generic_paren('>')?;
1710
1711                ast::Type::BindingArray { base, size }
1712            }
1713            "sampler" => ast::Type::Sampler { comparison: false },
1714            "sampler_comparison" => ast::Type::Sampler { comparison: true },
1715            "texture_1d" => {
1716                let (scalar, span) = lexer.next_scalar_generic_with_span()?;
1717                Self::check_texture_sample_type(scalar, span)?;
1718                ast::Type::Image {
1719                    dim: crate::ImageDimension::D1,
1720                    arrayed: false,
1721                    class: crate::ImageClass::Sampled {
1722                        kind: scalar.kind,
1723                        multi: false,
1724                    },
1725                }
1726            }
1727            "texture_1d_array" => {
1728                let (scalar, span) = lexer.next_scalar_generic_with_span()?;
1729                Self::check_texture_sample_type(scalar, span)?;
1730                ast::Type::Image {
1731                    dim: crate::ImageDimension::D1,
1732                    arrayed: true,
1733                    class: crate::ImageClass::Sampled {
1734                        kind: scalar.kind,
1735                        multi: false,
1736                    },
1737                }
1738            }
1739            "texture_2d" => {
1740                let (scalar, span) = lexer.next_scalar_generic_with_span()?;
1741                Self::check_texture_sample_type(scalar, span)?;
1742                ast::Type::Image {
1743                    dim: crate::ImageDimension::D2,
1744                    arrayed: false,
1745                    class: crate::ImageClass::Sampled {
1746                        kind: scalar.kind,
1747                        multi: false,
1748                    },
1749                }
1750            }
1751            "texture_2d_array" => {
1752                let (scalar, span) = lexer.next_scalar_generic_with_span()?;
1753                Self::check_texture_sample_type(scalar, span)?;
1754                ast::Type::Image {
1755                    dim: crate::ImageDimension::D2,
1756                    arrayed: true,
1757                    class: crate::ImageClass::Sampled {
1758                        kind: scalar.kind,
1759                        multi: false,
1760                    },
1761                }
1762            }
1763            "texture_3d" => {
1764                let (scalar, span) = lexer.next_scalar_generic_with_span()?;
1765                Self::check_texture_sample_type(scalar, span)?;
1766                ast::Type::Image {
1767                    dim: crate::ImageDimension::D3,
1768                    arrayed: false,
1769                    class: crate::ImageClass::Sampled {
1770                        kind: scalar.kind,
1771                        multi: false,
1772                    },
1773                }
1774            }
1775            "texture_cube" => {
1776                let (scalar, span) = lexer.next_scalar_generic_with_span()?;
1777                Self::check_texture_sample_type(scalar, span)?;
1778                ast::Type::Image {
1779                    dim: crate::ImageDimension::Cube,
1780                    arrayed: false,
1781                    class: crate::ImageClass::Sampled {
1782                        kind: scalar.kind,
1783                        multi: false,
1784                    },
1785                }
1786            }
1787            "texture_cube_array" => {
1788                let (scalar, span) = lexer.next_scalar_generic_with_span()?;
1789                Self::check_texture_sample_type(scalar, span)?;
1790                ast::Type::Image {
1791                    dim: crate::ImageDimension::Cube,
1792                    arrayed: true,
1793                    class: crate::ImageClass::Sampled {
1794                        kind: scalar.kind,
1795                        multi: false,
1796                    },
1797                }
1798            }
1799            "texture_multisampled_2d" => {
1800                let (scalar, span) = lexer.next_scalar_generic_with_span()?;
1801                Self::check_texture_sample_type(scalar, span)?;
1802                ast::Type::Image {
1803                    dim: crate::ImageDimension::D2,
1804                    arrayed: false,
1805                    class: crate::ImageClass::Sampled {
1806                        kind: scalar.kind,
1807                        multi: true,
1808                    },
1809                }
1810            }
1811            "texture_multisampled_2d_array" => {
1812                let (scalar, span) = lexer.next_scalar_generic_with_span()?;
1813                Self::check_texture_sample_type(scalar, span)?;
1814                ast::Type::Image {
1815                    dim: crate::ImageDimension::D2,
1816                    arrayed: true,
1817                    class: crate::ImageClass::Sampled {
1818                        kind: scalar.kind,
1819                        multi: true,
1820                    },
1821                }
1822            }
1823            "texture_depth_2d" => ast::Type::Image {
1824                dim: crate::ImageDimension::D2,
1825                arrayed: false,
1826                class: crate::ImageClass::Depth { multi: false },
1827            },
1828            "texture_depth_2d_array" => ast::Type::Image {
1829                dim: crate::ImageDimension::D2,
1830                arrayed: true,
1831                class: crate::ImageClass::Depth { multi: false },
1832            },
1833            "texture_depth_cube" => ast::Type::Image {
1834                dim: crate::ImageDimension::Cube,
1835                arrayed: false,
1836                class: crate::ImageClass::Depth { multi: false },
1837            },
1838            "texture_depth_cube_array" => ast::Type::Image {
1839                dim: crate::ImageDimension::Cube,
1840                arrayed: true,
1841                class: crate::ImageClass::Depth { multi: false },
1842            },
1843            "texture_depth_multisampled_2d" => ast::Type::Image {
1844                dim: crate::ImageDimension::D2,
1845                arrayed: false,
1846                class: crate::ImageClass::Depth { multi: true },
1847            },
1848            "texture_storage_1d" => {
1849                let (format, access) = lexer.next_format_generic()?;
1850                ast::Type::Image {
1851                    dim: crate::ImageDimension::D1,
1852                    arrayed: false,
1853                    class: crate::ImageClass::Storage { format, access },
1854                }
1855            }
1856            "texture_storage_1d_array" => {
1857                let (format, access) = lexer.next_format_generic()?;
1858                ast::Type::Image {
1859                    dim: crate::ImageDimension::D1,
1860                    arrayed: true,
1861                    class: crate::ImageClass::Storage { format, access },
1862                }
1863            }
1864            "texture_storage_2d" => {
1865                let (format, access) = lexer.next_format_generic()?;
1866                ast::Type::Image {
1867                    dim: crate::ImageDimension::D2,
1868                    arrayed: false,
1869                    class: crate::ImageClass::Storage { format, access },
1870                }
1871            }
1872            "texture_storage_2d_array" => {
1873                let (format, access) = lexer.next_format_generic()?;
1874                ast::Type::Image {
1875                    dim: crate::ImageDimension::D2,
1876                    arrayed: true,
1877                    class: crate::ImageClass::Storage { format, access },
1878                }
1879            }
1880            "texture_storage_3d" => {
1881                let (format, access) = lexer.next_format_generic()?;
1882                ast::Type::Image {
1883                    dim: crate::ImageDimension::D3,
1884                    arrayed: false,
1885                    class: crate::ImageClass::Storage { format, access },
1886                }
1887            }
1888            "acceleration_structure" => {
1889                let vertex_return = lexer.next_acceleration_structure_flags()?;
1890                ast::Type::AccelerationStructure { vertex_return }
1891            }
1892            "ray_query" => {
1893                let vertex_return = lexer.next_acceleration_structure_flags()?;
1894                ast::Type::RayQuery { vertex_return }
1895            }
1896            "RayDesc" => ast::Type::RayDesc,
1897            "RayIntersection" => ast::Type::RayIntersection,
1898            _ => return Ok(None),
1899        }))
1900    }
1901
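        /// Check that `scalar` is a valid sampled texture component type,
        /// reporting an error at `span` otherwise.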
1902    fn check_texture_sample_type(scalar: Scalar, span: Span) -> Result<'static, ()> {
1903        use crate::ScalarKind::*;
1904        // Validate according to https://gpuweb.github.io/gpuweb/wgsl/#sampled-texture-type
1905        match scalar {
1906            Scalar {
1907                kind: Float | Sint | Uint,
1908                width: 4,
1909            } => Ok(()),
1910            Scalar {
1911                kind: Uint,
1912                width: 8,
1913            } => Ok(()),
1914            _ => Err(Box::new(Error::BadTextureSampleType { span, scalar })),
1915        }
1916    }
1917
1918    /// Parse a type declaration: either a predeclared type or a reference to a user-defined type.
1919    fn type_decl<'a>(
1920        &mut self,
1921        lexer: &mut Lexer<'a>,
1922        ctx: &mut ExpressionContext<'a, '_, '_>,
1923    ) -> Result<'a, Handle<ast::Type<'a>>> {
1924        self.track_recursion(|this| {
1925            this.push_rule_span(Rule::TypeDecl, lexer);
1926
1927            let (name, span) = lexer.next_ident_with_span()?;
1928
1929            let ty = match this.type_decl_impl(lexer, name, span, ctx)? {
1930                Some(ty) => ty,
1931                None => {
1932                    ctx.unresolved.insert(ast::Dependency {
1933                        ident: name,
1934                        usage: span,
1935                    });
1936                    ast::Type::User(ast::Ident { name, span })
1937                }
1938            };
1939
1940            this.pop_rule_span(lexer);
1941
1942            let handle = ctx.types.append(ty, Span::UNDEFINED);
1943            Ok(handle)
1944        })
1945    }
1946
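        /// Parse the operator and right-hand side of an assignment, compound
        /// assignment, increment, or decrement statement whose left-hand side is
        /// `target`, and push the resulting statement onto `block`.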
1947    fn assignment_op_and_rhs<'a>(
1948        &mut self,
1949        lexer: &mut Lexer<'a>,
1950        ctx: &mut ExpressionContext<'a, '_, '_>,
1951        block: &mut ast::Block<'a>,
1952        target: Handle<ast::Expression<'a>>,
1953        span_start: usize,
1954    ) -> Result<'a, ()> {
1955        use crate::BinaryOperator as Bo;
1956
1957        let op = lexer.next();
1958        let (op, value) = match op {
1959            (Token::Operation('='), _) => {
1960                let value = self.general_expression(lexer, ctx)?;
1961                (None, value)
1962            }
1963            (Token::AssignmentOperation(c), _) => {
1964                let op = match c {
1965                    '<' => Bo::ShiftLeft,
1966                    '>' => Bo::ShiftRight,
1967                    '+' => Bo::Add,
1968                    '-' => Bo::Subtract,
1969                    '*' => Bo::Multiply,
1970                    '/' => Bo::Divide,
1971                    '%' => Bo::Modulo,
1972                    '&' => Bo::And,
1973                    '|' => Bo::InclusiveOr,
1974                    '^' => Bo::ExclusiveOr,
1975                    // Note: `consume_token` shouldn't produce any other assignment ops
1976                    _ => unreachable!(),
1977                };
1978
1979                let value = self.general_expression(lexer, ctx)?;
1980                (Some(op), value)
1981            }
1982            token @ (Token::IncrementOperation | Token::DecrementOperation, _) => {
1983                let op = match token.0 {
1984                    Token::IncrementOperation => ast::StatementKind::Increment,
1985                    Token::DecrementOperation => ast::StatementKind::Decrement,
1986                    _ => unreachable!(),
1987                };
1988
1989                let span = lexer.span_from(span_start);
1990                block.stmts.push(ast::Statement {
1991                    kind: op(target),
1992                    span,
1993                });
1994                return Ok(());
1995            }
1996            _ => return Err(Box::new(Error::Unexpected(op.1, ExpectedToken::Assignment))),
1997        };
1998
1999        let span = lexer.span_from(span_start);
2000        block.stmts.push(ast::Statement {
2001            kind: ast::StatementKind::Assign { target, op, value },
2002            span,
2003        });
2004        Ok(())
2005    }
2006
2007    /// Parse an assignment statement (also handles increment and decrement statements).
2008    fn assignment_statement<'a>(
2009        &mut self,
2010        lexer: &mut Lexer<'a>,
2011        ctx: &mut ExpressionContext<'a, '_, '_>,
2012        block: &mut ast::Block<'a>,
2013    ) -> Result<'a, ()> {
2014        let span_start = lexer.start_byte_offset();
2015        let target = self.lhs_expression(lexer, ctx)?;
2016        self.assignment_op_and_rhs(lexer, ctx, block, target, span_start)
2017    }
2018
2019    /// Parse a function call statement.
2020    /// Expects `ident` to have already been consumed (it is no longer in the lexer).
2021    fn function_statement<'a>(
2022        &mut self,
2023        lexer: &mut Lexer<'a>,
2024        ident: &'a str,
2025        ident_span: Span,
2026        span_start: usize,
2027        context: &mut ExpressionContext<'a, '_, '_>,
2028        block: &mut ast::Block<'a>,
2029    ) -> Result<'a, ()> {
2030        self.push_rule_span(Rule::SingularExpr, lexer);
2031
2032        context.unresolved.insert(ast::Dependency {
2033            ident,
2034            usage: ident_span,
2035        });
2036        let arguments = self.arguments(lexer, context)?;
2037        let span = lexer.span_from(span_start);
2038
2039        block.stmts.push(ast::Statement {
2040            kind: ast::StatementKind::Call {
2041                function: ast::Ident {
2042                    name: ident,
2043                    span: ident_span,
2044                },
2045                arguments,
2046            },
2047            span,
2048        });
2049
2050        self.pop_rule_span(lexer);
2051
2052        Ok(())
2053    }
2054
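        /// Parse a statement that is either a function call or an assignment,
        /// using a two-token lookahead to distinguish between the two.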
2055    fn function_call_or_assignment_statement<'a>(
2056        &mut self,
2057        lexer: &mut Lexer<'a>,
2058        context: &mut ExpressionContext<'a, '_, '_>,
2059        block: &mut ast::Block<'a>,
2060    ) -> Result<'a, ()> {
2061        let span_start = lexer.start_byte_offset();
2062        match lexer.peek() {
2063            (Token::Word(name), span) => {
2064                // A little hack for two-token lookahead.
2065                let cloned = lexer.clone();
2066                let _ = lexer.next();
2067                match lexer.peek() {
2068                    (Token::Paren('('), _) => {
2069                        self.function_statement(lexer, name, span, span_start, context, block)
2070                    }
2071                    _ => {
2072                        *lexer = cloned;
2073                        self.assignment_statement(lexer, context, block)
2074                    }
2075                }
2076            }
2077            _ => self.assignment_statement(lexer, context, block),
2078        }
2079    }
2080
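        /// Parse a single statement and append it to `block`.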
2081    fn statement<'a>(
2082        &mut self,
2083        lexer: &mut Lexer<'a>,
2084        ctx: &mut ExpressionContext<'a, '_, '_>,
2085        block: &mut ast::Block<'a>,
2086        brace_nesting_level: u8,
2087    ) -> Result<'a, ()> {
2088        self.track_recursion(|this| {
2089            this.push_rule_span(Rule::Statement, lexer);
2090            match lexer.peek() {
2091                (Token::Separator(';'), _) => {
2092                    let _ = lexer.next();
2093                    this.pop_rule_span(lexer);
2094                }
2095                (Token::Paren('{') | Token::Attribute, _) => {
2096                    let (inner, span) = this.block(lexer, ctx, brace_nesting_level)?;
2097                    block.stmts.push(ast::Statement {
2098                        kind: ast::StatementKind::Block(inner),
2099                        span,
2100                    });
2101                    this.pop_rule_span(lexer);
2102                }
2103                (Token::Word(word), _) => {
2104                    let kind = match word {
2105                        "_" => {
2106                            let _ = lexer.next();
2107                            lexer.expect(Token::Operation('='))?;
2108                            let expr = this.general_expression(lexer, ctx)?;
2109                            lexer.expect(Token::Separator(';'))?;
2110
2111                            ast::StatementKind::Phony(expr)
2112                        }
2113                        "let" => {
2114                            let _ = lexer.next();
2115                            let name = lexer.next_ident()?;
2116
2117                            let given_ty = if lexer.skip(Token::Separator(':')) {
2118                                let ty = this.type_decl(lexer, ctx)?;
2119                                Some(ty)
2120                            } else {
2121                                None
2122                            };
2123                            lexer.expect(Token::Operation('='))?;
2124                            let expr_id = this.general_expression(lexer, ctx)?;
2125                            lexer.expect(Token::Separator(';'))?;
2126
2127                            let handle = ctx.declare_local(name)?;
2128                            ast::StatementKind::LocalDecl(ast::LocalDecl::Let(ast::Let {
2129                                name,
2130                                ty: given_ty,
2131                                init: expr_id,
2132                                handle,
2133                            }))
2134                        }
2135                        "const" => {
2136                            let _ = lexer.next();
2137                            let name = lexer.next_ident()?;
2138
2139                            let given_ty = if lexer.skip(Token::Separator(':')) {
2140                                let ty = this.type_decl(lexer, ctx)?;
2141                                Some(ty)
2142                            } else {
2143                                None
2144                            };
2145                            lexer.expect(Token::Operation('='))?;
2146                            let expr_id = this.general_expression(lexer, ctx)?;
2147                            lexer.expect(Token::Separator(';'))?;
2148
2149                            let handle = ctx.declare_local(name)?;
2150                            ast::StatementKind::LocalDecl(ast::LocalDecl::Const(ast::LocalConst {
2151                                name,
2152                                ty: given_ty,
2153                                init: expr_id,
2154                                handle,
2155                            }))
2156                        }
2157                        "var" => {
2158                            let _ = lexer.next();
2159
2160                            let name = lexer.next_ident()?;
2161                            let ty = if lexer.skip(Token::Separator(':')) {
2162                                let ty = this.type_decl(lexer, ctx)?;
2163                                Some(ty)
2164                            } else {
2165                                None
2166                            };
2167
2168                            let init = if lexer.skip(Token::Operation('=')) {
2169                                let init = this.general_expression(lexer, ctx)?;
2170                                Some(init)
2171                            } else {
2172                                None
2173                            };
2174
2175                            lexer.expect(Token::Separator(';'))?;
2176
2177                            let handle = ctx.declare_local(name)?;
2178                            ast::StatementKind::LocalDecl(ast::LocalDecl::Var(ast::LocalVariable {
2179                                name,
2180                                ty,
2181                                init,
2182                                handle,
2183                            }))
2184                        }
2185                        "return" => {
2186                            let _ = lexer.next();
2187                            let value = if lexer.peek().0 != Token::Separator(';') {
2188                                let handle = this.general_expression(lexer, ctx)?;
2189                                Some(handle)
2190                            } else {
2191                                None
2192                            };
2193                            lexer.expect(Token::Separator(';'))?;
2194                            ast::StatementKind::Return { value }
2195                        }
2196                        "if" => {
2197                            let _ = lexer.next();
2198                            let condition = this.general_expression(lexer, ctx)?;
2199
2200                            let accept = this.block(lexer, ctx, brace_nesting_level)?.0;
2201
2202                            let mut elsif_stack = Vec::new();
2203                            let mut elseif_span_start = lexer.start_byte_offset();
2204                            let mut reject = loop {
2205                                if !lexer.skip(Token::Word("else")) {
2206                                    break ast::Block::default();
2207                                }
2208
2209                                if !lexer.skip(Token::Word("if")) {
2210                                    // ... else { ... }
2211                                    break this.block(lexer, ctx, brace_nesting_level)?.0;
2212                                }
2213
2214                                // ... else if (...) { ... }
2215                                let other_condition = this.general_expression(lexer, ctx)?;
2216                                let other_block = this.block(lexer, ctx, brace_nesting_level)?;
2217                                elsif_stack.push((elseif_span_start, other_condition, other_block));
2218                                elseif_span_start = lexer.start_byte_offset();
2219                            };
2220
2221                            // Reverse-fold the else-if blocks.
2222                            // Note: we may consider uplifting this to the IR.
2223                            for (other_span_start, other_cond, other_block) in
2224                                elsif_stack.into_iter().rev()
2225                            {
2226                                let sub_stmt = ast::StatementKind::If {
2227                                    condition: other_cond,
2228                                    accept: other_block.0,
2229                                    reject,
2230                                };
2231                                reject = ast::Block::default();
2232                                let span = lexer.span_from(other_span_start);
2233                                reject.stmts.push(ast::Statement {
2234                                    kind: sub_stmt,
2235                                    span,
2236                                })
2237                            }
2238
2239                            ast::StatementKind::If {
2240                                condition,
2241                                accept,
2242                                reject,
2243                            }
2244                        }
2245                        "switch" => {
2246                            let _ = lexer.next();
2247                            let selector = this.general_expression(lexer, ctx)?;
2248                            let brace_span = lexer.expect_span(Token::Paren('{'))?;
2249                            let brace_nesting_level =
2250                                Self::increase_brace_nesting(brace_nesting_level, brace_span)?;
2251                            let mut cases = Vec::new();
2252
2253                            loop {
2254                                // cases + default
2255                                match lexer.next() {
2256                                    (Token::Word("case"), _) => {
2257                                        // parse a list of values
2258                                        let value = loop {
2259                                            let value = this.switch_value(lexer, ctx)?;
2260                                            if lexer.skip(Token::Separator(',')) {
2261                                                if lexer.skip(Token::Separator(':')) {
2262                                                    break value;
2263                                                }
2264                                            } else {
2265                                                lexer.skip(Token::Separator(':'));
2266                                                break value;
2267                                            }
2268                                            cases.push(ast::SwitchCase {
2269                                                value,
2270                                                body: ast::Block::default(),
2271                                                fall_through: true,
2272                                            });
2273                                        };
2274
2275                                        let body = this.block(lexer, ctx, brace_nesting_level)?.0;
2276
2277                                        cases.push(ast::SwitchCase {
2278                                            value,
2279                                            body,
2280                                            fall_through: false,
2281                                        });
2282                                    }
2283                                    (Token::Word("default"), _) => {
2284                                        lexer.skip(Token::Separator(':'));
2285                                        let body = this.block(lexer, ctx, brace_nesting_level)?.0;
2286                                        cases.push(ast::SwitchCase {
2287                                            value: ast::SwitchValue::Default,
2288                                            body,
2289                                            fall_through: false,
2290                                        });
2291                                    }
2292                                    (Token::Paren('}'), _) => break,
2293                                    (_, span) => {
2294                                        return Err(Box::new(Error::Unexpected(
2295                                            span,
2296                                            ExpectedToken::SwitchItem,
2297                                        )))
2298                                    }
2299                                }
2300                            }
2301
2302                            ast::StatementKind::Switch { selector, cases }
2303                        }
2304                        "loop" => this.r#loop(lexer, ctx, brace_nesting_level)?,
2305                        "while" => {
2306                            let _ = lexer.next();
2307                            let mut body = ast::Block::default();
2308
2309                            let (condition, span) =
2310                                lexer.capture_span(|lexer| this.general_expression(lexer, ctx))?;
2311                            let mut reject = ast::Block::default();
2312                            reject.stmts.push(ast::Statement {
2313                                kind: ast::StatementKind::Break,
2314                                span,
2315                            });
2316
2317                            body.stmts.push(ast::Statement {
2318                                kind: ast::StatementKind::If {
2319                                    condition,
2320                                    accept: ast::Block::default(),
2321                                    reject,
2322                                },
2323                                span,
2324                            });
2325
2326                            let (block, span) = this.block(lexer, ctx, brace_nesting_level)?;
2327                            body.stmts.push(ast::Statement {
2328                                kind: ast::StatementKind::Block(block),
2329                                span,
2330                            });
2331
2332                            ast::StatementKind::Loop {
2333                                body,
2334                                continuing: ast::Block::default(),
2335                                break_if: None,
2336                            }
2337                        }
2338                        "for" => {
2339                            let _ = lexer.next();
2340                            lexer.expect(Token::Paren('('))?;
2341
2342                            ctx.local_table.push_scope();
2343
2344                            if !lexer.skip(Token::Separator(';')) {
2345                                let num_statements = block.stmts.len();
2346                                let (_, span) = {
2347                                    let ctx = &mut *ctx;
2348                                    let block = &mut *block;
2349                                    lexer.capture_span(|lexer| {
2350                                        this.statement(lexer, ctx, block, brace_nesting_level)
2351                                    })?
2352                                };
2353
2354                                if block.stmts.len() != num_statements {
2355                                    match block.stmts.last().unwrap().kind {
2356                                        ast::StatementKind::Call { .. }
2357                                        | ast::StatementKind::Assign { .. }
2358                                        | ast::StatementKind::LocalDecl(_) => {}
2359                                        _ => {
2360                                            return Err(Box::new(Error::InvalidForInitializer(
2361                                                span,
2362                                            )))
2363                                        }
2364                                    }
2365                                }
2366                            };
2367
2368                            let mut body = ast::Block::default();
2369                            if !lexer.skip(Token::Separator(';')) {
2370                                let (condition, span) =
2371                                    lexer.capture_span(|lexer| -> Result<'_, _> {
2372                                        let condition = this.general_expression(lexer, ctx)?;
2373                                        lexer.expect(Token::Separator(';'))?;
2374                                        Ok(condition)
2375                                    })?;
2376                                let mut reject = ast::Block::default();
2377                                reject.stmts.push(ast::Statement {
2378                                    kind: ast::StatementKind::Break,
2379                                    span,
2380                                });
2381                                body.stmts.push(ast::Statement {
2382                                    kind: ast::StatementKind::If {
2383                                        condition,
2384                                        accept: ast::Block::default(),
2385                                        reject,
2386                                    },
2387                                    span,
2388                                });
2389                            };
2390
2391                            let mut continuing = ast::Block::default();
2392                            if !lexer.skip(Token::Paren(')')) {
2393                                this.function_call_or_assignment_statement(
2394                                    lexer,
2395                                    ctx,
2396                                    &mut continuing,
2397                                )?;
2398                                lexer.expect(Token::Paren(')'))?;
2399                            }
2400
2401                            let (block, span) = this.block(lexer, ctx, brace_nesting_level)?;
2402                            body.stmts.push(ast::Statement {
2403                                kind: ast::StatementKind::Block(block),
2404                                span,
2405                            });
2406
2407                            ctx.local_table.pop_scope();
2408
2409                            ast::StatementKind::Loop {
2410                                body,
2411                                continuing,
2412                                break_if: None,
2413                            }
2414                        }
2415                        "break" => {
2416                            let (_, span) = lexer.next();
2417                            // Check whether the next token is an `if`; this indicates
2418                            // that the user tried to write a `break if`, which
2419                            // is illegal in this position.
2420                            let (peeked_token, peeked_span) = lexer.peek();
2421                            if let Token::Word("if") = peeked_token {
2422                                let span = span.until(&peeked_span);
2423                                return Err(Box::new(Error::InvalidBreakIf(span)));
2424                            }
2425                            lexer.expect(Token::Separator(';'))?;
2426                            ast::StatementKind::Break
2427                        }
2428                        "continue" => {
2429                            let _ = lexer.next();
2430                            lexer.expect(Token::Separator(';'))?;
2431                            ast::StatementKind::Continue
2432                        }
2433                        "discard" => {
2434                            let _ = lexer.next();
2435                            lexer.expect(Token::Separator(';'))?;
2436                            ast::StatementKind::Kill
2437                        }
2438                        // https://www.w3.org/TR/WGSL/#const-assert-statement
2439                        "const_assert" => {
2440                            let _ = lexer.next();
2441                            // parentheses are optional
2442                            let paren = lexer.skip(Token::Paren('('));
2443
2444                            let condition = this.general_expression(lexer, ctx)?;
2445
2446                            if paren {
2447                                lexer.expect(Token::Paren(')'))?;
2448                            }
2449                            lexer.expect(Token::Separator(';'))?;
2450                            ast::StatementKind::ConstAssert(condition)
2451                        }
2452                        // assignment or a function call
2453                        _ => {
2454                            this.function_call_or_assignment_statement(lexer, ctx, block)?;
2455                            lexer.expect(Token::Separator(';'))?;
2456                            this.pop_rule_span(lexer);
2457                            return Ok(());
2458                        }
2459                    };
2460
2461                    let span = this.pop_rule_span(lexer);
2462                    block.stmts.push(ast::Statement { kind, span });
2463                }
2464                _ => {
2465                    this.assignment_statement(lexer, ctx, block)?;
2466                    lexer.expect(Token::Separator(';'))?;
2467                    this.pop_rule_span(lexer);
2468                }
2469            }
2470            Ok(())
2471        })
2472    }
2473
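        /// Parse a `loop` statement: the loop body, followed by an optional
        /// `continuing` block that may end with a `break if <expr>;`.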
2474    fn r#loop<'a>(
2475        &mut self,
2476        lexer: &mut Lexer<'a>,
2477        ctx: &mut ExpressionContext<'a, '_, '_>,
2478        brace_nesting_level: u8,
2479    ) -> Result<'a, ast::StatementKind<'a>> {
2480        let _ = lexer.next();
2481        let mut body = ast::Block::default();
2482        let mut continuing = ast::Block::default();
2483        let mut break_if = None;
2484
2485        let brace_span = lexer.expect_span(Token::Paren('{'))?;
2486        let brace_nesting_level = Self::increase_brace_nesting(brace_nesting_level, brace_span)?;
2487
2488        ctx.local_table.push_scope();
2489
2490        loop {
2491            if lexer.skip(Token::Word("continuing")) {
2492                // Branch for the `continuing` block; this must be
2493                // the last thing in the loop body.
2494
2495                // Expect an opening brace to start the continuing block.
2496                let brace_span = lexer.expect_span(Token::Paren('{'))?;
2497                let brace_nesting_level =
2498                    Self::increase_brace_nesting(brace_nesting_level, brace_span)?;
2499                loop {
2500                    if lexer.skip(Token::Word("break")) {
2501                        // Branch for the `break if` statement; it
2502                        // has the form `break if <expr>;` and must be the last
2503                        // statement in a continuing block.
2504
2505                        // The `break` must be followed by an `if` to form
2506                        // the `break if` statement.
2507                        lexer.expect(Token::Word("if"))?;
2508
2509                        let condition = self.general_expression(lexer, ctx)?;
2510                        // Set the condition of the break if to the newly parsed
2511                        // expression
2512                        break_if = Some(condition);
2513
2514                        // Expect a semicolon to close the statement
2515                        lexer.expect(Token::Separator(';'))?;
2516                        // Expect a closing brace to close the continuing block,
2517                        // since the break if must be the last statement
2518                        lexer.expect(Token::Paren('}'))?;
2519                        // Stop parsing the continuing block
2520                        break;
2521                    } else if lexer.skip(Token::Paren('}')) {
2522                        // If we encounter a closing brace, it means we have reached
2523                        // the end of the continuing block and should stop processing.
2524                        break;
2525                    } else {
2526                        // Otherwise try to parse a statement
2527                        self.statement(lexer, ctx, &mut continuing, brace_nesting_level)?;
2528                    }
2529                }
2530                // Since the continuing block must be the last part of the loop body,
2531                // we expect to see a closing brace to end the loop body
2532                lexer.expect(Token::Paren('}'))?;
2533                break;
2534            }
2535            if lexer.skip(Token::Paren('}')) {
2536                // If we encounter a closing brace, it means we have reached
2537                // the end of the loop body and should stop processing.
2538                break;
2539            }
2540            // Otherwise try to parse a statement
2541            self.statement(lexer, ctx, &mut body, brace_nesting_level)?;
2542        }
2543
2544        ctx.local_table.pop_scope();
2545
2546        Ok(ast::StatementKind::Loop {
2547            body,
2548            continuing,
2549            break_if,
2550        })
2551    }
2552
2553    /// Parse a `compound_statement`: a brace-delimited block of statements.
2554    fn block<'a>(
2555        &mut self,
2556        lexer: &mut Lexer<'a>,
2557        ctx: &mut ExpressionContext<'a, '_, '_>,
2558        brace_nesting_level: u8,
2559    ) -> Result<'a, (ast::Block<'a>, Span)> {
2560        self.push_rule_span(Rule::Block, lexer);
2561
2562        ctx.local_table.push_scope();
2563
2564        let mut diagnostic_filters = DiagnosticFilterMap::new();
2565
2566        self.push_rule_span(Rule::Attribute, lexer);
2567        while lexer.skip(Token::Attribute) {
2568            let (name, name_span) = lexer.next_ident_with_span()?;
2569            if let Some(DirectiveKind::Diagnostic) = DirectiveKind::from_ident(name) {
2570                let filter = self.diagnostic_filter(lexer)?;
2571                let span = self.peek_rule_span(lexer);
2572                diagnostic_filters
2573                    .add(filter, span, ShouldConflictOnFullDuplicate::Yes)
2574                    .map_err(|e| Box::new(e.into()))?;
2575            } else {
2576                return Err(Box::new(Error::Unexpected(
2577                    name_span,
2578                    ExpectedToken::DiagnosticAttribute,
2579                )));
2580            }
2581        }
2582        self.pop_rule_span(lexer);
2583
2584        if !diagnostic_filters.is_empty() {
2585            return Err(Box::new(
2586                Error::DiagnosticAttributeNotYetImplementedAtParseSite {
2587                    site_name_plural: "compound statements",
2588                    spans: diagnostic_filters.spans().collect(),
2589                },
2590            ));
2591        }
2592
2593        let brace_span = lexer.expect_span(Token::Paren('{'))?;
2594        let brace_nesting_level = Self::increase_brace_nesting(brace_nesting_level, brace_span)?;
2595        let mut block = ast::Block::default();
2596        while !lexer.skip(Token::Paren('}')) {
2597            self.statement(lexer, ctx, &mut block, brace_nesting_level)?;
2598        }
2599
2600        ctx.local_table.pop_scope();
2601
2602        let span = self.pop_rule_span(lexer);
2603        Ok((block, span))
2604    }
2605
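        /// Parses the attribute list in front of a function parameter or return
        /// type, using a [`BindingParser`], and turns it into an [`ast::Binding`]
        /// if any attributes are present. An illustrative WGSL sketch (not from
        /// the test suite):
        ///
        /// ```wgsl
        /// @location(0) @interpolate(flat) index: u32
        /// @builtin(position) position: vec4<f32>
        /// ```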
2606    fn varying_binding<'a>(
2607        &mut self,
2608        lexer: &mut Lexer<'a>,
2609        ctx: &mut ExpressionContext<'a, '_, '_>,
2610    ) -> Result<'a, Option<ast::Binding<'a>>> {
2611        let mut bind_parser = BindingParser::default();
2612        self.push_rule_span(Rule::Attribute, lexer);
2613
2614        while lexer.skip(Token::Attribute) {
2615            let (word, span) = lexer.next_ident_with_span()?;
2616            bind_parser.parse(self, lexer, word, span, ctx)?;
2617        }
2618
2619        let span = self.pop_rule_span(lexer);
2620        bind_parser.finish(span)
2621    }
2622
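        /// Parses a function declaration, assuming the `fn` keyword has already
        /// been consumed by [`Self::global_decl`]. An illustrative WGSL sketch
        /// (not from the test suite):
        ///
        /// ```wgsl
        /// fn scale(v: vec3<f32>, factor: f32) -> vec3<f32> {
        ///     return v * factor;
        /// }
        /// ```
        ///
        /// Entry-point attributes such as `@vertex` or `@compute` are handled by
        /// the caller, which fills in [`ast::Function::entry_point`] afterwards.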
2623    fn function_decl<'a>(
2624        &mut self,
2625        lexer: &mut Lexer<'a>,
2626        diagnostic_filter_leaf: Option<Handle<DiagnosticFilterNode>>,
2627        must_use: Option<Span>,
2628        out: &mut ast::TranslationUnit<'a>,
2629        dependencies: &mut FastIndexSet<ast::Dependency<'a>>,
2630    ) -> Result<'a, ast::Function<'a>> {
2631        self.push_rule_span(Rule::FunctionDecl, lexer);
2632        // read function name
2633        let fun_name = lexer.next_ident()?;
2634
2635        let mut locals = Arena::new();
2636
2637        let mut ctx = ExpressionContext {
2638            expressions: &mut out.expressions,
2639            local_table: &mut SymbolTable::default(),
2640            locals: &mut locals,
2641            types: &mut out.types,
2642            unresolved: dependencies,
2643        };
2644
2645        // start a scope that contains arguments as well as the function body
2646        ctx.local_table.push_scope();
2647
2648        // read parameter list
2649        let mut arguments = Vec::new();
2650        lexer.expect(Token::Paren('('))?;
2651        let mut ready = true;
2652        while !lexer.skip(Token::Paren(')')) {
2653            if !ready {
2654                return Err(Box::new(Error::Unexpected(
2655                    lexer.next().1,
2656                    ExpectedToken::Token(Token::Separator(',')),
2657                )));
2658            }
2659            let binding = self.varying_binding(lexer, &mut ctx)?;
2660
2661            let param_name = lexer.next_ident()?;
2662
2663            lexer.expect(Token::Separator(':'))?;
2664            let param_type = self.type_decl(lexer, &mut ctx)?;
2665
2666            let handle = ctx.declare_local(param_name)?;
2667            arguments.push(ast::FunctionArgument {
2668                name: param_name,
2669                ty: param_type,
2670                binding,
2671                handle,
2672            });
2673            ready = lexer.skip(Token::Separator(','));
2674        }
2675        // read return type
2676        let result = if lexer.skip(Token::Arrow) {
2677            let binding = self.varying_binding(lexer, &mut ctx)?;
2678            let ty = self.type_decl(lexer, &mut ctx)?;
2679            let must_use = must_use.is_some();
2680            Some(ast::FunctionResult {
2681                ty,
2682                binding,
2683                must_use,
2684            })
2685        } else if let Some(must_use) = must_use {
2686            return Err(Box::new(Error::FunctionMustUseReturnsVoid(
2687                must_use,
2688                self.peek_rule_span(lexer),
2689            )));
2690        } else {
2691            None
2692        };
2693
2694        // do not use `self.block` here, since we must not push a new scope
2695        lexer.expect(Token::Paren('{'))?;
2696        let brace_nesting_level = 1;
2697        let mut body = ast::Block::default();
2698        while !lexer.skip(Token::Paren('}')) {
2699            self.statement(lexer, &mut ctx, &mut body, brace_nesting_level)?;
2700        }
2701
2702        ctx.local_table.pop_scope();
2703
2704        let fun = ast::Function {
2705            entry_point: None,
2706            name: fun_name,
2707            arguments,
2708            result,
2709            body,
2710            diagnostic_filter_leaf,
2711        };
2712
2713        // done
2714        self.pop_rule_span(lexer);
2715
2716        Ok(fun)
2717    }
2718
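        /// Parses the comma-separated identifier list of an `enable` or
        /// `requires` directive, calling `handler` for each identifier and
        /// requiring a terminating `;`. The directive keyword itself has already
        /// been consumed by [`Self::parse`]. An illustrative WGSL sketch (not
        /// from the test suite):
        ///
        /// ```wgsl
        /// enable f16;
        /// requires readonly_and_readwrite_storage_textures;
        /// ```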
2719    fn directive_ident_list<'a>(
2720        &self,
2721        lexer: &mut Lexer<'a>,
2722        handler: impl FnMut(&'a str, Span) -> Result<'a, ()>,
2723    ) -> Result<'a, ()> {
2724        let mut handler = handler;
2725        'next_arg: loop {
2726            let (ident, span) = lexer.next_ident_with_span()?;
2727            handler(ident, span)?;
2728
2729            let expected_token = match lexer.peek().0 {
2730                Token::Separator(',') => {
2731                    let _ = lexer.next();
2732                    if matches!(lexer.peek().0, Token::Word(..)) {
2733                        continue 'next_arg;
2734                    }
2735                    ExpectedToken::AfterIdentListComma
2736                }
2737                _ => ExpectedToken::AfterIdentListArg,
2738            };
2739
2740            if !matches!(lexer.next().0, Token::Separator(';')) {
2741                return Err(Box::new(Error::Unexpected(span, expected_token)));
2742            }
2743
2744            break Ok(());
2745        }
2746    }
2747
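        /// Parses one module-scope declaration per call, together with the
        /// attributes that precede it, and appends it to `out.decls`. An
        /// illustrative WGSL sketch (not from the test suite) of declarations it
        /// handles, one per call:
        ///
        /// ```wgsl
        /// @group(0) @binding(0) var<uniform> params: Params;
        ///
        /// @fragment
        /// fn fs_main(@location(0) color: vec4<f32>) -> @location(0) vec4<f32> {
        ///     return color;
        /// }
        /// ```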
2748    fn global_decl<'a>(
2749        &mut self,
2750        lexer: &mut Lexer<'a>,
2751        out: &mut ast::TranslationUnit<'a>,
2752    ) -> Result<'a, ()> {
2753        // read attributes
2754        let mut binding = None;
2755        let mut stage = ParsedAttribute::default();
2756        let mut compute_span = Span::new(0, 0);
2757        let mut workgroup_size = ParsedAttribute::default();
2758        let mut early_depth_test = ParsedAttribute::default();
2759        let (mut bind_index, mut bind_group) =
2760            (ParsedAttribute::default(), ParsedAttribute::default());
2761        let mut id = ParsedAttribute::default();
2762
2763        let mut must_use: ParsedAttribute<Span> = ParsedAttribute::default();
2764
2765        let mut dependencies = FastIndexSet::default();
2766        let mut ctx = ExpressionContext {
2767            expressions: &mut out.expressions,
2768            local_table: &mut SymbolTable::default(),
2769            locals: &mut Arena::new(),
2770            types: &mut out.types,
2771            unresolved: &mut dependencies,
2772        };
2773        let mut diagnostic_filters = DiagnosticFilterMap::new();
2774        let ensure_no_diag_attrs = |on_what, filters: DiagnosticFilterMap| -> Result<()> {
2775            if filters.is_empty() {
2776                Ok(())
2777            } else {
2778                Err(Box::new(Error::DiagnosticAttributeNotSupported {
2779                    on_what,
2780                    spans: filters.spans().collect(),
2781                }))
2782            }
2783        };
2784
2785        self.push_rule_span(Rule::Attribute, lexer);
2786        while lexer.skip(Token::Attribute) {
2787            let (name, name_span) = lexer.next_ident_with_span()?;
2788            if let Some(DirectiveKind::Diagnostic) = DirectiveKind::from_ident(name) {
2789                let filter = self.diagnostic_filter(lexer)?;
2790                let span = self.peek_rule_span(lexer);
2791                diagnostic_filters
2792                    .add(filter, span, ShouldConflictOnFullDuplicate::Yes)
2793                    .map_err(|e| Box::new(e.into()))?;
2794                continue;
2795            }
2796            match name {
2797                "binding" => {
2798                    lexer.expect(Token::Paren('('))?;
2799                    bind_index.set(self.general_expression(lexer, &mut ctx)?, name_span)?;
2800                    lexer.expect(Token::Paren(')'))?;
2801                }
2802                "group" => {
2803                    lexer.expect(Token::Paren('('))?;
2804                    bind_group.set(self.general_expression(lexer, &mut ctx)?, name_span)?;
2805                    lexer.expect(Token::Paren(')'))?;
2806                }
2807                "id" => {
2808                    lexer.expect(Token::Paren('('))?;
2809                    id.set(self.general_expression(lexer, &mut ctx)?, name_span)?;
2810                    lexer.expect(Token::Paren(')'))?;
2811                }
2812                "vertex" => {
2813                    stage.set(ShaderStage::Vertex, name_span)?;
2814                }
2815                "fragment" => {
2816                    stage.set(ShaderStage::Fragment, name_span)?;
2817                }
2818                "compute" => {
2819                    stage.set(ShaderStage::Compute, name_span)?;
2820                    compute_span = name_span;
2821                }
2822                "workgroup_size" => {
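                        // Accepts one to three dimension expressions, e.g. the
                        // illustrative `@workgroup_size(64)` or `@workgroup_size(8, 8, 1)`.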
2823                    lexer.expect(Token::Paren('('))?;
2824                    let mut new_workgroup_size = [None; 3];
2825                    for (i, size) in new_workgroup_size.iter_mut().enumerate() {
2826                        *size = Some(self.general_expression(lexer, &mut ctx)?);
2827                        match lexer.next() {
2828                            (Token::Paren(')'), _) => break,
2829                            (Token::Separator(','), _) if i != 2 => (),
2830                            other => {
2831                                return Err(Box::new(Error::Unexpected(
2832                                    other.1,
2833                                    ExpectedToken::WorkgroupSizeSeparator,
2834                                )))
2835                            }
2836                        }
2837                    }
2838                    workgroup_size.set(new_workgroup_size, name_span)?;
2839                }
2840                "early_depth_test" => {
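                        // Plain `@early_depth_test`, or optionally a parenthesized
                        // conservative-depth argument using the identifiers accepted
                        // by `conv::map_conservative_depth`.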
2841                    let conservative = if lexer.skip(Token::Paren('(')) {
2842                        let (ident, ident_span) = lexer.next_ident_with_span()?;
2843                        let value = conv::map_conservative_depth(ident, ident_span)?;
2844                        lexer.expect(Token::Paren(')'))?;
2845                        Some(value)
2846                    } else {
2847                        None
2848                    };
2849                    early_depth_test.set(crate::EarlyDepthTest { conservative }, name_span)?;
2850                }
2851                "must_use" => {
2852                    must_use.set(name_span, name_span)?;
2853                }
2854                _ => return Err(Box::new(Error::UnknownAttribute(name_span))),
2855            }
2856        }
2857
2858        let attrib_span = self.pop_rule_span(lexer);
2859        match (bind_group.value, bind_index.value) {
2860            (Some(group), Some(index)) => {
2861                binding = Some(ast::ResourceBinding {
2862                    group,
2863                    binding: index,
2864                });
2865            }
2866            (Some(_), None) => {
2867                return Err(Box::new(Error::MissingAttribute("binding", attrib_span)))
2868            }
2869            (None, Some(_)) => return Err(Box::new(Error::MissingAttribute("group", attrib_span))),
2870            (None, None) => {}
2871        }
2872
2873        // read item
2874        let start = lexer.start_byte_offset();
2875        let kind = match lexer.next() {
2876            (Token::Separator(';'), _) => {
2877                ensure_no_diag_attrs(
2878                    DiagnosticAttributeNotSupportedPosition::SemicolonInModulePosition,
2879                    diagnostic_filters,
2880                )?;
2881                None
2882            }
2883            (Token::Word(word), directive_span) if DirectiveKind::from_ident(word).is_some() => {
2884                return Err(Box::new(Error::DirectiveAfterFirstGlobalDecl {
2885                    directive_span,
2886                }));
2887            }
2888            (Token::Word("struct"), _) => {
2889                ensure_no_diag_attrs("`struct`s".into(), diagnostic_filters)?;
2890
2891                let name = lexer.next_ident()?;
2892
2893                let members = self.struct_body(lexer, &mut ctx)?;
2894                Some(ast::GlobalDeclKind::Struct(ast::Struct { name, members }))
2895            }
2896            (Token::Word("alias"), _) => {
2897                ensure_no_diag_attrs("`alias`es".into(), diagnostic_filters)?;
2898
2899                let name = lexer.next_ident()?;
2900
2901                lexer.expect(Token::Operation('='))?;
2902                let ty = self.type_decl(lexer, &mut ctx)?;
2903                lexer.expect(Token::Separator(';'))?;
2904                Some(ast::GlobalDeclKind::Type(ast::TypeAlias { name, ty }))
2905            }
2906            (Token::Word("const"), _) => {
2907                ensure_no_diag_attrs("`const`s".into(), diagnostic_filters)?;
2908
2909                let name = lexer.next_ident()?;
2910
2911                let ty = if lexer.skip(Token::Separator(':')) {
2912                    let ty = self.type_decl(lexer, &mut ctx)?;
2913                    Some(ty)
2914                } else {
2915                    None
2916                };
2917
2918                lexer.expect(Token::Operation('='))?;
2919                let init = self.general_expression(lexer, &mut ctx)?;
2920                lexer.expect(Token::Separator(';'))?;
2921
2922                Some(ast::GlobalDeclKind::Const(ast::Const { name, ty, init }))
2923            }
2924            (Token::Word("override"), _) => {
2925                ensure_no_diag_attrs("`override`s".into(), diagnostic_filters)?;
2926
2927                let name = lexer.next_ident()?;
2928
2929                let ty = if lexer.skip(Token::Separator(':')) {
2930                    Some(self.type_decl(lexer, &mut ctx)?)
2931                } else {
2932                    None
2933                };
2934
2935                let init = if lexer.skip(Token::Operation('=')) {
2936                    Some(self.general_expression(lexer, &mut ctx)?)
2937                } else {
2938                    None
2939                };
2940
2941                lexer.expect(Token::Separator(';'))?;
2942
2943                Some(ast::GlobalDeclKind::Override(ast::Override {
2944                    name,
2945                    id: id.value,
2946                    ty,
2947                    init,
2948                }))
2949            }
2950            (Token::Word("var"), _) => {
2951                ensure_no_diag_attrs("`var`s".into(), diagnostic_filters)?;
2952
2953                let mut var = self.variable_decl(lexer, &mut ctx)?;
2954                var.binding = binding.take();
2955                Some(ast::GlobalDeclKind::Var(var))
2956            }
2957            (Token::Word("fn"), _) => {
2958                let diagnostic_filter_leaf = Self::write_diagnostic_filters(
2959                    &mut out.diagnostic_filters,
2960                    diagnostic_filters,
2961                    out.diagnostic_filter_leaf,
2962                );
2963
2964                let function = self.function_decl(
2965                    lexer,
2966                    diagnostic_filter_leaf,
2967                    must_use.value,
2968                    out,
2969                    &mut dependencies,
2970                )?;
2971                Some(ast::GlobalDeclKind::Fn(ast::Function {
2972                    entry_point: if let Some(stage) = stage.value {
2973                        if stage == ShaderStage::Compute && workgroup_size.value.is_none() {
2974                            return Err(Box::new(Error::MissingWorkgroupSize(compute_span)));
2975                        }
2976                        Some(ast::EntryPoint {
2977                            stage,
2978                            early_depth_test: early_depth_test.value,
2979                            workgroup_size: workgroup_size.value,
2980                        })
2981                    } else {
2982                        None
2983                    },
2984                    ..function
2985                }))
2986            }
2987            (Token::Word("const_assert"), _) => {
2988                ensure_no_diag_attrs("`const_assert`s".into(), diagnostic_filters)?;
2989
2990                // parentheses are optional
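                    // e.g. the illustrative `const_assert 4 < 5;` or `const_assert(4 < 5);`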
2991                let paren = lexer.skip(Token::Paren('('));
2992
2993                let condition = self.general_expression(lexer, &mut ctx)?;
2994
2995                if paren {
2996                    lexer.expect(Token::Paren(')'))?;
2997                }
2998                lexer.expect(Token::Separator(';'))?;
2999                Some(ast::GlobalDeclKind::ConstAssert(condition))
3000            }
3001            (Token::End, _) => return Ok(()),
3002            other => {
3003                return Err(Box::new(Error::Unexpected(
3004                    other.1,
3005                    ExpectedToken::GlobalItem,
3006                )))
3007            }
3008        };
3009
3010        if let Some(kind) = kind {
3011            out.decls.append(
3012                ast::GlobalDecl { kind, dependencies },
3013                lexer.span_from(start),
3014            );
3015        }
3016
3017        if !self.rules.is_empty() {
3018            log::error!("Reached the end of global decl, but rule stack is not empty");
3019            log::error!("Rules: {:?}", self.rules);
3020            return Err(Box::new(Error::Internal("rule stack is not empty")));
3021        };
3022
3023        match binding {
3024            None => Ok(()),
3025            Some(_) => Err(Box::new(Error::Internal(
3026                "we had the attribute but no var?",
3027            ))),
3028        }
3029    }
3030
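        /// Parses `source` into an [`ast::TranslationUnit`].
        ///
        /// Global directives (`diagnostic`, `enable`, `requires`) are parsed
        /// first; everything after them is handled by repeated calls to
        /// [`Self::global_decl`]. A minimal illustrative module (not from the
        /// test suite):
        ///
        /// ```wgsl
        /// enable f16;
        ///
        /// @compute @workgroup_size(1)
        /// fn main() {}
        /// ```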
3031    pub fn parse<'a>(&mut self, source: &'a str) -> Result<'a, ast::TranslationUnit<'a>> {
3032        self.reset();
3033
3034        let mut lexer = Lexer::new(source);
3035        let mut tu = ast::TranslationUnit::default();
3036        let mut enable_extensions = EnableExtensions::empty();
3037        let mut diagnostic_filters = DiagnosticFilterMap::new();
3038
3039        // Parse directives.
3040        while let Ok((ident, _directive_ident_span)) = lexer.peek_ident_with_span() {
3041            if let Some(kind) = DirectiveKind::from_ident(ident) {
3042                self.push_rule_span(Rule::Directive, &mut lexer);
3043                let _ = lexer.next_ident_with_span().unwrap();
3044                match kind {
3045                    DirectiveKind::Diagnostic => {
3046                        let diagnostic_filter = self.diagnostic_filter(&mut lexer)?;
3047                        let span = self.peek_rule_span(&lexer);
3048                        diagnostic_filters
3049                            .add(diagnostic_filter, span, ShouldConflictOnFullDuplicate::No)
3050                            .map_err(|e| Box::new(e.into()))?;
3051                        lexer.expect(Token::Separator(';'))?;
3052                    }
3053                    DirectiveKind::Enable => {
3054                        self.directive_ident_list(&mut lexer, |ident, span| {
3055                            let kind = EnableExtension::from_ident(ident, span)?;
3056                            let extension = match kind {
3057                                EnableExtension::Implemented(kind) => kind,
3058                                EnableExtension::Unimplemented(kind) => {
3059                                    return Err(Box::new(Error::EnableExtensionNotYetImplemented {
3060                                        kind,
3061                                        span,
3062                                    }))
3063                                }
3064                            };
3065                            enable_extensions.add(extension);
3066                            Ok(())
3067                        })?;
3068                    }
3069                    DirectiveKind::Requires => {
3070                        self.directive_ident_list(&mut lexer, |ident, span| {
3071                            match LanguageExtension::from_ident(ident) {
3072                                Some(LanguageExtension::Implemented(_kind)) => {
3073                                    // NOTE: An implemented language extension needs no further
3074                                    // validation, so we simply discard the parsed information.
3075                                    // If we ever want to use it for diagnostics, we may need to
3076                                    // track enabled language extensions here.
3077                                    Ok(())
3078                                }
3079                                Some(LanguageExtension::Unimplemented(kind)) => {
3080                                    Err(Box::new(Error::LanguageExtensionNotYetImplemented {
3081                                        kind,
3082                                        span,
3083                                    }))
3084                                }
3085                                None => Err(Box::new(Error::UnknownLanguageExtension(span, ident))),
3086                            }
3087                        })?;
3088                    }
3089                }
3090                self.pop_rule_span(&lexer);
3091            } else {
3092                break;
3093            }
3094        }
3095
3096        lexer.enable_extensions = enable_extensions.clone();
3097        tu.enable_extensions = enable_extensions;
3098        tu.diagnostic_filter_leaf =
3099            Self::write_diagnostic_filters(&mut tu.diagnostic_filters, diagnostic_filters, None);
3100
3101        loop {
3102            match self.global_decl(&mut lexer, &mut tu) {
3103                Err(error) => return Err(error),
3104                Ok(()) => {
3105                    if lexer.peek().0 == Token::End {
3106                        break;
3107                    }
3108                }
3109            }
3110        }
3111
3112        Ok(tu)
3113    }
3114
3115    fn increase_brace_nesting(brace_nesting_level: u8, brace_span: Span) -> Result<'static, u8> {
3116        // From [spec.](https://gpuweb.github.io/gpuweb/wgsl/#limits):
3117        //
3118        // > § 2.4. Limits
3119        // >
3120        // > …
3121        // >
3122        // > Maximum nesting depth of brace-enclosed statements in a function[:] 127
3123        const BRACE_NESTING_MAXIMUM: u8 = 127;
3124        if brace_nesting_level + 1 > BRACE_NESTING_MAXIMUM {
3125            return Err(Box::new(Error::ExceededLimitForNestedBraces {
3126                span: brace_span,
3127                limit: BRACE_NESTING_MAXIMUM,
3128            }));
3129        }
3130        Ok(brace_nesting_level + 1)
3131    }
3132
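        /// Parses the parenthesized argument list of a `diagnostic(...)`
        /// directive or attribute: a severity, a comma, a triggering-rule name
        /// (optionally dotted), and an optional trailing comma. Unknown rule
        /// names produce a warning rather than a hard error. Illustrative WGSL
        /// sketches (not from the test suite):
        ///
        /// ```wgsl
        /// diagnostic(off, derivative_uniformity);
        /// @diagnostic(warning, my_vendor.my_rule) fn f() {}
        /// ```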
3133    fn diagnostic_filter<'a>(&self, lexer: &mut Lexer<'a>) -> Result<'a, DiagnosticFilter> {
3134        lexer.expect(Token::Paren('('))?;
3135
3136        let (severity_control_name, severity_control_name_span) = lexer.next_ident_with_span()?;
3137        let new_severity = diagnostic_filter::Severity::from_wgsl_ident(severity_control_name)
3138            .ok_or(Error::DiagnosticInvalidSeverity {
3139                severity_control_name_span,
3140            })?;
3141
3142        lexer.expect(Token::Separator(','))?;
3143
3144        let (diagnostic_name_token, diagnostic_name_token_span) = lexer.next_ident_with_span()?;
3145        let triggering_rule = if lexer.skip(Token::Separator('.')) {
3146            let (ident, _span) = lexer.next_ident_with_span()?;
3147            FilterableTriggeringRule::User(Box::new([diagnostic_name_token.into(), ident.into()]))
3148        } else {
3149            let diagnostic_rule_name = diagnostic_name_token;
3150            let diagnostic_rule_name_span = diagnostic_name_token_span;
3151            if let Some(triggering_rule) =
3152                StandardFilterableTriggeringRule::from_wgsl_ident(diagnostic_rule_name)
3153            {
3154                FilterableTriggeringRule::Standard(triggering_rule)
3155            } else {
3156                diagnostic_filter::Severity::Warning.report_wgsl_parse_diag(
3157                    Box::new(Error::UnknownDiagnosticRuleName(diagnostic_rule_name_span)),
3158                    lexer.source,
3159                )?;
3160                FilterableTriggeringRule::Unknown(diagnostic_rule_name.into())
3161            }
3162        };
3163        let filter = DiagnosticFilter {
3164            triggering_rule,
3165            new_severity,
3166        };
3167        lexer.skip(Token::Separator(','));
3168        lexer.expect(Token::Paren(')'))?;
3169
3170        Ok(filter)
3171    }
3172
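        /// Appends `filters` to `arena` as a chain of [`DiagnosticFilterNode`]s,
        /// each new node pointing at the previous one through its `parent` field,
        /// and returns the handle of the last node written, or `parent` unchanged
        /// when `filters` is empty.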
3173    pub(crate) fn write_diagnostic_filters(
3174        arena: &mut Arena<DiagnosticFilterNode>,
3175        filters: DiagnosticFilterMap,
3176        parent: Option<Handle<DiagnosticFilterNode>>,
3177    ) -> Option<Handle<DiagnosticFilterNode>> {
3178        filters
3179            .into_iter()
3180            .fold(parent, |parent, (triggering_rule, (new_severity, span))| {
3181                Some(arena.append(
3182                    DiagnosticFilterNode {
3183                        inner: DiagnosticFilter {
3184                            new_severity,
3185                            triggering_rule,
3186                        },
3187                        parent,
3188                    },
3189                    span,
3190                ))
3191            })
3192    }
3193}