Remove Name type and add quoted identifier token

Brandon Dyck 2023-07-16 23:30:20 -06:00
parent 9dfdf1109e
commit 64fac5a9fb
8 changed files with 103 additions and 120 deletions
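For orientation, a minimal sketch (not part of the commit) of the representation this change moves to: the dedicated Name record (a string plus a Quoted flag) goes away, and names travel as ordinary Token values whose literal field holds the identifier string. Using the Token constructor visible in the old Env code (type, lexeme, literal, position), the two identifier forms would look roughly like:

    // illustrative only; the position argument new(0, 1, 1) is the same placeholder the old code used
    Token plain  = new Token(TokenType.Identifier, "foo", "foo", new(0, 1, 1));
    Token quoted = new Token(TokenType.QuotedIdentifier, "@\"needs spaces\"", "needs spaces", new(0, 1, 1));
    string name  = (string)quoted.literal!;   // downstream code recovers the name string from literal

Either token can then appear wherever a field name, variant tag, or variable is expected.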

AST.cs (17 changed lines)
View File

@@ -3,16 +3,7 @@ using System.Collections.Generic;
 
 namespace Finn.AST;
 
-public record Name(string Value, bool Quoted)
-{
-    public override string ToString()
-    {
-        if (this.Quoted) return $"@\"{this.Value}\"";
-        return this.Value;
-    }
-};
-
-public record Field(Name Name, Expr? Value);
+public record Field(Token Name, Expr? Value);
 
 public record BaseRecord(Expr Value, Field[] Updates);
@@ -20,7 +11,7 @@ public partial record Variable
 {
     public override string ToString()
     {
-        return this.Value.ToString();
+        return this.Value.lexeme;
     }
 }
@@ -48,13 +39,13 @@ public partial record SimplePattern
         {
             return "_";
         }
-        return this.Identifier.ToString();
+        return this.Identifier.lexeme;
     }
 }
 
 public abstract record Binding(Expr Value);
 public record VarBinding(Pattern Pattern, Expr Value) : Binding(Value);
-public record FuncBinding(Name Name, Pattern[] Params, Expr Value) : Binding(Value)
+public record FuncBinding(Token Name, Pattern[] Params, Expr Value) : Binding(Value)
 {
     public override string ToString()
     {

View File

@@ -69,7 +69,7 @@ public partial record If(Expr Condition, Expr Then, Expr Else) : Expr()
         return visitor.visitIfExpr(context, this);
     }
 }
-public partial record Variable(Name Value) : Expr()
+public partial record Variable(Token Value) : Expr()
 {
     public override TResult accept<TContext, TResult>(TContext context, IExprVisitor<TContext, TResult> visitor)
     {
@@ -83,7 +83,7 @@ public partial record List(Expr[] Elements) : Expr()
         return visitor.visitListExpr(context, this);
     }
 }
-public partial record Variant(Name Tag, Expr? Argument) : Expr()
+public partial record Variant(Token Tag, Expr? Argument) : Expr()
 {
     public override TResult accept<TContext, TResult>(TContext context, IExprVisitor<TContext, TResult> visitor)
     {
@@ -97,7 +97,7 @@ public partial record Record(Field[] Extensions, BaseRecord? Base) : Expr()
         return visitor.visitRecordExpr(context, this);
     }
 }
-public partial record Selector(Expr Left, Name FieldName) : Expr()
+public partial record Selector(Expr Left, Token FieldName) : Expr()
 {
     public override TResult accept<TContext, TResult>(TContext context, IExprVisitor<TContext, TResult> visitor)
     {

View File

@@ -31,33 +31,33 @@ public class Env
         this.enclosing = enclosing;
     }
 
-    public object this[AST.Name name]
+    public object this[Token identifier]
     {
         set
         {
-            if (values.ContainsKey(name.Value))
+            var name = (string)identifier.literal!;
+            if (values.ContainsKey(name))
             {
                 // TODO use real location info
-                var tok = new Token(TokenType.Identifier, name.Value, null, new(0, 1, 1));
-                throw new RuntimeError(tok, $"Cannot redefine variable {name} in same scope.");
+                throw new RuntimeError(identifier, $"Cannot redefine variable {name} in same scope.");
             }
-            values[name.Value] = value;
+            values[name] = value;
         }
         get
         {
+            var name = (string)identifier.literal!;
             try
             {
-                return values[name.Value];
+                return values[name];
             }
             catch
            {
                 if (enclosing != null)
                 {
-                    return enclosing[name];
+                    return enclosing[identifier];
                 }
                 // TODO use real location info
-                var tok = new Token(TokenType.Identifier, name.Value, null, new(0, 1, 1));
-                throw new RuntimeError(tok, $"Undefined variable {name}.");
+                throw new RuntimeError(identifier, $"Undefined variable {name}.");
             }
         }
     }
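A hedged usage sketch of the reworked indexer (the no-argument Env constructor and the variable names are assumptions, not shown in this diff): callers now hand over the identifier token itself, the string key is read from its literal, and RuntimeError can carry the real token instead of a fabricated one.

    var env = new Env();                                             // assuming a parameterless constructor exists
    var x = new Token(TokenType.Identifier, "x", "x", new(0, 1, 1));
    env[x] = 42;                                                     // defines "x"; a second definition in the same scope throws RuntimeError
    object value = env[x];                                           // lookup; an undefined name throws RuntimeError carrying this token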
@@ -98,8 +98,9 @@ public class Interpreter : AST.IExprVisitor<Env, object>
         var removedLabels = new List<string>();
         foreach (var field in pattern.Fields)
         {
-            removedLabels.Add(field.Name.Value);
-            var fieldValue = r.Get(field.Name.Value);
+            string name = (string)field.Name.literal!;
+            removedLabels.Add(name);
+            var fieldValue = r.Get(name);
             field.accept((fieldValue, env), this);
         }
         if (pattern.Rest != null)
@@ -129,9 +130,10 @@ public class Interpreter : AST.IExprVisitor<Env, object>
         switch (obj)
         {
             case Variant v:
-                if (v.Tag != pattern.Tag.Value)
+                var tag = (string)pattern.Tag.literal!;
+                if (v.Tag != tag)
                 {
-                    throw new PatternTagMismatchException(pattern.Tag.Value, v.Tag);
+                    throw new PatternTagMismatchException(tag, v.Tag);
                 }
                 if (v.Value == null && pattern.Argument == null)
                 {
@@ -502,7 +504,7 @@ public class Interpreter : AST.IExprVisitor<Env, object>
         HashSet<string> updateLabels = new HashSet<string>();
         foreach (AST.Field update in expr.Base.Updates)
         {
-            var label = update.Name.Value;
+            var label = (string)update.Name.literal!;
             if (updateLabels.Contains(label))
             {
                 throw new RuntimeError(tok, "Record updates must be to unique fields.");
@@ -525,7 +527,7 @@ public class Interpreter : AST.IExprVisitor<Env, object>
         HashSet<string> extLabels = new HashSet<string>();
         foreach (AST.Field extension in expr.Extensions)
         {
-            var label = extension.Name.Value;
+            var label = (string)extension.Name.literal!;
             if (extLabels.Contains(label))
             {
                 throw new RuntimeError(tok, "Record extensions must have unique field names.");
@@ -547,7 +549,7 @@ public class Interpreter : AST.IExprVisitor<Env, object>
         var r = checkRecordOperand(tok, left);
         try
         {
-            return r.Get(expr.FieldName.Value);
+            return r.Get((string)expr.FieldName.literal!);
         }
         catch
         {
@@ -586,11 +588,12 @@ public class Interpreter : AST.IExprVisitor<Env, object>
     public object visitVariantExpr(Env env, AST.Variant expr)
     {
+        var tag = (string)expr.Tag.literal!;
         if (expr.Argument == null)
         {
-            return new Variant(expr.Tag.Value, null);
+            return new Variant(tag, null);
         }
-        return new Variant(expr.Tag.Value, evaluate(env, expr.Argument));
+        return new Variant(tag, evaluate(env, expr.Argument));
     }
 
     public object visitWhenExpr(Env env, AST.When expr)

View File

@@ -71,9 +71,9 @@ class Parser
         return tokens[current - 1];
     }
 
-    private Token consume(TokenType type, string message)
+    private Token consume(string message, params TokenType[] types)
     {
-        if (check(type)) return advance();
+        if (check(types)) return advance();
         throw error(peek(), message);
     }
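Moving the message to the front is what lets the token types become a params array (a params parameter has to come last in C#), so a single call can now accept several alternatives. The call shapes used throughout the rest of this diff look like:

    consume("Expect '}' at end of record pattern.", TokenType.RBrace);
    var tag = consume("Expect identifier as tag name.", TokenType.Identifier, TokenType.QuotedIdentifier);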
@@ -162,11 +162,7 @@ class Parser
         List<FieldPattern> fields = new List<FieldPattern>();
         while (!check(TokenType.RBrace, TokenType.Pipe))
         {
-            var fieldName = name();
-            if (fieldName == null)
-            {
-                throw error(peek(), "Expect identifier as field name.");
-            }
+            var fieldName = consume("Expect identifier as field name.", TokenType.Identifier, TokenType.QuotedIdentifier);
             var pat = match(TokenType.Equal) ? pattern() : null;
             fields.Add(new(fieldName, pat));
             if (!match(TokenType.Comma))
@@ -175,7 +171,7 @@ class Parser
             }
         }
         var restPattern = match(TokenType.Pipe) ? simplePattern() : null;
-        consume(TokenType.RBrace, "Expect '}' at end of record pattern.");
+        consume("Expect '}' at end of record pattern.", TokenType.RBrace);
         return new(fields.ToArray(), restPattern);
     }
@@ -185,17 +181,13 @@ class Parser
         {
             return null;
         }
-        Name? tag = name();
-        if (tag == null)
-        {
-            throw error(peek(), "Expect identifier as tag name.");
-        }
+        var tag = consume("Expect identifier as tag name.", TokenType.Identifier, TokenType.QuotedIdentifier);
         if (!match(TokenType.LParen))
         {
             return new(tag, null);
         }
         Pattern argument = pattern();
-        consume(TokenType.RParen, "Expect ')' after variant argument.");
+        consume("Expect ')' after variant argument.", TokenType.RParen);
         return new(tag, argument);
     }
@@ -206,13 +198,7 @@ class Parser
             return new SimplePattern(null);
         }
-        var identifier = name();
-        if (identifier != null)
-        {
-            return new SimplePattern(identifier);
-        }
-        return null;
+        return match(TokenType.Identifier, TokenType.QuotedIdentifier) ? new(previous()) : null;
     }
 
     private Pattern pattern()
@@ -254,7 +240,7 @@ class Parser
         {
             bindings.Add(parseBinding());
         }
-        consume(TokenType.In, "Expect 'in' after let-bindings.");
+        consume("Expect 'in' after let-bindings.", TokenType.In);
         Expr body = expression();
         return new Let(bindings.ToArray(), body);
@@ -273,11 +259,11 @@ class Parser
                         break;
                     }
                 }
-                consume(TokenType.RParen, "Expect ')' at end of parameters.");
-                consume(TokenType.Equal, "Expect '=' after parameters.");
+                consume("Expect ')' at end of parameters.", TokenType.RParen);
+                consume("Expect '=' after parameters.", TokenType.Equal);
                 return new FuncBinding(funcName, funcParams.ToArray(), expression());
             default:
-                consume(TokenType.Equal, "Expect '=' after pattern.");
+                consume("Expect '=' after pattern.", TokenType.Equal);
                 return new VarBinding(p, expression());
         }
     }
@@ -292,9 +278,9 @@ class Parser
             return when();
         }
         Expr condition = expression();
-        consume(TokenType.Then, "Expect 'then' after condition.");
+        consume("Expect 'then' after condition.", TokenType.Then);
         Expr thenCase = expression();
-        consume(TokenType.Else, "Expect 'else' after 'then' case.");
+        consume("Expect 'else' after 'then' case.", TokenType.Else);
         Expr elseCase = expression();
         return new If(condition, thenCase, elseCase);
     }
@@ -306,7 +292,7 @@ class Parser
             return primary();
         }
         Expr head = expression();
-        consume(TokenType.Is, "Expect 'is' after expression.");
+        consume("Expect 'is' after expression.", TokenType.Is);
 
         List<VarBinding> cases = new List<VarBinding>();
         cases.Add(parseCase());
@@ -319,44 +305,26 @@ class Parser
         VarBinding parseCase()
         {
             Pattern pat = pattern();
-            consume(TokenType.DoubleArrow, "Expect '=>' after pattern.");
+            consume("Expect '=>' after pattern.", TokenType.DoubleArrow);
             Expr value = expression();
             return new VarBinding(pat, value);
         }
     }
 
-    private Name? name()
-    {
-        if (match(TokenType.Identifier))
-        {
-            return new(previous().lexeme, false);
-        }
-        if (match(TokenType.At))
-        {
-            Token literal = consume(TokenType.String, "Expect string literal after '@'.");
-            return new((string)(literal.literal!), true);
-        }
-        return null;
-    }
-
     private Expr primary()
     {
         Expr expr = operand();
         if (match(TokenType.Period))
         {
-            Name? ident = name();
-            if (ident == null)
-            {
-                throw error(advance(), "Expect identifier after dot.");
-            }
+            var ident = consume("Expect identifier after dot.", TokenType.Identifier, TokenType.QuotedIdentifier);
             return new Selector(expr, ident);
         }
 
         if (match(TokenType.LBracket))
         {
             var index = expression();
-            consume(TokenType.RBracket, "Expect '[' after expression.");
+            consume("Expect '[' after expression.", TokenType.RBracket);
             return new Indexer(expr, index);
         }
@@ -378,7 +346,7 @@ class Parser
                 break;
             }
         }
-        consume(TokenType.RParen, "Expect ')' after arguments.");
+        consume("Expect ')' after arguments.", TokenType.RParen);
         return new Call(expr, args.ToArray());
     }
@@ -393,16 +361,15 @@ class Parser
             return new Literal(previous().literal!);
         }
 
-        var ident = name();
-        if (ident != null)
+        if (match(TokenType.Identifier, TokenType.QuotedIdentifier))
         {
-            return new Variable(ident);
+            return new Variable(previous());
         }
 
         if (match(TokenType.LParen))
         {
             Expr groupedExpr = expression();
-            consume(TokenType.RParen, "Expect ')' after expression.");
+            consume("Expect ')' after expression.", TokenType.RParen);
             return new Grouping(groupedExpr);
         }
@@ -463,18 +430,14 @@ class Parser
         {
             return null;
         }
-        Name? tag = name();
+        var tag = consume("Expect identifier as tag name.", TokenType.QuotedIdentifier, TokenType.Identifier);
         Expr? argument = null;
-        if (tag == null)
-        {
-            throw error(peek(), "Expect identifier as tag name.");
-        }
         if (match(TokenType.LParen))
         {
             if (!match(TokenType.RParen))
             {
                 argument = expression();
-                consume(TokenType.RParen, "Expect ')' after variant argument.");
+                consume("Expect ')' after variant argument.", TokenType.RParen);
             }
         }
         return new Variant(tag, argument);
@@ -497,7 +460,7 @@ class Parser
             baseRecord = new(baseExpr, updates);
         }
 
-        consume(TokenType.RBrace, "Expect '}' at end of record literal.");
+        consume("Expect '}' at end of record literal.", TokenType.RBrace);
         return new Record(extensions, baseRecord);
 
         Field[] parseFields(params TokenType[] endAt)
@@ -506,11 +469,7 @@ class Parser
             while (!check(endAt))
             {
-                var fieldName = name();
-                if (fieldName == null)
-                {
-                    throw error(peek(), "Expect identifier as field name.");
-                }
+                var fieldName = consume("Expect identifier as field name.", TokenType.Identifier, TokenType.QuotedIdentifier);
                 var value = match(TokenType.Equal) ? expression() : null;
                 fields.Add(new(fieldName, value));
                 if (!match(TokenType.Comma))

View File

@@ -16,21 +16,21 @@ public interface IPatternVisitor<TContext, TResult> {
     TResult visitFieldPatternPattern(TContext context, FieldPattern pattern);
     TResult visitRecordPatternPattern(TContext context, RecordPattern pattern);
 }
-public partial record SimplePattern(Name? Identifier) : Pattern()
+public partial record SimplePattern(Token? Identifier) : Pattern()
 {
     public override TResult accept<TContext, TResult>(TContext context, IPatternVisitor<TContext, TResult> visitor)
     {
         return visitor.visitSimplePatternPattern(context, this);
     }
 }
-public partial record VariantPattern(Name Tag, Pattern? Argument) : Pattern()
+public partial record VariantPattern(Token Tag, Pattern? Argument) : Pattern()
 {
     public override TResult accept<TContext, TResult>(TContext context, IPatternVisitor<TContext, TResult> visitor)
     {
         return visitor.visitVariantPatternPattern(context, this);
     }
 }
-public partial record FieldPattern(Name Name, Pattern? Pattern) : Pattern()
+public partial record FieldPattern(Token Name, Pattern? Pattern) : Pattern()
 {
     public override TResult accept<TContext, TResult>(TContext context, IPatternVisitor<TContext, TResult> visitor)
     {

View File

@@ -41,8 +41,16 @@ class Program
         {
             var scanner = new Scanner(src);
             List<Token> tokens = scanner.scanTokens();
+            Console.WriteLine("TOKENS\n=======");
+            foreach (var token in tokens)
+            {
+                Console.WriteLine(token);
+            }
             Parser parser = new Parser(tokens);
             Expr? expression = parser.parse();
+            Console.WriteLine("\nAST\n=======");
+            Console.WriteLine(expression);
+            Console.WriteLine();
 
             if (hadError)
             {
@@ -132,7 +140,7 @@ public enum TokenType
     Recurse,
     Def,
     Blank,
-    Identifier, Number, String,
+    Identifier, QuotedIdentifier, Number, String,
     EOF
 }
@@ -207,10 +215,14 @@ class Scanner
         addToken(type, null);
     }
 
+    private string lexeme
+    {
+        get => source.Substring(start.Offset, current.Offset - start.Offset);
+    }
+
     private void addToken(TokenType type, Object? literal)
     {
-        String text = source.Substring(start.Offset, current.Offset - start.Offset);
-        tokens.Add(new Token(type, text, literal, start));
+        tokens.Add(new Token(type, lexeme, literal, start));
     }
 
     private bool match(char expected)
@@ -237,8 +249,9 @@ class Scanner
         return source[current.Offset];
     }
 
-    private void stringLiteral()
+    private string? _stringLiteral(string errorName)
     {
+        var valueStart = current.Offset;
         while (peek() != '"' && !isAtEnd())
         {
             if (peek() == '\n')
@@ -248,15 +261,32 @@ class Scanner
         if (isAtEnd())
         {
-            Program.error(current, "Unterminated string.");
-            return;
+            Program.error(current, $"Unterminated {errorName}.");
+            return null;
         }
 
         // The closing ".
         advance();
 
-        // Trim the surrounding quotes.
-        String value = source.Substring(start.Offset + 1, current.Offset - start.Offset - 2);
+        // Trim the closing quote.
+        return source.Substring(valueStart, current.Offset - valueStart - 1);
+    }
+
+    private void quotedIdentifier()
+    {
+        if (!match('"'))
+        {
+            Program.error(current, "Expect \" after @.");
+        }
+        var value = _stringLiteral("quoted identifier");
+        if (value == null) return;
+        addToken(TokenType.QuotedIdentifier, value);
+    }
+
+    private void stringLiteral()
+    {
+        var value = _stringLiteral("string");
+        if (value == null) return;
         addToken(TokenType.String, value);
     }
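As a worked illustration (not output captured from the repo), scanning the source @"hello world" should now yield a single QuotedIdentifier token whose lexeme is the full @"hello world" text and whose literal is the inner string, so the parser can treat it exactly like a plain Identifier:

    var scanner = new Scanner("@\"hello world\"");
    List<Token> tokens = scanner.scanTokens();
    // tokens[0]: type QuotedIdentifier, lexeme @"hello world", literal "hello world"
    // (presumably followed by the usual EOF token)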
@@ -300,7 +330,7 @@ class Scanner
         String text = source.Substring(start.Offset, current.Offset - start.Offset);
         TokenType type;
         var isKeyword = keywords.TryGetValue(text, out type);
-        addToken(isKeyword ? type : TokenType.Identifier);
+        addToken(isKeyword ? type : TokenType.Identifier, text);
     }
 
     private void scanToken()
@@ -317,7 +347,6 @@ class Scanner
             case ':': addToken(TokenType.Colon); break;
             case '\'': addToken(TokenType.Tick); break;
             case '`': addToken(TokenType.Backtick); break;
-            case '@': addToken(TokenType.At); break;
             case ',': addToken(TokenType.Comma); break;
             case ';': addToken(TokenType.Semicolon); break;
             case '.': addToken(TokenType.Period); break;
@@ -344,6 +373,7 @@ class Scanner
             case '>':
                 addToken(match('=') ? TokenType.GreaterEqual : TokenType.Greater);
                 break;
+            case '@': quotedIdentifier(); break;
             case '"': stringLiteral(); break;
             case '#':
                 while (peek() != '\n' && !isAtEnd()) advance();

View File

@@ -1,3 +1,3 @@
-Unify identifier types
 Include tokens in AST nodes
 Figure out multiple-binding let-expr semantics
+Inject error handling into parser and scanner

View File

@@ -23,18 +23,18 @@ let exprTypes =
           { Type = "Expr"; Name = "Then" }
           { Type = "Expr"; Name = "Else" } ] }
       { Name = "Variable"
-        Fields = [ { Type = "Name"; Name = "Value" } ] }
+        Fields = [ { Type = "Token"; Name = "Value" } ] }
       { Name = "List"
         Fields = [ { Type = "Expr[]"; Name = "Elements" } ] }
       { Name = "Variant"
-        Fields = [ { Type = "Name"; Name = "Tag" }; { Type = "Expr?"; Name = "Argument" } ] }
+        Fields = [ { Type = "Token"; Name = "Tag" }; { Type = "Expr?"; Name = "Argument" } ] }
       { Name = "Record"
         Fields =
           [ { Type = "Field[]"
               Name = "Extensions" }
             { Type = "BaseRecord?"; Name = "Base" } ] }
       { Name = "Selector"
-        Fields = [ { Type = "Expr"; Name = "Left" }; { Type = "Name"; Name = "FieldName" } ] }
+        Fields = [ { Type = "Expr"; Name = "Left" }; { Type = "Token"; Name = "FieldName" } ] }
       { Name = "Indexer"
         Fields = [ { Type = "Expr"; Name = "Left" }; { Type = "Expr"; Name = "Index" } ] }
       { Name = "Call"
@@ -49,11 +49,11 @@ let exprTypes =
 
 let patternTypes =
     [ { Name = "SimplePattern"
-        Fields = [ { Type = "Name?"; Name = "Identifier" } ] }
+        Fields = [ { Type = "Token?"; Name = "Identifier" } ] }
       { Name = "VariantPattern"
-        Fields = [ { Type = "Name"; Name = "Tag" }; { Type = "Pattern?"; Name = "Argument" } ] }
+        Fields = [ { Type = "Token"; Name = "Tag" }; { Type = "Pattern?"; Name = "Argument" } ] }
       { Name = "FieldPattern"
-        Fields = [ { Type = "Name"; Name = "Name" }; { Type = "Pattern?"; Name = "Pattern" } ] }
+        Fields = [ { Type = "Token"; Name = "Name" }; { Type = "Pattern?"; Name = "Pattern" } ] }
       { Name = "RecordPattern"
         Fields =
           [ { Type = "FieldPattern[]"