Complex parser in C#

Last time I asked for a review of my tokenizer, and I would like to thank everyone for the feedback! This time I wrote a parser for my language. The parser generates an AST (abstract syntax tree), from which I will later generate code in an intermediate language. Each node will have a Generate() method that outputs the code (compilation scheme).
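
To give a concrete picture of the Generate() idea, here is a minimal sketch of what a node could look like; the AdditionNode class, the string return type, and the instruction names are made up for illustration and are not the classes from my AST namespace:

// Hypothetical sketch only; the real node classes live in
// PearfectCompiler.SyntaxAnalysis.AST and may look different.
public abstract class Node
{
    public int LineNumber { get; }
    public int Position { get; }

    protected Node(int lineNumber, int position)
    {
        LineNumber = lineNumber;
        Position = position;
    }

    // Every node knows how to emit its own intermediate code.
    public abstract string Generate();
}

public class AdditionNode : Node
{
    private readonly Node left;
    private readonly Node right;

    public AdditionNode(Node left, Node right, int lineNumber, int position)
        : base(lineNumber, position)
    {
        this.left = left;
        this.right = right;
    }

    // Emit both operands first, then the add instruction (stack-machine style).
    public override string Generate()
    {
        return left.Generate() + right.Generate() + "add\n";
    }
}

Generating the whole program would then just be a matter of walking the tree and concatenating what each node returns.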

## About the language

The language is called Pearfect and the source file extension is *.pear. Below you can see sample code in the language. Please note the following:

  • Members whose names start with a capital letter are public; the others are private (equivalent to protected in C#)
  • Classes cannot be nested
  • Every return statement must be followed by an expression
  • There is no Main() method; execution starts at the top
  • Enums can have any value assigned

class Animal:
  var greeting = null
  var name = null

  func Greet():
    print(greeting)

class Dog extends Animal:
  const greeting = "Woof!"

  func Dog(_name):
    name = _name

class Cat extends Animal:
  const greeting = "Meow!"

  func Cat(_name):
    name = _name

for (i, i < 5, i++):
  print(i)

for x...10:
  print(x)

dictionary = {1: "one", 2: "two", 3: "three"}

list = [4, 5, 6]

for item in list:
  print(item)

if 2==2:
  print("TRUE")

func Add(x, y):
  #When a return statement is present, it must return a value
  return x+y

try:
  Add(3, 5)
catch Exception:
  print("Exception")

thing = 55
match thing:
  case thing < 40, thing > 30:
    print("It is less than 40 and greater than 30")
  case thing == 55:
    print("It equals 55")

enum Planet:
  Mercury = 1
  Venus = 2
  Earth = 3

## Errors

The compiler should also report syntax errors in the code. You can see an example here: Errors

## Review

I would like to hear what you think about my idea. How can my code be improved, both in terms of style and design? What have I forgotten? The full code is available in my GitHub repository. If the whitespace looks unnatural on GitHub, it is because I have switched from Xamarin Studio to Visual Studio.

### Parser.cs

using System;
using System.Collections.Generic;
using System.Text.RegularExpressions;
using PearfectCompiler.ErrorReporter;
using PearfectCompiler.LexicalAnalysis;
using PearfectCompiler.SyntaxAnalysis.AST;
using System.Linq;

namespace PearfectCompiler.SyntaxAnalysis
{
    public class Parser
    {
        private Tokenizer tokenizer;

        public Parser(string source)
        {
            tokenizer = new Tokenizer(source);
        }

        public Application IsApplication()
        {
            if (tokenizer.Peek() == null)
            {
                return null;
            }

            Token token = tokenizer.Peek();

            int lineNumber = token.LineNumber;
            int position = token.Position;

            List<Node> statements = new List<Node>();
            List<Node> members = new List<Node>();


            bool commaSeperated = false;

            do
            {
                commaSeperated = false;

                Node member = IsApplicationMember();
                Node statement = null;

                if (member == null)
                {
                    statement = IsStatement();
                }

                if (statement == null && member == null)
                {
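                    // Error recovery: skip the rest of the offending line, then any
                    // tokens indented deeper than this member, before reporting.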
                    int currentLineNumber = tokenizer.Peek().LineNumber;
                    int currentPosition = tokenizer.Peek().Position;

                    while (tokenizer.Peek() != null && tokenizer.Peek().LineNumber == currentLineNumber)
                    {
                        tokenizer.Get();
                    }

                    while (tokenizer.Peek() != null && tokenizer.Peek().Position > position)
                    {
                        tokenizer.Get();
                    }

                    Reporter.Report(ErrorType.Error, "The member is not valid",
                                    currentLineNumber, currentPosition);
                }

                if (statement != null)
                {
                    statements.Add(statement);
                }

                if (member != null)
                {
                    members.Add(member);
                }

                if (tokenizer.Peek() != null 
                    && member == null 
                    && tokenizer.Peek().Kind == TokenKind.Comma)
                {
                    tokenizer.Get();
                    commaSeperated = true;
                }

            } while (tokenizer.Peek() != null && (tokenizer.Peek().Position == position || commaSeperated));

            return new Application(members.ToArray(), statements.ToArray(), lineNumber, position);
        }

        public Node IsApplicationMember()
        {
            Node member = (Node)IsPackage() ??
                          (Node)IsClass() ??
                          (Node)IsEnumeration() ??
                          (Node)IsFunction() ??
                          (Node)IsVariable() ??
                          (Node)IsConstant();
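
            // Members declared at application level are re-created as static:
            // the isStatic constructor argument is hard-coded to true.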

            if (member is Function function)
            {
                member = new Function(function.Name,
                                      function.Parameters,
                                      function.Body,
                                      true,
                                      function.IsPublic,
                                      function.LineNumber,
                                      function.Position);
            }

            if (member is Variable variable)
            {
                member = new Variable(variable.Name,
                                      variable.Value,
                                      true,
                                      variable.IsPublic,
                                      variable.LineNumber,
                                      variable.Position);
            }

            return member;
        }

        public Package IsPackage()
        {
            if (tokenizer.Peek() == null || tokenizer.Peek().Kind != TokenKind.Package)
            {
                return null;
            }

            Token token = tokenizer.Get();

            int lineNumber = token.LineNumber;
            int position = token.Position;

            Identifier name = IsIdentifier();

            if (name == null)
            {
                Reporter.Report(ErrorType.Error, "Expected identifier", lineNumber, position);
            }

            if (tokenizer.Peek() == null || tokenizer.Peek().Kind != TokenKind.Colon)
            {
                Reporter.Report(ErrorType.Error, "Expected block", lineNumber, position);
            }

            tokenizer.Get();

            List<Node> members = new List<Node>();

            if (tokenizer.Peek() == null)
            {
                Reporter.Report(ErrorType.Error, "Package cannot be empty", lineNumber, position);
            }

            int memberPosition = tokenizer.Peek().Position;

            do
            {
                Node member = IsPackageMember();

                if (member == null)
                {
                    int currentLineNumber = tokenizer.Peek().LineNumber;
                    int currentPosition = tokenizer.Peek().Position;

                    while (tokenizer.Peek() != null && tokenizer.Peek().LineNumber == currentLineNumber)
                    {
                        tokenizer.Get();
                    }

                    while (tokenizer.Peek() != null && tokenizer.Peek().Position > memberPosition)
                    {
                        tokenizer.Get();
                    }

                    Reporter.Report(ErrorType.Error, "Invalid package member", currentLineNumber, currentPosition);
                }
              
                members.Add(member);

            } while (tokenizer.Peek() != null && tokenizer.Peek().Position == memberPosition);

            return new Package(name, members.ToArray(), lineNumber, position);
        }

        public Node IsPackageMember()
        {
            Node member = (Node)IsClass() ??
                          (Node)IsEnumeration() ??
                          (Node)IsFunction() ??
                          (Node)IsVariable() ??
                          (Node)IsConstant();

            if (member is Function function)
            {
                member = new Function(function.Name, function.Parameters, function.Body,
                                      true, function.IsPublic, function.LineNumber, function.Position);
            }

            if (member is Variable variable)
            {
                member = new Variable(variable.Name, variable.Value, true, variable.IsPublic, variable.LineNumber, variable.Position);
            }

            return member;
        }

        public Class IsClass()
        {
            if (tokenizer.Peek() == null || (tokenizer.Peek().Kind != TokenKind.Class && tokenizer.Peek().Kind != TokenKind.Object))
            {
                return null;
            }

            Token token = tokenizer.Get();

            int lineNumber = token.LineNumber;
            int position = token.Position;

            bool isStatic = false;

            if (token.Kind == TokenKind.Object)
            {
                isStatic = true;
            }

            Identifier name = IsIdentifier();

            if (name == null)
            {
                Reporter.Report(ErrorType.Error, "Expected identifier", lineNumber, position);
            }

            if (name != null && name.Localization.Length > 1)
            {
                Reporter.Report(ErrorType.Error, "Class can only be declared in the current scope", lineNumber, position);
            }

            bool isPublic = false;

            if (name != null && char.IsUpper(((IdentifierName)name.Localization[0]).Name[0]))
            {
                isPublic = true;
            }

            Identifier extends = null;

            if (isStatic == false && tokenizer.Peek() != null && tokenizer.Peek().Kind == TokenKind.Extends)
            {
                tokenizer.Get();

                extends = IsIdentifier();

                if (extends == null)
                {
                    Reporter.Report(ErrorType.Error, "Expected base class identifier", lineNumber, position);
                }

            }

            if (tokenizer.Peek() == null || tokenizer.Peek().Kind != TokenKind.Colon)
            {
                Reporter.Report(ErrorType.Error, "Expected block", lineNumber, position);
            }

            tokenizer.Get();

            List<Node> members = new List<Node>();

            if (tokenizer.Peek() == null)
            {
                Reporter.Report(ErrorType.Error, "Class cannot be empty", lineNumber, position);
            }

            int memberPosition = tokenizer.Peek().Position;

            do
            {
                Node member = IsClassMember();

                if (member == null)
                {
                    int currentLineNumber = tokenizer.Peek().LineNumber;
                    int currentPosition = tokenizer.Peek().Position;

                    while (tokenizer.Peek() != null && tokenizer.Peek().LineNumber == currentLineNumber)
                    {
                        tokenizer.Get();
                    }

                    while (tokenizer.Peek() != null && tokenizer.Peek().Position > memberPosition)
                    {
                        tokenizer.Get();
                    }

                    Reporter.Report(ErrorType.Error, "Invalid class member", currentLineNumber, currentPosition);
                }

                if (isStatic)
                {
                    if (member is Function function)
                    {
                        member = new Function(function.Name, function.Parameters, function.Body, true,
                                              function.IsPublic, function.LineNumber, function.Position);
                    }

                    if (member is Variable variable)
                    {
                        member = new Variable(variable.Name, variable.Value, true, variable.IsPublic,
                                              variable.LineNumber, variable.Position);
                    }
                }
                
                members.Add(member);

            } while (tokenizer.Peek() != null && tokenizer.Peek().Position == memberPosition);

            return new Class(name, extends, members.ToArray(), isPublic, isStatic, lineNumber, position);
                
        }

        public Node IsClassMember()
        {
            Node member = (Node)IsFunction() ?? 
                          (Node)IsVariable() ?? 
                          (Node)IsConstant();
            return member;
        }

        public Enumeration IsEnumeration()
        {
            if (tokenizer.Peek() == null || tokenizer.Peek().Kind != TokenKind.Enum)
            {
                return null;
            }

            Token token = tokenizer.Get();

            int lineNumber = token.LineNumber;
            int position = token.Position;

            Identifier name = IsIdentifier();

            if (name == null)
            {
                Reporter.Report(ErrorType.Error, "Expected identifier", lineNumber, position);
            }

            if (name != null && name.Localization.Length > 1)
            {
                Reporter.Report(ErrorType.Error, "Enumeration can only be declared in the current scope", lineNumber, position);
            }

            bool isPublic = false;

            if (name != null && char.IsUpper(((IdentifierName)name.Localization[0]).Name[0]))
            {
                isPublic = true;
            }

            if (tokenizer.Peek() == null || tokenizer.Peek().Kind != TokenKind.Colon)
            {
                Reporter.Report(ErrorType.Error, "Expected block", lineNumber, position);
            }

            List<Identifier> names = new List<Identifier>();
            List<Expression> values = new List<Expression>();


            do
            {
                int currentLineNumber = tokenizer.Peek().LineNumber;
                int currentPosition = tokenizer.Peek().Position;

                tokenizer.Get();

                Identifier valueName = IsIdentifier();

                if (valueName == null)
                {
                    Reporter.Report(ErrorType.Error, "Expected enumeration name", currentLineNumber, currentPosition);
                }

                if (tokenizer.Peek() != null 
                    && tokenizer.Peek().Kind == TokenKind.Assignment)
                {
                    currentLineNumber = tokenizer.Peek().LineNumber;
                    currentPosition = tokenizer.Peek().Position;
                    tokenizer.Get();

                    Expression value = IsExpression();

                    if (value == null)
                    {
                        Reporter.Report(ErrorType.Error, "Expected enumeration value expression", currentLineNumber, currentPosition);
                    }

                    values.Add(value);

                }
                else
                {
                    values.Add(new Expression(null, currentLineNumber, currentPosition));
                }

                names.Add(valueName);

            } while (tokenizer.Peek() != null && tokenizer.Peek().Kind == TokenKind.Comma);

            return new Enumeration(name, names.ToArray(), values.ToArray(), isPublic, lineNumber, position);

        }

        public Constant IsConstant()
        {
            if (tokenizer.Peek() == null || tokenizer.Peek().Kind != TokenKind.Const)
            {
                return null;
            }

            Token token = tokenizer.Get();

            int lineNumber = token.LineNumber;
            int position = token.Position;

            Identifier name = IsIdentifier();

            if (name == null)
            {
                Reporter.Report(ErrorType.Error, "Expected identifier", lineNumber, position);
            }

            if (name != null && name.Localization.Length > 1)
            {
                Reporter.Report(ErrorType.Error, "Const can only be declared in the current scope",
                                lineNumber, position);
            }

            bool isPublic = false;

            if (name != null && char.IsUpper(((IdentifierName)name.Localization[0]).Name[0]))
            {
                isPublic = true;
            }

            if (tokenizer.Peek() == null || tokenizer.Peek().Kind != TokenKind.Assignment)
            {
                Reporter.Report(ErrorType.Error, "Expected assignment token", lineNumber, position);
            }

            tokenizer.Get();

            Node value = IsLiteral();

            if (value == null)
            {
                Reporter.Report(ErrorType.Error, "Constant must have a value", lineNumber, position);
            }

            return new Constant(name, value, isPublic, lineNumber, position);
        }

        public Variable IsVariable()
        {
            if (tokenizer.Peek() == null
                || (tokenizer.Peek().Kind != TokenKind.Var
                && tokenizer.Peek().Kind != TokenKind.Svar))
            {
                return null;
            }

            Token token = tokenizer.Get();

            int lineNumber = token.LineNumber;
            int position = token.Position;

            bool isStatic = false;

            if (token.Kind == TokenKind.Svar)
            {
                isStatic = true;
            }

            Identifier name = IsIdentifier();

            if (name == null)
            {
                Reporter.Report(ErrorType.Error, "Expected identifier", lineNumber, position);
            }

            if (name != null && name.Localization.Length > 1)
            {
                Reporter.Report(ErrorType.Error, "Variable can only be declared in the current scope", lineNumber, position);
            }

            bool isPublic = false;

            if (name != null && char.IsUpper(((IdentifierName)name.Localization[0]).Name[0]))
            {
                isPublic = true;
            }

            if (tokenizer.Peek() == null || tokenizer.Peek().Kind != TokenKind.Assignment)
            {
                Reporter.Report(ErrorType.Error, "Expected assignment token", lineNumber, position);
            }

            tokenizer.Get();

            Expression value = IsExpression();

            if (value == null)
            {
                Reporter.Report(ErrorType.Error, "Variable must have a value", lineNumber, position);
            }

            return new Variable(name, value, isStatic, isPublic, lineNumber, position); 

        }

        public Function IsFunction()
        {
            if (tokenizer.Peek() == null || (tokenizer.Peek().Kind != TokenKind.Func && tokenizer.Peek().Kind != TokenKind.Sfunc))
            {
                return null;
            }

            Token token = tokenizer.Get();

            int lineNumber = token.LineNumber;
            int position = token.Position;

            bool isStatic = false;

            if (token.Kind == TokenKind.Sfunc)
            {
                isStatic = true;
            }

            Identifier name = IsIdentifier();

            if (name == null)
            {
                Reporter.Report(ErrorType.Error, "Expected identifier", lineNumber, position);
            }

            if (name != null && name.Localization.Length > 1)
            {
                Reporter.Report(ErrorType.Error, "Function can only be declared in the current scope", lineNumber, position);
            }

            bool isPublic = false;

            if (name != null && char.IsUpper(((IdentifierName)name.Localization[0]).Name[0]))
            {
                isPublic = true;
            }

            if (tokenizer.Peek() == null || tokenizer.Peek().Kind != TokenKind.OpenParenthesis)
            {
                Reporter.Report(ErrorType.Error, "Expected parameter list (open-parenthesis)", lineNumber, position);
            }

            tokenizer.Get();

            ParameterList parameters = null;

            if (tokenizer.Peek() != null && tokenizer.Peek().Kind != TokenKind.CloseParenthesis)
            {
                parameters = IsParameterList();

                if (parameters == null)
                {
                    Reporter.Report(ErrorType.Error, "Expected parameter list", lineNumber, position);
                }
            }

            if (tokenizer.Peek() == null || tokenizer.Peek().Kind != TokenKind.CloseParenthesis)
            {
                Reporter.Report(ErrorType.Error, "No closing parenthesis", lineNumber, position);
            }

            tokenizer.Get();

            if (tokenizer.Peek() == null || tokenizer.Peek().Kind != TokenKind.Colon)
            {
                Reporter.Report(ErrorType.Error, "Expected block (colon)", lineNumber, position);
            }

            tokenizer.Get();

            Block body = IsBlock();

            if (body == null)
            {
                Reporter.Report(ErrorType.Error, "Expected function body", lineNumber, position);
            }

            return new Function(name, parameters, body, isStatic, isPublic, lineNumber, position);

        }

        public MatchStatement IsMatchStatement()
        {
            if (tokenizer.Peek() == null || tokenizer.Peek().Kind != TokenKind.Match)
            {
                return null;
            }

            int lineNumber = tokenizer.Peek().LineNumber;
            int position = tokenizer.Peek().Position;

            tokenizer.Get();

            Identifier variable = IsIdentifier();

            if (variable == null)
            {
                Reporter.Report(ErrorType.Error, "Expected identifier", lineNumber, position);
            }

            if (tokenizer.Peek() == null || tokenizer.Peek().Kind != TokenKind.Colon)
            {
                Reporter.Report(ErrorType.Error, "Expected end of statement definition", lineNumber, position);
            }

            tokenizer.Get();

            if (tokenizer.Peek() == null || tokenizer.Peek().Kind != TokenKind.Case)
            {
                Reporter.Report(ErrorType.Error, "Match statement has no cases", lineNumber, position);
            }

            List<Expression[]> cases = new List<Expression[]>();
            List<Block> bodies = new List<Block>();

            do
            {
                int currentLineNumber = tokenizer.Peek().LineNumber;
                int currentPosition = tokenizer.Peek().Position;

                List<Expression> conditions = new List<Expression>();

                do
                {
                    int _currentLineNumber = tokenizer.Peek().LineNumber;
                    int _currentPosition = tokenizer.Peek().Position;

                    tokenizer.Get();

                    Expression condition = IsExpression();

                    if (condition == null)
                    {
                        Reporter.Report(ErrorType.Error, "Invalid case condition", _currentLineNumber, _currentPosition);
                    }

                    conditions.Add(condition);

                } while (tokenizer.Peek() != null && tokenizer.Peek().Kind == TokenKind.Comma);

                cases.Add(conditions.ToArray());

                if (tokenizer.Peek() == null || tokenizer.Peek().Kind != TokenKind.Colon)
                {
                    Reporter.Report(ErrorType.Error, "No case's block definition", currentLineNumber, currentPosition);
                }

                tokenizer.Get();

                Block body = IsBlock();

                if (body == null)
                {
                    Reporter.Report(ErrorType.Error, "Expected block", currentLineNumber, currentPosition);
                }

                bodies.Add(body);

            } while (tokenizer.Peek() != null && tokenizer.Peek().Kind == TokenKind.Case);

            Block defaultBody = null;

            if (tokenizer.Peek() != null && tokenizer.Peek().Kind == TokenKind.Default)
            {
                int currentLineNumber = tokenizer.Peek().LineNumber;
                int currentPosition = tokenizer.Peek().Position;

                tokenizer.Get();

                if (tokenizer.Peek() == null || tokenizer.Peek().Kind != TokenKind.Colon)
                {
                    Reporter.Report(ErrorType.Error, "No case's block definition", currentLineNumber, currentPosition);
                }

                tokenizer.Get();

                defaultBody = IsBlock();

                if (defaultBody == null)
                {
                    Reporter.Report(ErrorType.Error, "Expected block", currentLineNumber, currentPosition);
                }
                
            }

            return new MatchStatement(variable, cases.ToArray(), bodies.ToArray(), defaultBody, lineNumber, position);

        }

        public ReturnStatement IsReturnStatement()
        {
            if (tokenizer.Peek() == null || tokenizer.Peek().Kind != TokenKind.Return)
            {
                return null;
            }

            int lineNumber = tokenizer.Peek().LineNumber;
            int position = tokenizer.Peek().Position;

            tokenizer.Get();

            Expression value = IsExpression();

            if (value == null)
            {
                Reporter.Report(ErrorType.Error, "Expected return value expression", lineNumber, position);
            }

            return new ReturnStatement(value, lineNumber, position);
        }

        public BreakStatement IsBreakStatement()
        {
            if (tokenizer.Peek() == null || tokenizer.Peek().Kind != TokenKind.Break)
            {
                return null;
            }

            int lineNumber = tokenizer.Peek().LineNumber;
            int position = tokenizer.Peek().Position;

            tokenizer.Get();

            return new BreakStatement(lineNumber, position);
        }

        public ContinueStatement IsContinueStatement()
        {
            if (tokenizer.Peek() == null || tokenizer.Peek().Kind != TokenKind.Continue)
            {
                return null;
            }

            int lineNumber = tokenizer.Peek().LineNumber;
            int position = tokenizer.Peek().Position;

            tokenizer.Get();

            return new ContinueStatement(lineNumber, position);
        }

        public DoStatement IsDoStatement()
        {
            if (tokenizer.Peek() == null || tokenizer.Peek().Kind != TokenKind.Do)
            {
                return null;
            }

            int lineNumber = tokenizer.Peek().LineNumber;
            int position = tokenizer.Peek().Position;

            tokenizer.Get();

            Expression condition = IsExpression();

            if (condition == null)
            {
                Reporter.Report(ErrorType.Error, "Expected condition", lineNumber, position);
            }

            if (tokenizer.Peek() == null || tokenizer.Peek().Kind != TokenKind.Colon)
            {
                Reporter.Report(ErrorType.Error, "Expected block", lineNumber, position);
            }

            tokenizer.Get();

            Block body = IsBlock();

            if (body == null)
            {
                Reporter.Report(ErrorType.Error, "Expected block", lineNumber, position);
            }

            return new DoStatement(condition, body, lineNumber, position);

        }

        public WhileStatement IsWhileStatement()
        {
            if (tokenizer.Peek() == null || tokenizer.Peek().Kind != TokenKind.While)
            {
                return null;
            }

            int lineNumber = tokenizer.Peek().LineNumber;
            int position = tokenizer.Peek().Position;

            tokenizer.Get();

            Expression condition = IsExpression();

            if (condition == null)
            {
                Reporter.Report(ErrorType.Error, "Expected condition", lineNumber, position);
            }

            if (tokenizer.Peek() == null || tokenizer.Peek().Kind != TokenKind.Colon)
            {
                Reporter.Report(ErrorType.Error, "Expected block", lineNumber, position);
            }

            tokenizer.Get();

            Block body = IsBlock();

            if (body == null)
            {
                Reporter.Report(ErrorType.Error, "Expected block", lineNumber, position);
            }

            return new WhileStatement(condition, body, lineNumber, position);

        }

        public ForRangeStatement IsForRangeStatement()
        {
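            // A range loop has the shape "for x...10:": the for keyword, a counter
            // identifier, three consecutive dot tokens, then an integer literal.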
            Token[] tokens = tokenizer.Peek(6);

            if (tokens.Length < 6
               || tokens[0].Kind != TokenKind.For
               || tokens[1].Kind != TokenKind.Identifier
               || tokens[2].Kind != TokenKind.Dot
               || tokens[3].Kind != TokenKind.Dot
               || tokens[4].Kind != TokenKind.Dot
               || tokens[5].Kind != TokenKind.IntegerLiteral)
            {
                return null;
            }

            int lineNumber = tokenizer.Peek().LineNumber;
            int position = tokenizer.Peek().Position;

            tokenizer.Get();

            Identifier counter = IsIdentifier();

            if (counter == null)
            {
                Reporter.Report(ErrorType.Error, "Expected counter identifier", lineNumber, position);
            }

            tokenizer.Get(3);

            int currentLineNumber = tokenizer.Peek().LineNumber;
            int currentPosition = tokenizer.Peek().Position;

            IntegerLiteral upper = IsIntegerLiteral();

            if (upper == null)
            {
                Reporter.Report(ErrorType.Error, "Expected integer literal", currentLineNumber, currentPosition);
            }

            if (tokenizer.Peek() == null || tokenizer.Peek().Kind != TokenKind.Colon)
            {
                Reporter.Report(ErrorType.Error, "Expected block", lineNumber, position);
            }

            tokenizer.Get();

            Block body = IsBlock();

            if (body == null)
            {
                Reporter.Report(ErrorType.Error, "Expected block", lineNumber, position);
            }

            return new ForRangeStatement(counter, upper, body, lineNumber, position);

        }

        public ForInStatement IsForInStatement()
        {
            Token[] tokens = tokenizer.Peek(3);

            if (tokens.Length < 3
               || tokens[0].Kind != TokenKind.For
               || tokens[1].Kind != TokenKind.Identifier
               || tokens[2].Kind != TokenKind.In)
            {
                return null;
            }

            int lineNumber = tokenizer.Peek().LineNumber;
            int position = tokenizer.Peek().Position;

            tokenizer.Get();

            Identifier item = IsIdentifier();

            if (item == null)
            {
                Reporter.Report(ErrorType.Error, "Expected item identifier", lineNumber, position);
            }

            tokenizer.Get();

            Identifier collection = IsIdentifier();

            if (collection == null)
            {
                Reporter.Report(ErrorType.Error, "Expected collection identifier", lineNumber, position);
            }

            if (tokenizer.Peek() == null || tokenizer.Peek().Kind != TokenKind.Colon)
            {
                Reporter.Report(ErrorType.Error, "Expected block", lineNumber, position);
            }

            tokenizer.Get();

            Block body = IsBlock();

            if (body == null)
            {
                Reporter.Report(ErrorType.Error, "Expected block", lineNumber, position);
            }

            return new ForInStatement(item, collection, body, lineNumber, position);


        }

        public ForStatement IsForStatement()
        {
            Token[] tokens = tokenizer.Peek(2);

            if (tokens.Length < 2
               || tokens[0].Kind != TokenKind.For
               || tokens[1].Kind != TokenKind.OpenParenthesis)
            {
                return null;
            }

            tokenizer.Get(2);

            int lineNumber = tokens[0].LineNumber;
            int position = tokens[0].Position;

            Expression counter = IsExpression();

            if (counter == null)
            {
                Reporter.Report(ErrorType.Error, "Expected counter", lineNumber, position);
            }

            if (tokenizer.Peek() == null || tokenizer.Peek().Kind != TokenKind.Comma)
            {
                Reporter.Report(ErrorType.Error, "No separating comma", lineNumber, position);
            }

            tokenizer.Get();

            Expression condition = IsExpression();

            if (condition == null)
            {
                Reporter.Report(ErrorType.Error, "Expected condition", lineNumber, position);
            }

            if (tokenizer.Peek() == null || tokenizer.Peek().Kind != TokenKind.Comma)
            {
                Reporter.Report(ErrorType.Error, "No separating comma", lineNumber, position);
            }

            tokenizer.Get();

            Expression step = IsExpression();

            if (step == null)
            {
                Reporter.Report(ErrorType.Error, "Expected step", lineNumber, position);
            }

            if (tokenizer.Peek() == null || tokenizer.Peek().Kind != TokenKind.CloseParenthesis)
            {
                Reporter.Report(ErrorType.Error, "No closing parenthesis", tokens[1].LineNumber, tokens[1].Position);
            }

            tokenizer.Get();

            if (tokenizer.Peek() == null || tokenizer.Peek().Kind != TokenKind.Colon)
            {
                Reporter.Report(ErrorType.Error, "Expected block", lineNumber, position);
            }

            tokenizer.Get();

            Block body = IsBlock();

            if (body == null)
            {
                Reporter.Report(ErrorType.Error, "Expected for-statement body", lineNumber, position);
            }

            return new ForStatement(counter, condition, step, body, lineNumber, position);


        }

        public IfStatement IsIfStatement()
        {
            if (tokenizer.Peek() == null || tokenizer.Peek().Kind != TokenKind.If)
            {
                return null;
            }

            int lineNumber = tokenizer.Peek().LineNumber;
            int position = tokenizer.Peek().Position;

            List<Expression> conditions = new List<Expression>();
            List<Block> bodies = new List<Block>();
            Block elseBody = null;

            do
            {
                int currentLineNumber = tokenizer.Peek().LineNumber;
                int currentPosition = tokenizer.Peek().Position;

                tokenizer.Get();

                Expression condition = IsExpression();

                if (condition == null)
                {
                    Reporter.Report(ErrorType.Error, "Expected condition", currentLineNumber, currentPosition);
                }

                if (tokenizer.Peek() == null || tokenizer.Peek().Kind != TokenKind.Colon)
                {
                    Reporter.Report(ErrorType.Error, "Expected block", currentLineNumber, currentPosition);
                }

                tokenizer.Get();

                Block body = IsBlock();

                if (body == null)
                {
                    Reporter.Report(ErrorType.Error, "Expected block", currentLineNumber, currentPosition);
                }

                conditions.Add(condition);
                bodies.Add(body);

            } while (tokenizer.Peek() != null && tokenizer.Peek().Kind == TokenKind.Elif);

            if (tokenizer.Peek() != null && tokenizer.Peek().Kind == TokenKind.Else)
            {
                int currentLineNumber = tokenizer.Peek().LineNumber;
                int currentPosition = tokenizer.Peek().Position;

                tokenizer.Get();

                if (tokenizer.Peek() == null || tokenizer.Peek().Kind != TokenKind.Colon)
                {
                    Reporter.Report(ErrorType.Error, "Expected block", currentLineNumber, currentPosition);
                }

                tokenizer.Get();

                Block body = IsBlock();

                if (body == null)
                {
                    Reporter.Report(ErrorType.Error, "Expected block", currentLineNumber, currentPosition);
                }

                elseBody = body;
            }

            return new IfStatement(conditions.ToArray(), bodies.ToArray(), elseBody, lineNumber, position);

        }

        public TryStatement IsTryStatement()
        {
            if (tokenizer.Peek() == null || tokenizer.Peek().Kind != TokenKind.Try)
            {
                return null;
            }

            int lineNumber = tokenizer.Peek().LineNumber;
            int position = tokenizer.Peek().Position;

            tokenizer.Get();

            if (tokenizer.Peek() == null || tokenizer.Peek().Kind != TokenKind.Colon)
            {
                Reporter.Report(ErrorType.Error, "Expected condition", lineNumber, position);
            }

            tokenizer.Get();

            Block tryBody = IsBlock();

            if (tryBody == null)
            {
                Reporter.Report(ErrorType.Error, "Expected condition", lineNumber, position);
            }

            List<Identifier> types = new List<Identifier>();
            List<Block> bodies = new List<Block>();
            Block defaultCatchBody = null;

            if (tokenizer.Peek() == null || tokenizer.Peek().Kind != TokenKind.Catch)
            {
                return new TryStatement(tryBody, types.ToArray(), bodies.ToArray(), defaultCatchBody, lineNumber, position);
            }

            do
            {
                int currentLineNumber = tokenizer.Peek().LineNumber;
                int currentPosition = tokenizer.Peek().Position;

                tokenizer.Get();

                Identifier type = IsIdentifier();

                if (type == null)
                {
                    Reporter.Report(ErrorType.Error, "Expected exception type", currentLineNumber, currentPosition);
                }

                if (tokenizer.Peek() == null || tokenizer.Peek().Kind != TokenKind.Colon)
                {
                    Reporter.Report(ErrorType.Error, "Expected block", currentLineNumber, currentPosition);
                }

                tokenizer.Get();

                Block body = IsBlock();

                if (body == null)
                {
                    Reporter.Report(ErrorType.Error, "Expected block", currentLineNumber, currentPosition);
                }

                types.Add(type);
                bodies.Add(body);

            } while (tokenizer.Peek() != null && tokenizer.Peek().Kind == TokenKind.Catch && tokenizer.Peek(2)[1].Kind == TokenKind.Identifier);

            if (tokenizer.Peek() != null && tokenizer.Peek().Kind == TokenKind.Catch)
            {
                int currentLineNumber = tokenizer.Peek().LineNumber;
                int currentPosition = tokenizer.Peek().Position;

                tokenizer.Get();

                if (tokenizer.Peek() == null || tokenizer.Peek().Kind != TokenKind.Colon)
                {
                    Reporter.Report(ErrorType.Error, "Expected block", currentLineNumber, currentPosition);
                }

                tokenizer.Get();

                Block body = IsBlock();

                if (body == null)
                {
                    Reporter.Report(ErrorType.Error, "Expected block", currentLineNumber, currentPosition);
                }

                defaultCatchBody = body;
            }

            return new TryStatement(tryBody, types.ToArray(), bodies.ToArray(), defaultCatchBody, lineNumber, position);

        }

        public Node IsStatement()
        {
            Node statement = (Node)IsInjection() ??
                             (Node)IsMatchStatement() ??
                             (Node)IsTryStatement() ??
                             (Node)IsReturnStatement() ??
                             (Node)IsContinueStatement() ??
                             (Node)IsBreakStatement() ??
                             (Node)IsDoStatement() ?? 
                             (Node)IsWhileStatement() ??
                             (Node)IsForRangeStatement() ??
                             (Node)IsForInStatement() ??
                             (Node)IsForStatement() ??
                             (Node)IsIfStatement() ??
                             (Node)IsExpression() ??
                             (Node)IsFunctionCall();


            return statement;
        }

        public Block IsBlock()
        {
            if (tokenizer.Peek() == null)
            {
                return null;
            }

            int lineNumber = tokenizer.Peek().LineNumber;
            int position = tokenizer.Peek().Position;

            List<Node> statements = new List<Node>();

            bool commaSeperated = false;

            do
            {
                commaSeperated = false;

                Node statement = IsStatement();

                if (statement == null && statements.Count == 0)
                {
                    return null;
                }

                if (statement == null)
                {
                    int currentLineNumber = tokenizer.Peek().LineNumber;
                    int currentPosition = tokenizer.Peek().Position;

                    while (tokenizer.Peek() != null && tokenizer.Peek().LineNumber == currentLineNumber)
                    {
                        tokenizer.Get();
                    }

                    Reporter.Report(ErrorType.Error, "The statement is not valid", currentLineNumber, currentPosition);
                }

                statements.Add(statement);

                if (tokenizer.Peek() != null && tokenizer.Peek().Kind == TokenKind.Comma)
                {
                    tokenizer.Get();
                    commaSeperated = true;
                }

            } while (tokenizer.Peek() != null && (tokenizer.Peek().Position == position || commaSeperated));

            return new Block(statements.ToArray(), lineNumber, position);

        }

        public ListLiteral IsListLiteral()
        {
            Token openBracket = tokenizer.Peek();

            if (openBracket == null || openBracket.Kind != TokenKind.OpenBracket)
            {
                return null;
            }

            tokenizer.Get();

            int lineNumber = openBracket.LineNumber;
            int position = openBracket.Position;

            if (tokenizer.Peek().Kind == TokenKind.CloseBracket)
            {
                return new ListLiteral(null, lineNumber, position);
            }

            ExpressionList expressionList = IsExpressionList();

            if (expressionList == null)
            {
                Reporter.Report(ErrorType.Error, "Invalid list literal", openBracket.LineNumber, openBracket.Position);
            }

            if (tokenizer.Peek() == null || tokenizer.Peek().Kind != TokenKind.CloseBracket)
            {
                Reporter.Report(ErrorType.Error, "No closing bracket", openBracket.LineNumber, openBracket.Position);
            }

            tokenizer.Get();

            return new ListLiteral(expressionList, lineNumber, position);

        }

        public DictionaryLiteral IsDictionaryLiteral()
        {
            if (tokenizer.Peek() == null || tokenizer.Peek().Kind != TokenKind.OpenBrace)
            {
                return null;
            }

            Token openBraceToken = tokenizer.Get();
            
            int lineNumber = openBraceToken.LineNumber;
            int position = openBraceToken.Position;

            List<Expression> keys = new List<Expression>();
            List<Expression> values = new List<Expression>();

            Expression key = IsExpression();

            if (key == null)
            {
                Reporter.Report(ErrorType.Error, "Invalid key", lineNumber, position);
            }

            if (tokenizer.Peek().Kind != TokenKind.Colon)
            {
                Reporter.Report(ErrorType.Error, "Missing colon", lineNumber, position);
            }

            tokenizer.Get();

            Expression value = IsExpression();

            if (value == null)
            {
                Reporter.Report(ErrorType.Error, "Invalid value", lineNumber, position);
            }

            keys.Add(key);
            values.Add(value);

            if (tokenizer.Peek() != null && tokenizer.Peek().Kind == TokenKind.CloseBrace)
            {
                tokenizer.Get();
                return new DictionaryLiteral(keys.ToArray(), values.ToArray(), lineNumber, position);
            }

            if (tokenizer.Peek() == null || tokenizer.Peek().Kind != TokenKind.Comma)
            {
                Reporter.Report(ErrorType.Error, "Expected comma separator", lineNumber, position);
            }

            do
            {
                Token commaToken = tokenizer.Get();

                key = IsExpression();

                if (key == null)
                {
                    Reporter.Report(ErrorType.Error, "Invalid key", commaToken.LineNumber, commaToken.Position);
                }

                if (tokenizer.Peek().Kind != TokenKind.Colon)
                {
                    Reporter.Report(ErrorType.Error, "Missing colon", commaToken.LineNumber, commaToken.Position);
                }

                tokenizer.Get();

                value = IsExpression();

                if (value == null)
                {
                    Reporter.Report(ErrorType.Error, "Invalid value", commaToken.LineNumber, commaToken.Position);
                }

                keys.Add(key);
                values.Add(value);

            } while (tokenizer.Peek() != null
                     && tokenizer.Peek().Kind == TokenKind.Comma);

            if (tokenizer.Peek() == null || tokenizer.Peek().Kind != TokenKind.CloseBrace)
            {
                Reporter.Report(ErrorType.Error, "No closing brace", values[values.Count() - 1].LineNumber,  values[values.Count() - 1].Position);
            }

            tokenizer.Get();
            return new DictionaryLiteral(keys.ToArray(), values.ToArray(), lineNumber, position);

        }


        public ChainedFunctionCall IsChainedFunctionCall()
        {

            Token[] tokens = tokenizer.Peek(2);

            if (tokens.Length < 2
               || tokens[0].Kind != TokenKind.Dot
               || tokens[1].Kind != TokenKind.Identifier)
            {
                return null;
            }

            tokenizer.Get();

            Identifier name = IsIdentifier();

            int lineNumber = name.LineNumber;
            int position = name.Position;
            bool passesArguments = true;

            if (tokenizer.Peek() == null 
                || tokenizer.Peek().Kind != TokenKind.OpenParenthesis)
            {
                Reporter.Report(ErrorType.Error, "Expected argument list", lineNumber, position);
            }

            tokenizer.Get();
            
            ExpressionList arguments = IsExpressionList();

            if (arguments == null)
            {
                passesArguments = false;
            }

            if (tokenizer.Peek() == null || tokenizer.Peek().Kind != TokenKind.CloseParenthesis)
            {
                Reporter.Report(ErrorType.Error, "No closing parenthesis", tokens[1].LineNumber, tokens[1].Position);
            }

            tokenizer.Get();

            bool isChained = false;
            ChainedFunctionCall chainedCall = IsChainedFunctionCall();

            if (chainedCall != null)
            {
                isChained = true;
            }

            return new ChainedFunctionCall(name, passesArguments, arguments, isChained, chainedCall, lineNumber, position);

        }

        public Injection IsInjection()
        {
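            // An injection is delimited on both sides by the token sequence
            // OpenBracket, VMInjection, CloseBracket; each instruction inside it is
            // a byte opcode followed by two 32-bit integer operands.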
            Token[] tokens = tokenizer.Peek(3);

            if (tokens.Length < 3 || tokens[0].Kind != TokenKind.OpenBracket || tokens[1].Kind != TokenKind.VMInjection || tokens[2].Kind != TokenKind.CloseBracket)
            {
                return null;
            }

            int lineNumber = tokenizer.Peek().LineNumber;
            int position = tokenizer.Peek().Position;

            tokenizer.Get(3);

            List<byte> codes = new List<byte>();
            List<int> operands1 = new List<int>();
            List<int> operands2 = new List<int>();

            while (tokenizer.Peek().Kind == TokenKind.IntegerLiteral)
            {
                bool check = byte.TryParse(tokenizer.Get().Lexeme, out byte code);
                if (!check)
                {
                    Reporter.Report(ErrorType.Error, "The operation code must be a byte value", lineNumber, position);
                }

                if (tokenizer.Peek().Kind != TokenKind.IntegerLiteral)
                {
                    Reporter.Report(ErrorType.Error, "Expected first operand", lineNumber, position); 
                }

                check = int.TryParse(tokenizer.Get().Lexeme, out int operand1);
                if (!check)
                {
                    Reporter.Report(ErrorType.Error, "The operand must be a valid 32 bit integer value", lineNumber, position);
                }

                if (tokenizer.Peek().Kind != TokenKind.IntegerLiteral)
                {
                    Reporter.Report(ErrorType.Error, "Expected second operand", lineNumber, position);
                }

                check = int.TryParse(tokenizer.Get().Lexeme, out int operand2);
                if (!check)
                {
                    Reporter.Report(ErrorType.Error, "The operand must be a valid 32 bit integer value", lineNumber, position);
                }

                codes.Add(code);
                operands1.Add(operand1);
                operands2.Add(operand2);
            
            }

            tokens = tokenizer.Peek(3);

            if (tokens.Length < 3 || tokens[0].Kind != TokenKind.OpenBracket || tokens[1].Kind != TokenKind.VMInjection || tokens[2].Kind != TokenKind.CloseBracket)
            {
                Reporter.Report(ErrorType.Error, "Expected end of injection statement", lineNumber, position);
            }

            tokenizer.Get(3);

            return new Injection(codes.ToArray(), operands1.ToArray(), operands2.ToArray(), lineNumber, position);

        }


        public FunctionCall IsFunctionCall()
        {

            Token[] tokens = tokenizer.Peek(2);

            if (tokens.Length < 2|| tokens[0].Kind != TokenKind.Identifier || tokens[1].Kind != TokenKind.OpenParenthesis)
            {
                return null;
            }

            Identifier name = IsIdentifier();

            tokenizer.Get();

            int lineNumber = name.LineNumber;
            int position = name.Position;
            bool passesArguments = true;

            ExpressionList arguments = IsExpressionList();

            if (arguments == null)
            {
                passesArguments = false;
            }

            if (tokenizer.Peek() == null || tokenizer.Peek().Kind != TokenKind.CloseParenthesis)
            {
                Reporter.Report(ErrorType.Error, "No closing parenthesis", tokens[1].LineNumber, tokens[1].Position);
            }

            tokenizer.Get();

            bool isChained = false;
            ChainedFunctionCall chainedCall = IsChainedFunctionCall();

            if (chainedCall != null)
            {
                isChained = true;
            }


            return new FunctionCall(name, passesArguments, arguments, isChained, chainedCall, lineNumber, position);

        }

        public ParameterList IsParameterList()
        {
            List<Identifier> identifiers = new List<Identifier>();

            Identifier identifier = IsIdentifier();

            if (identifier == null)
            {
                return null;
            }

            int lineNumber = identifier.LineNumber;
            int position = identifier.Position;

            identifiers.Add(identifier);

            if (tokenizer.Peek() == null || tokenizer.Peek().Kind != TokenKind.Comma)
            {
                return new ParameterList(identifiers.ToArray(), lineNumber, position);
            }


            do
            {
                Token commaToken = tokenizer.Get();

                identifier = IsIdentifier();
                if (identifier == null)
                {
                    Reporter.Report(ErrorType.Error, "Invalid identifier", commaToken.LineNumber, commaToken.Position);
                }
                identifiers.Add(identifier);

            } while (tokenizer.Peek() != null && tokenizer.Peek().Kind == TokenKind.Comma);

            return new ParameterList(identifiers.ToArray(), lineNumber, position);

        }

        public ExpressionList IsExpressionList()
        {
            List<Expression> expressions = new List<Expression>();

            Expression expression = IsExpression();

            if (expression == null)
            {
                return null;
            }

            int lineNumber = expression.LineNumber;
            int position = expression.Position;

            expressions.Add(expression);

            if (tokenizer.Peek() == null || tokenizer.Peek().Kind != TokenKind.Comma)
            {
                return new ExpressionList(expressions.ToArray(), lineNumber, position);
            }


            do
            {
                Token commaToken = tokenizer.Get();

                expression = IsExpression();
                if (expression == null)
                {
                    Reporter.Report(ErrorType.Error, "Invalid expression", commaToken.LineNumber, commaToken.Position);
                }
                expressions.Add(expression);

            } while (tokenizer.Peek() != null && tokenizer.Peek().Kind == TokenKind.Comma);

            return new ExpressionList(expressions.ToArray(), lineNumber, position);

        }

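        // Expressions are parsed with the shunting-yard algorithm: the infix tokens
        // of the current line are converted to postfix order (operands go straight
        // to the output; an incoming operator first pops every stacked operator that
        // binds at least as tightly), and the postfix sequence is then folded into a
        // tree. For a hypothetical input `1 + 2 * 3`, the postfix order is `1 2 3 * +`,
        // which builds the tree `+(1, *(2, 3))`. This assumes a smaller value from
        // Attributes.GetPrecedence() means tighter binding, which is why 100 below
        // stands for the lowest precedence.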
        public Expression IsExpression()
        {

            Stack<Token> stack = new Stack<Token>();
            List<Node> postfix = new List<Node>();

            Token token = tokenizer.Peek();

            if (token == null)
            {
                return null;
            }

            int lineNumber = token.LineNumber;
            int position = token.Position;

            while ((token = tokenizer.Peek()) != null 
                  && token.LineNumber == lineNumber 
                  && (Attributes.CheckAttribute<LiteralAttribute>(token.Kind)
                  || Attributes.CheckAttribute<OperatorAttribute>(token.Kind)
                  || Attributes.CheckAttribute<IdentifierAttribute>(token.Kind)
                  || token.Kind == TokenKind.OpenBrace
                  || token.Kind == TokenKind.OpenBracket                  
                  || token.Kind == TokenKind.OpenParenthesis
                  || token.Kind == TokenKind.CloseParenthesis
                  || token.Kind == TokenKind.Null
                  || token.Kind == TokenKind.True
                  || token.Kind == TokenKind.False))
            {

                if (Attributes.CheckAttribute<IdentifierAttribute>(token.Kind)
                   || Attributes.CheckAttribute<LiteralAttribute>(token.Kind)
                   || token.Kind == TokenKind.OpenBrace
                   || token.Kind == TokenKind.OpenBracket
                   || token.Kind == TokenKind.Null
                   || token.Kind == TokenKind.True
                   || token.Kind == TokenKind.False)
                {
                    postfix.Add(IsOperand());
                    continue;
                }

                if (token.Kind == TokenKind.OpenParenthesis)
                {
                    tokenizer.Get();
                    stack.Push(token);
                    continue;
                }

                if (token.Kind == TokenKind.CloseParenthesis)
                {
                    tokenizer.Get();
                    while (stack.Count != 0 && stack.Peek().Kind != TokenKind.OpenParenthesis)
                    {
                        tokenizer.Unget(stack.Pop());
                        postfix.Add(IsOperator());
                    }

                    if (stack.Count == 0)
                    {
                        tokenizer.Unget(token);
                        break;
                    }
                    stack.Pop();
                    continue;
                }

                if (Attributes.CheckAttribute<OperatorAttribute>(token.Kind))
                {
                    tokenizer.Get();
                    //the lowest precedence
                    int stackPeekPrecedence = 100;

                    while (stack.Count != 0)
                    {
                        if (Attributes.CheckAttribute<OperatorAttribute>(stack.Peek().Kind))
                        {
                            stackPeekPrecedence = Attributes.GetPrecedence(stack.Peek().Kind);
                        }
                        else
                        {
                            //non-operators on the stack (an open parenthesis) count as the lowest precedence
                            stackPeekPrecedence = 100;
                        }

                        if (stackPeekPrecedence <= Attributes.GetPrecedence(token.Kind))
                        {
                            tokenizer.Unget(stack.Pop());
                            postfix.Add(IsOperator());
                        }
                        else
                        {
                            break;
                        }

                    }
                    stack.Push(token);
                }
            }

            while (stack.Count != 0)
            {
                if (stack.Peek().Kind == TokenKind.OpenParenthesis)
                {
                    Reporter.Report(ErrorType.Error, "No matching parenthesis", stack.Peek().LineNumber, stack.Peek().Position);
                    stack.Pop();
                }
                else
                {
                    tokenizer.Unget(stack.Pop());
                    postfix.Add(IsOperator());
                }
                
            }


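            // Fold the postfix sequence into a tree: operands are pushed onto a
            // stack, and every operator node pops its operand(s) and is pushed
            // back as a subtree. Whatever remains on top at the end is the root.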
            Stack<Node> treeStack = new Stack<Node>();

            foreach (Node node in postfix)
            {
                if (node is BinaryOperationNode binaryOperationNode)
                {
                    if (treeStack.Count < 2)
                    {
                        Reporter.Report(ErrorType.Error, "Missing operand", node.LineNumber, node.Position);
                        treeStack.Push(binaryOperationNode);
                    }
                    else
                    {
                        // The right-hand operand is on top of the stack, so it goes into Operand2
                        // (assuming Operand1 is meant to be the left-hand operand).
                        binaryOperationNode.Operand2 = treeStack.Pop();
                        binaryOperationNode.Operand1 = treeStack.Pop();

                        treeStack.Push(binaryOperationNode);
                    }
                }
                else if (node is UnaryOperationNode unaryOperationNode)
                {
                    if (treeStack.Count < 1)
                    {
                        Reporter.Report(ErrorType.Error, "Missing operand", node.LineNumber, node.Position);
                        treeStack.Push(unaryOperationNode);
                    }
                    else
                    {
                        unaryOperationNode.Operand = treeStack.Pop();

                        treeStack.Push(unaryOperationNode);
                    }
                }
                else
                {
                    treeStack.Push(node);
                }

            }


            if (treeStack.Count == 0)
            {
                return null;
            }

            Node result = treeStack.Pop();

            if (treeStack.Count >= 1)
            {
                Reporter.Report(ErrorType.Error, "Missing operator", treeStack.Peek().LineNumber, treeStack.Peek().Position);
            }

            return new Expression(result, lineNumber, position);

        }


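        // An operand is whichever alternative matches first. The null-coalescing
        // chain works because every Is*() method only consumes tokens once it is
        // sure of a match and returns null otherwise.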
        private Node IsOperand()
        {
            return (Node)IsLiteral() ??
                   (Node)IsConstantLiteral() ??
                   (Node)IsFunctionCall() ??
                   (Node)IsIdentifier();

        }


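        // An "identifier" here is a whole access path, e.g. `animal.name` or
        // `grades[i]` (hypothetical inputs): dotted names become IdentifierName
        // nodes and bracketed indices become ExpressionList nodes, collected in
        // order into the localization array.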
        private Identifier IsIdentifier()
        {
            Token token = tokenizer.Peek();

            if (token == null || token.Kind != TokenKind.Identifier)
            {
                return null;
            }

            int lineNumber = token.LineNumber;
            int position = token.Position;

            List<Node> localization = new List<Node>();

            do
            {
                if (token.Kind == TokenKind.Dot)
                {
                    tokenizer.Get();
                    token = tokenizer.Peek();
                }
                
                if (token.Kind == TokenKind.Identifier)
                {
                    string[] names = token.Lexeme.Split('.');

                    foreach (string name in names)
                    {
                        localization.Add(new IdentifierName(name, token.LineNumber, token.Position));
                    }
                    tokenizer.Get();
                }

                if (token.Kind == TokenKind.OpenBracket)
                {
                    int openBracketLineNumber = token.LineNumber;
                    int openBracketPosition = token.Position;
                    tokenizer.Get();

                    ExpressionList indexList = IsExpressionList();

                    if (indexList == null)
                    {
                        Reporter.Report(ErrorType.Error, "Expected index", openBracketLineNumber, openBracketPosition);
                    }

                    localization.Add(indexList);

                    if (tokenizer.Peek() == null
                        || tokenizer.Peek().Kind != TokenKind.CloseBracket)
                    {
                        Reporter.Report(ErrorType.Error, "No closing bracket", openBracketLineNumber, openBracketPosition);
                    }

                    tokenizer.Get();

                }

            } while ((token = tokenizer.Peek()) != null &&
                     ((token.Kind == TokenKind.Identifier 
                      && !(localization[localization.Count - 1] is IdentifierName))
                      || token.Kind == TokenKind.OpenBracket
                      || (tokenizer.Peek(2).Length == 2 && tokenizer.Peek(2)[0].Kind == TokenKind.Dot 
                      && tokenizer.Peek(2)[1].Kind == TokenKind.Identifier)));
                

            return new Identifier(localization.ToArray(), lineNumber, position);
        }

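        // Keyword constants (true, false, null) have their own token kinds and
        // are therefore handled separately from the ordinary literals below.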
        private Node IsConstantLiteral()
        {
            return (Node)IsBoolConstant() ?? IsNullConstant();
        }

        private NullConstant IsNullConstant()
        {
            Token token = tokenizer.Peek();

            if (token == null || token.Kind != TokenKind.Null)
            {
                return null;
            }

            tokenizer.Get();

            return new NullConstant(token.LineNumber, token.Position);
        }

        private BoolConstant IsBoolConstant()
        {
            Token token = tokenizer.Peek();

            if (token == null || (token.Kind != TokenKind.True && token.Kind != TokenKind.False))
            {
                return null;
            }

            tokenizer.Get();

            return new BoolConstant(token.Kind == TokenKind.True, token.LineNumber, token.Position);

        }

        private Node IsLiteral()
        {
            return (Node)IsCharacterLiteral() ??
                   (Node)IsStringLiteral() ??
                   (Node)IsIntegerLiteral() ??
                   (Node)IsRealLiteral() ??
                   (Node)IsDecimalLiteral() ??
                   (Node)IsListLiteral() ??
                   (Node)IsDictionaryLiteral();
        }

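        // Character and string literals still carry their surrounding quotes in
        // the lexeme, so the quotes are stripped and escape sequences such as \n
        // are resolved with Regex.Unescape() before the node is built.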
        private CharacterLiteral IsCharacterLiteral()
        {
            Token token = tokenizer.Peek();

            if (token == null || token.Kind != TokenKind.CharacterLiteral)
            {
                return null;
            }

            tokenizer.Get();

            string value = token.Lexeme;
            value = value.Remove(0, 1);
            value = value.Remove(value.Length - 1);

            try
            {
                value = Regex.Unescape(value);
            }
            catch (ArgumentException)
            {
                Reporter.Report(ErrorType.Error, "Invalid escape sequence", token.LineNumber, token.Position);
                return null;
            }

            char c;

            try
            {
                c = char.Parse(value);
            }
            catch (FormatException)
            {
                Reporter.Report(ErrorType.Error, "Too many characters in char literal", token.LineNumber, token.Position);
                return null;
            }

            return new CharacterLiteral(c, token.LineNumber, token.Position);

        }

        private StringLiteral IsStringLiteral()
        {
            Token token = tokenizer.Peek();

            if (token == null || token.Kind != TokenKind.StringLiteral)
            {
                return null;
            }

            tokenizer.Get();

            string value = token.Lexeme;
            value = value.Remove(0, 1);
            value = value.Remove(value.Length - 1);

            try
            {
                value = Regex.Unescape(value);
            }
            catch (ArgumentException)
            {
                Reporter.Report(ErrorType.Error, "Invalid escape sequence", token.LineNumber, token.Position);
                return null;
            }

            return new StringLiteral(value, token.LineNumber, token.Position);

        }

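        // Numeric literals are converted with the matching TryParse(); a failed
        // conversion (typically an out-of-range value) is reported as an error
        // instead of throwing.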
        private IntegerLiteral IsIntegerLiteral()
        {
            Token token = tokenizer.Peek();

            if (token == null || token.Kind != TokenKind.IntegerLiteral)
            {
                return null;
            }

            tokenizer.Get();

            bool isValid = Int64.TryParse(token.Lexeme, out long value);

            if (isValid)
            {
                return new IntegerLiteral(value, token.LineNumber, token.Position);
            }
            else
            {
                Reporter.Report(ErrorType.Error, "Integral constant is too large", token.LineNumber, token.Position);
                return null;
            }
        }

        private DoubleLiteral IsRealLiteral()
        {
            Token token = tokenizer.Peek();

            if (token == null || token.Kind != TokenKind.RealLiteral)
            {
                return null;
            }

            tokenizer.Get();

            bool isValid = double.TryParse(token.Lexeme, out double value);

            if (isValid)
            {
                return new DoubleLiteral(value, token.LineNumber, token.Position);
            }
            else
            {
                Reporter.Report(ErrorType.Error, "Integral constant is too large", token.LineNumber, token.Position);
                return null;
            }
        }

        private DecimalLiteral IsDecimalLiteral()
        {
            Token token = tokenizer.Peek();

            if (token == null || token.Kind != TokenKind.DecimalLiteral)
            {
                return null;
            }

            tokenizer.Get();

            bool isValid = decimal.TryParse(token.Lexeme, out decimal value);

            if (isValid)
            {
                return new DecimalLiteral(value, token.LineNumber, token.Position);
            }
            else
            {
                Reporter.Report(ErrorType.Error, "Integral constant is too large", token.LineNumber, token.Position);
                return null;
            }
        }

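        // Operators become operation nodes with their operands left null;
        // IsExpression() fills the operands in while folding the postfix
        // sequence. Attributes.IsUnary() distinguishes the two cases.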
        private Node IsOperator()
        {
            return (Node)IsBinaryOperator() ?? IsUnaryOperator();
        }

        private BinaryOperationNode IsBinaryOperator()
        {
            Token token = tokenizer.Peek();

            if (token == null
                || !Attributes.CheckAttribute<OperatorAttribute>(token.Kind)
                || Attributes.IsUnary(token.Kind))
            {
                return null;
            }

            tokenizer.Get();

            return new BinaryOperationNode(token.Kind, null, null, token.LineNumber, token.Position);
        }

        private UnaryOperationNode IsUnaryOperator()
        {
            Token token = tokenizer.Peek();

            if (token == null
                || !Attributes.CheckAttribute<OperatorAttribute>(token.Kind)
                || !Attributes.IsUnary(token.Kind))
            {
                return null;
            }

            tokenizer.Get();

            return new UnaryOperationNode(token.Kind, null, token.LineNumber, token.Position);
        }

    }
}