# Copyright 2006 Instituto de Investigaciones Dr. José María Luis Mora /
# Instituto de Investigaciones Estéticas.
# See COPYING.txt and LICENSE.txt for redistribution conditions.
#
# D.R. 2006 Instituto de Investigaciones Dr. José María Luis Mora /
# Instituto de Investigaciones Estéticas.
# Véase COPYING.txt y LICENSE.txt para los términos bajo los cuales
# se permite la redistribución.

require 'strscan'

module KRLogic
module DefFiles
module Parser

  # A terminal symbol of the definition-file grammar: the regex used to scan
  # it, the order in which it is tried, and the symbol the racc parser sees
  # (the upper-cased name with a "_TOK" suffix).
  class Token
    attr_accessor(:regex, :scan_order, :name)

    def initialize(regex, scan_order, name)
      @regex = regex
      @scan_order = scan_order
      @name = name.upcase + "_TOK"
    end

    def sym
      @name.to_sym
    end

    def scan_str(str_scanner)
      str_scanner.scan(@regex)
    end
  end

  # A grammar rule (nonterminal): its lower-cased name plus the action code
  # attached to it by the element definitions.
  class Rule
    attr_accessor(:code)
    attr_reader(:name)

    def initialize(name)
      @name = name.downcase
    end
  end

  # Element definitions; these use the Token and Rule classes above.
  require 'kr_logic/def_files/base_elements'
  require 'kr_logic/def_files/standard_elements'

  # Regenerates the racc grammar and parser (when AppConfig.generate_grammar
  # is set) and loads the resulting parser class.
  def Parser.prepare
    grammar_file = AppConfig.grammar_tmp_dir + 'autogen_grammar.y'
    parser_code = AppConfig.grammar_tmp_dir + 'autogen_parser.rb'

    Element.setup_lines_and_sections

    if AppConfig.generate_grammar
      begin
        f = File.open(grammar_file, 'w')
        f << Parser.create_grammar
      rescue SystemCallError
        puts "IO error creating temporary grammar file"
        raise
      ensure
        f.close unless f.nil?
      end

      racc_output = `racc #{grammar_file} -o#{parser_code}`
      raise "Error processing grammar file" if ($?.exitstatus != 0)
    end

    # The following line loads the autogenerated
    # KRLogic::DefFiles::Parser::GrammarParser class.
    require "#{parser_code}"
    @p = GrammarParser.new
  end

  # Tokenizes the contents of a definition file and runs the generated parser
  # over the resulting token array.
  def Parser.parse(def_file_contents, file_name)
    def_file_contents += "\n"
    ts = TokenSet.new(def_file_contents, file_name)
    @p.tokens = ts.to_a
    @p.parse
  end

  # Assembles the racc grammar source that Parser.prepare writes out to
  # autogen_grammar.y. The heredoc that held the grammar template was garbled
  # away in this copy of the file (only "y = <" survives), so its body is left
  # as a gap here.
  def Parser.create_grammar
    y = ""  # gap: original grammar template not recoverable
    y
  end

  # Scans definition-file text into racc-style token pairs. Most of this class
  # was garbled in the source; only the returned token tuple and the
  # syntax-error raise survived, so the surrounding scanning loop is a
  # reconstruction and every assumed detail is marked below.
  class Tokenizer < StringScanner  # StringScanner parent assumed from the peek/scan usage
    def initialize(str, file_name)
      super(str)
      @line = 1         # assumed: line counter used in the error message below
      @file = file_name # the raise below calls @file.name, so this is expected
                        # to be an object describing the file, not a bare string
    end

    def shift
      return [false, false] if eos?  # assumed: end-of-input marker checked by TokenSet
      Element.tokens.each do |tok|   # assumed accessor over the registered Token objects
        str = tok.scan_str(self)
        if str
          @line += str.count("\n")   # assumed: keep the line count current
          return [tok.sym, {:str => str, :line => @line, :file => @file}]
        end
      end
      raise "Syntax error: #{self.peek(20)}, line: #{@line}, #{@file.name}."
    end
  end

  # Runs the Tokenizer over a whole definition file and collects the tokens,
  # including the terminating false token, into the array handed to racc.
  class TokenSet
    def initialize(str, file_name)
      @tokenizer = Tokenizer.new(str, file_name)
      @tokens = Array.new
      while true
        t = @tokenizer.shift
        @tokens.push(t)
        break if (t[0] == false)
      end
    end

    def to_a
      @tokens
    end
  end

end
end
end
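
# A minimal usage sketch (illustrative; 'example.def' is a hypothetical file
# and AppConfig is assumed to be configured elsewhere). Parser.prepare must
# run first so that the generated GrammarParser instance exists when
# Parser.parse is called:
#
#   KRLogic::DefFiles::Parser.prepare
#   source = File.read('example.def')
#   tree   = KRLogic::DefFiles::Parser.parse(source, 'example.def')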