#!/usr/bin/env ruby
#
# Author: IKARASHI Yoshinori
# License: GPLv2
# $Id: twittail.rb 11 2008-03-27 01:01:45Z yoosee $
#
# twittail.rb -- "tail -f" viewer for a Twitter timeline log written by
# twitlogger.rb.  Follows today's log file, colorizes each status line,
# expands recent statuses of an @reply target, and appends the <title>
# of any URL found in the status text.

#username = ""
#password = ""
logdir = ""        # enter your log location (same with twitlogger.rb)
interval = 60
reply_expand = 2   # how many recent statuses to show for an @reply (0 disables)

require 'open-uri'
require 'uri'
require 'timeout'
require 'time'      # FIX: Time.parse / Time#httpdate were used without this
require 'stringio'  # FIX: StringIO (gzip decode) was used without this
require 'zlib'
require 'rexml/document'
require 'cgi'
require 'nkf'

TIMELINE_URI = "http://twitter.com/statuses/user_timeline/"

# ANSI escape sequences used for terminal coloring.
C = {
  :clear => "\e[0m", :bold => "\e[1m", :underline => "\e[4m", :reverse => "\e[7m",
  :grey => "\e[30m", :red => "\e[31m", :green => "\e[32m", :yellow => "\e[33m",
  :blue => "\e[34m", :magenta => "\e[35m", :cyan => "\e[36m", :white => "\e[37m",
}

# Fetch +uri+ and return the response body as a String ('' on any error).
# Optional HTTP basic-auth credentials and an If-Modified-Since time may be
# given.  Gzip-compressed responses are transparently decoded.
def fetch uri, user=nil, password=nil, since=nil
  opts = {
    "Accept-Encoding" => "gzip, deflate",
    "User-Agent" => 'Mozilla/6.0 TwitLogger',
  }
  opts[:http_basic_authentication] = [user, password] if user and password
  # FIX: the conditional-request header is If-Modified-Since;
  # "Last-Modified-Since" is not a real HTTP header.
  opts['If-Modified-Since'] = since.httpdate if since
  page, content_type = nil, nil
  begin
    # FIX: bare Kernel#timeout was removed from Ruby; use the module method.
    page = Timeout.timeout(10) {
      URI.parse(uri).open(opts) {|f|
        content_type = f.content_type
        f.read
      }
    }
  rescue OpenURI::HTTPError
    STDERR.puts "HTTP Error #{$!}"
    return ''
  rescue StandardError, Timeout::Error
    STDERR.puts "Error #{$!}"
    return ''
  end
  # gzip magic bytes (\x1f\x8b) => decompress the body before returning
  if /\A\x1f\x8b/ =~ page
    begin
      page = Zlib::GzipReader.new(StringIO.new(page)).read || ''
    rescue Zlib::Error
      page = '(zlib decode error)'
    end
  end
  return page
end

# Parse a statuses XML document (Twitter user_timeline format) into an array
# of "HH:MM:SS screen_name: text" strings, in document order.
def parse_xml page
  a = Array.new
  rexml = REXML::Document.new page
  rexml.elements.each("//statuses/status") do |status|
    name, time, text = nil, nil, nil
    begin
      name = status.elements["user/screen_name"].text
      time = Time.parse(status.elements["created_at"].text)
      text = status.elements["text"].text
    rescue
      puts "#{$!}:#{$@}"
    end
    # FIX: skip malformed entries instead of crashing on nil.strftime below
    next unless name and time
    a.push "#{time.strftime("%H:%M:%S")} #{name}: #{text}"
  end
  return a
end

# Fetch +uri+ and return the page's <title> text converted to UTF-8 with
# newlines collapsed, or '' when no title is found.
def expand_uri uri
  title = ''
  page = fetch uri
  # FIX: the opening <title> tag was missing from the pattern, which made
  # the capture run from the start of the document.
  if /<title>(.+?)<\/title>/im =~ page
    # FIX: titles contain HTML entities (&amp; etc.), so decode with
    # unescapeHTML; CGI.unescape is URL %-decoding and mangles '+'.
    title = CGI.unescapeHTML(NKF.nkf('-w', $1).gsub(/\n/, ' '))
  end
  return title
end

# Fetch up to +num_expand+ recent statuses of +reply_name+ and format them
# as indented " => HH:MM name: text" lines (returned as one String).
def expand_replies reply_name, num_expand
  replies = ''
  uri = TIMELINE_URI + reply_name + ".xml?count=#{num_expand}"
  a = parse_xml(fetch(uri))
  a[0..(num_expand-1)].each do |l|
    r_time, r_name, r_text = l.chomp.split(/\s/, 3)
    r_name.sub!(/:/, '')
    r_time = r_time[0..4]   # keep HH:MM only
    replies << " => #{r_time} #{C[:cyan]}#{r_name}#{C[:clear]}: #{r_text}\n"
  end
  return replies
end

# Placeholder -- keyword highlighting is not implemented yet.
def pickup_keyword
end

# --- main: follow today's log file and pretty-print each new line ---
logfile = Time.now.strftime("%Y%m%d.log")
log = File.open(logdir + logfile)
log.seek(0, IO::SEEK_END)   # start at end of file, like tail -f

loop do
  unless s = log.gets
    sleep 1
    next
  end
  begin
    next if s.chomp.empty? or /^\s*$/ =~ s
    time, name, text = s.chomp.split(/\s/, 3)
    name.sub!(/:/, '')
    time = time[0..4]   # keep HH:MM only
    replies = ''
    # Show recent context of the @reply target, if any.
    replies = expand_replies($1, reply_expand) if reply_expand > 0 && /\@(\w+)/ =~ text
    # Highlight @mentions.
    text.sub!(/@\w+/){|u| "#{C[:underline]}#{C[:magenta]}#{u}#{C[:clear]}"}
    # Append the page title of the first URL in the text ($1 from the
    # modifier-if regexp, which is evaluated before the interpolation).
    text += " [#{C[:yellow]}#{expand_uri $1}#{C[:clear]}]" if /(s?https?:\/\/[-_.!~*'()a-zA-Z0-9;\/?:\@&=+\$,%#]+)/ =~ text
    STDOUT.puts "#{C[:bold]}#{C[:yellow]}#{time}#{C[:clear]} #{C[:bold]}#{C[:cyan]}#{name}#{C[:clear]}: #{text}"
    STDOUT.puts replies unless replies.empty?
    STDOUT.flush
  rescue
    STDOUT.puts "#{$!}:#{$@}"
  end
  sleep 5
end