[Wolves] delicious link logger

Rob Annable rob at annable.co.uk
Tue May 11 05:49:13 BST 2004


> Why not just write something that
> subscribes to your del.icio.us RSS feed and converts it (or xslt's it,
> or whatever) into HTML?

I suspect this is very good advice, Aq; however, you're forgetting that
I make buildings for a living, not code.
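
(For what it's worth, I think the kind of thing Aq means would look roughly
like the sketch below -- pull the RSS feed and turn the items into an HTML
list. Completely untested, and the feed URL is just a guess on my part:)

#!/usr/bin/perl
# rough sketch of the RSS-to-HTML approach -- untested, feed URL is a guess
use strict;
use LWP::Simple qw(get);
use XML::RSS;

my $feed = get("http://del.icio.us/rss/eversion")
    or die "couldn't fetch the feed\n";

my $rss = XML::RSS->new;
$rss->parse($feed);

print "<ul>\n";
foreach my $item (@{ $rss->{'items'} }) {
    print "<li><a href=\"$item->{'link'}\">$item->{'title'}</a></li>\n";
}
print "</ul>\n";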

There's a link on the del.icio.us site to a Perl script that, when cron'ed,
will extract your links and deliver a file into your blosxom folder as an
entry. It's exactly what I'm after, but predictably, I can't get it to work.

My error log says:

 [error] [client 82.37.7.151] Premature end of script headers: /home/rytdsjzi/public_html/rob/deli.cgi

Perhaps some of you l33t chaps out there could help me find out why. The
Perl looks like this:

#!/usr/bin/perl
##
## A blosxom-centric del.icio.us parser that
## creates blosxom entries of your daily
## del.icio.us links.
##
## Best if cron-ed to run daily.
##
## Make sure you check out the link output area near
## the bottom of the script for formatting options
## and info. on how to access other tags.
##
## By:  Brett O'Connor (oconnorb AT dogheadbone.com)
##      Mike Hostetler (thehaas AT binary.net)
##
## Last Revision Date: 2004-03-08
#############################################################
require XML::Parser;
require LWP::UserAgent;
require HTTP::Request::Common;

#your del.icio.us account info
$login = "eversion";
$password = "temppassword";

#file name and location setup for outputting to file
$timestamp = time();
$outfile = "/rytdsjzi/public_html/rob/journal/links/".$timestamp.".txt";

#output header and footer
$header = "today's links\n<ul>";
$footer = "</ul>(<i>delivered via <a href=\"http://del.icio.us/\">del.icio.us.</a>)</i>";

#get today's entries
($Second, $Minute, $Hour, $Day, $Month, $Year, $WeekDay, $DayOfYear, $IsDST) = localtime(time);
my $RealMonth = $Month + 1;
if ($RealMonth < 10)
{
    $RealMonth = "0" . $RealMonth;
}
if ($Day < 10)
{
    $Day = "0" . $Day;
}
$FixedYear = $Year + 1900;
$date = "$FixedYear-$RealMonth-$Day";
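# $date ends up as YYYY-MM-DD with zero padding; it is passed straight
# through to the dt= parameter of the API request in fetchPage() below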

#create the output
my $parser = new XML::Parser (Style=>'Subs', Pkg=>'SubHandlers', ErrorContext=>2);
$parser->setHandlers(Char => \&charData);
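# the Subs style means each <post> start tag calls SubHandlers::post() and
# each end tag calls SubHandlers::post_(); character data goes to charData(),
# which simply discards it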

my $out;

$parser->parse(fetchPage($login,$password,$date));

#write out entry to file (or print to stdout for debugging)
$outputTo = 'file'; #set to anything other than 'file' to print to stdout instead
if ($out ne '') {
    if ($outputTo eq 'file') {
        open(FILE, ">$outfile") or die "can't open $outfile: $!";
        print FILE "$header";
        print FILE "$out";
        print FILE "$footer";
        close(FILE);
    } else {
        print $header;
        print $out;
        print $footer;
    }
}

#subroutines
sub fetchPage {
    my ($user,$pass,$date) = @_;

    #setup user agent
    my $ua = new LWP::UserAgent;
    $ua->agent("Deloxom/0.1 " . $ua->agent);
    $ua->credentials("del.icio.us:80","del.icio.us API",$user,$pass);

    #setup request
    my $req = new HTTP::Request GET => "http://del.icio.us/api/posts/get?dt=".$date;

    #request data
    my $response = $ua->request($req);

    if ($response->is_success) {
        return $response->content();
    } else {
        die "del.icio.us request failed: " . $response->status_line . "\n";
    }
}
sub charData
{
    #kill misc data
}
#xml handling subroutines
package SubHandlers;
sub post {
    my $expat = shift; my $element = shift;
    my %attr;
    while (@_) {
        my $att = shift;
        my $val = shift;
        $attr{$att} = $val;
    }

    ####################
    # LINK OUTPUT AREA #
    ####################

    # add the link to the output as a list item:
    $out .= "<li><a href=\"".$attr{'href'}."\">".$attr{'description'}."</a></li>";
    # extended tag output:
    if ($attr{'extended'}) {
        $out .= "<blockquote>".$attr{'extended'}."</blockquote>";
        undef $attr{'extended'};
    }

    # some other stuff you might be interested in:
    #
    # $attr{'time'} << timestamp like "2004-02-10T05:11:37Z"
    # $attr{'tag'} << tags as a space-separated list
    #
}
sub post_ {
    # by default this does nothing; put any formatting you want
    # to appear after each post here
}
#EOF
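
One thing I noticed while pasting it in: the script never prints a
Content-type line, and as far as I can tell that's what Apache means by
"premature end of script headers" when something is run as a CGI -- the
comments say it's really meant to be cron'ed rather than hit through the
browser. If it does have to live as deli.cgi, I'm guessing (untested) that
something like this near the top would keep Apache quiet:

print "Content-type: text/plain\n\n";

I also spotted that $outfile starts with /rytdsjzi/ while the path in the
error log starts with /home/rytdsjzi/, so the output path might be missing
the /home bit.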



Rob
--
http://rob.annable.co.uk



