I just use this script and cron.
#!/bin/sh

# Usage: getone <comic page URL> <recipient> <subject>
getone() {
    PAGE=$1
    TO=$2
    SUBJ=$3
    # Extract the first "...zoom.gif..." image URL from the comic's page
    URL=$(curl -s "$PAGE" | sed -r -n -e 's/^.*"(http:[^"]*zoom\.gif[^"]*)".*$/\1/p')
    # If an image URL was found, fetch it and mail it as a GIF attachment
    [ -z "$URL" ] || ( curl -s "$URL" | mime-construct --subject "$SUBJ" \
        --to "$TO" --attachment comic.gif --type image/gif --file - )
}

getone http://comics.com/9_chickweed_lane/ someone@example.org chickwee
getone http://comics.com/dog_eat_doug/ someone.else@example.org dougeatdog
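For the cron side, here's a minimal sketch, assuming the script is saved as /usr/local/bin/getcomics (a hypothetical path, use your own) and marked executable, with the 6:30 AM daily run being an arbitrary choice:

# m  h  dom mon dow  command
30 6 * * * /usr/local/bin/getcomics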
Of course, any kind of content scraping raises ethical questions.