#!/bin/sh
# websearch - search for a string in a WWW index, feeding it to a browser
# $Id$
# option: which catalog to use
# parse cmdline - see ps2gif for a more extensive parse

# Puke [message ...] - print an optional fatal error message plus the
# usage text on stderr, then exit with status 1.
Puke()
{
  if [ -n "$*" ]; then echo "Fatal error: $*"; fi
  cat <<ZZ 1>&2
Usage: $0 [-n]
  [ -lynx | -lynxsource | -lynxpost | -w3m | -links | -netscape |
    -newnetscape | -konqueror ]
  [ -validate | -validcss | -google | -imlucky | -lycos | -webcrawler |
    -opentext | -hotbot | -inref | -deja | -altavista | -metacrawler |
    -northernlight | -teoma | -excite | -excitenews | -exciterev |
    -excitetracker | -excitenewsclass | -infoseek | -newsseek | -faqseek |
    -ilse | -ilsemail | -ilsenews | -ilseftp | -ftpsearch | -sunmanagers |
    -webster | -webster2 | -eurodicautom | -dummy | -petri | -sun |
    -sunsolve | -sunpatch | -answerbook | -suse | -faq | -rfc | -rfc2 |
    -bugsubj | -bugcont | -fd | -e2 | -userfriendly | -tranexp |
    -travlang | -travennl | -travnlen ]
  keyword [keyword2 [...]]
(but on a single line, of course)
to start up a WWW browser on a WWW index and feed it one or more keywords;
-newnetscape starts up Mozilla; -netscape feeds an existing one;
-konqueror feeds an existing Konqueror; -w3m starts up w3m;
-links starts up links; -lynx and -lynxsource start up Lynx;
-lynxpost allows the URL to be POSTed (required for some catalogues)
-google etcetera select the catalogue to search;
special catalogues include:
-validate, -validcss
        taking a URL as an argument, validates the HTML contents
-tranexp
        translate page from English to 'Dutch' using www.tranexp.com
        warning: do not eat or drink while using this
-sunsolve
        searches the Sunsolve Online website
-sunpatch
        doesn't actually search, it requires a 6-digit patch number and
        returns the HTML document description from Sunsolve online (if
        you are authorized to use it); if you supply a revision number
        as well, the *latest revision* of the patch will be downloaded
        directly (but the tarball is named after the revision number
        you specified!)
-faq    requires the exact name of a newsgroup or newsgroup hierarchy;
-bugsubj -bugcont
        search BUGTRAQ by subject/content, respectively;
-sdb    search in local SuSE sdb (if installed)
-deja   search Deja(News) USENET article database;
some preprocessing is done on the command line depending on the catalogue:
in some catalogues, Altavista-style attribute:value pairs can be used:
-eurodicautom
        translates words using the EURODICAUTOM online dictionary;
        it accepts the special keywords from:xx, to:xx, disp:ff, and
        subj:ss, where xx is a language code
        (da,nl,en,fi,fr,de,gn,it,la,pt,es,sv), ff is the code of a field
        to display (ab,au,ps,date,df,ni,ph,rf,showcf,cm,nt,be,ty), and
        ss is a subject code (xx,ag,tr8,tz, etc.)
-ftpsearch
        does a case insensitive glob search; 'domain:nl' can be used to
        limit by host domain (needs refinement);
-altavista
        searches in Altavista; for Altavista, the standard attributes
        are recognized (not all of them appear to work though) plus the
        following: what:web (default) searches the Web, what:news
        selects Usenet; join: selects how mult. keywords are joined,
        join:and is default; kl: selects the interface language,
        possible values are
        zh cz da nl en et fi fr de el he hu is it ja ko lv no pl pt ro
        ru es sv xx
        fmt: selects the output format, values are s c d n for
        standard, compact, detailed, or number only;
        d0: selects first possible date for results, possible value:
        21/Mar/96
        ldate: selects last date, same range of values
ZZ
  exit 1
}

# defaults
justshow=
catalog=google
auth=
# default browser: the first of w3m/links/lynx that is installed.
# BUG FIX: without the 'break', two or more installed browsers produced
# a multi-word value that matched no arm of the dispatch case below.
browser=`for b in w3m links lynx
do
  if type $b >/dev/null 2>/dev/null
  then echo $b; break
  fi
done`

# parse command line
ARGS=
while :
do
  case "$1" in
  -h|-help|\?*) Puke;;
  -n) justshow=1;;
  -auth=*) auth=$1;;    # kept verbatim: lynx accepts -auth=ID:PW as-is
  -netscape|-mozilla|-rm|-rn) browser=netscaperemote;;
  -newnetscape|-newmozilla|-nm|-nn) browser=netscapestart;;
  -w3m) browser=w3m;;
  -links) browser=links;;
  -lynxget|-lynx|-g) browser=lynxget;;
  -dump|-ld) browser=lynxdump;;
  -lynxpostdump|-pd) browser=lynxpd;;
  # BUG FIX: 'lynxp*' lacked its leading dash, so a keyword such as
  # "lynxpost" would have been eaten as an option
  -post|-lynxp*|-pp) browser=lynxpp;;
  -lynxsource|-ls|-src|-s) browser=lynxsource;;
  -konq*|-kfmclient) browser=kfmclient;;
  -d) catalog=dummy;;
  -*) catalog=${1#-};;  # strip the dash (was: echo $1 | perl -pe 's/.//;')
  "") break;; # hmmm ...
  *) #ARGS="$ARGS $1"
     # %22-quote a multiword argument, then %20-encode its blanks
     ARGS="$ARGS"' '`echo "$1" | perl -pe 's#[^ ]*( |%20).*#%22$&%22#; s# #%20#g;'`;;
  esac
  shift
done
ARGS=${ARGS# } # remove leading blank
# blanks still separate the arguments; we may insert AND etc.
between them # depending on the search engine # set the URL depending on the $catalog value ProcessFtpsearchArgs() { ARGS=`echo "$ARGS" | perl -n \ -e '%q = ("type","Case+insensitive+glob+search");' \ -e '%validvals=("type","glob","domain","nl edu uk fr de dk com",' \ -e '"path",".");' \ -e '$q=$r="";' \ -e 'foreach (split) {if(/^([^:]+):(.+)/ && defined($validvals{$1})) {' \ -e '($k,$v) = ($1,$2);' \ -e '$pat=$validvals{$k}; $pat=~s/ /|/g; $pat = "^(".$pat.")\\$";' \ -e 'if ($v=~/$pat/) {' \ -e '$v = ".$v" if $k eq "domain";' \ -e '$k = "limdom" if $k eq "domain";' \ -e '$k = "limpath" if $k eq "path";' \ -e '$v = "Case+insensitive+glob+search" if $v eq "glob";' \ -e '$q{$k}=$v;' \ -e '} else {' \ -e 'print "error: value for $k ($v) not one of ($validvals{$k})";' \ -e 'exit 1;}' \ -e '} else {$r .= "$_ ";} } chop($r);' \ -e 'foreach (keys(%q)) {$q.="&$_=$q{$_}";}' \ -e 'print "$q&query=$r"'` case "$ARGS" in error:*) echo "$ARGS" 1>&2; exit 1 ;; esac } ProcessEurodicautomArgs() { ARGS=`echo "$ARGS" | perl -n \ -e '%q = ("src","nl",' \ -e '"target", "en",' \ -e '"disp", "",' \ -e '"subjs", "xx");' \ -e '%validvals = ("from","da nl en fi fr de gn it la pt es sv",' \ -e '"to", "da nl en fi fr de gn it la pt es sv",' \ -e '"disp", "ab au ps date df ni ph rf showcf cm nt be ty",' \ -e '"subj", "xx ag 008/009 tr8 bz fi5 ch/ic ba fi7 de po/eca ec ed "' \ -e '. "el/phb tv/juk el en ce fi agb go in aul/aum as ju mg me mi at "' \ -e '. 
"so/ju9 st si fi6 te aug/oo co tr");' \ -e '$q=$r="";' \ -e 'foreach (split) {if(/^([^:]+):(.+)/ && defined($validvals{$1})) {' \ -e '($k,$v) = ($1,$2);' \ -e '$pat=$validvals{$k}; $pat=~s/ /|/g; $pat = "^(".$pat.")\\$";' \ -e 'if ($v=~/$pat/i) {' \ -e '$k = "src" if $k eq "from";' \ -e '$k = "target" if $k eq "to";' \ -e '$k = "subjs" if $k eq "subj";' \ -e '$q{$k}=$v;' \ -e '} else {' \ -e 'print "error: value for $k ($v) not one of ($validvals{$k})";' \ -e 'exit 1;}' \ -e '} else {$r .= "$_ ";} } chop($r);' \ -e '$q{"target"} .= "+";' \ -e '$q{"subjs"} .= "+";' \ -e 'foreach (keys(%q)) {$q.="&$_=\U$q{$_}";}' \ -e 'print "$q&term=$r"'` case "$ARGS" in error:*) echo "$ARGS" 1>&2; exit 1 ;; esac } ProcessAltavistaArgs() { ARGS=`echo "$ARGS" | perl -n \ -e '%q = ("join","and","kl","en","what","web","fmt",".",);' \ -e '%validvals=("what","web news","kl","zh cz da nl en et fi fr de ' \ -e 'el he hu is it ja ko lv no pl pt ro ru es sv xx","fmt",". c d n",' \ -e '"d0","","d1","","join","and or near"); $q=$r="";' \ -e 'foreach (split) {if(/^([^:]+):(.+)/ && defined($validvals{$1})) {' \ -e '($k,$v) = ($1,$2);' \ -e '$pat=$validvals{$k}; $pat=~s/ /|/g; $pat = "^(".$pat.")\\$";' \ -e 'if ($v=~/$pat/) {' \ -e '{$q{$k}=$v;}' \ -e '} else {' \ -e 'print "error: value for $k ($v) not one of ($validvals{$k})"; ' \ -e 'exit 1;}' \ -e '} else {$r .= "$_ ";} } chop($r);' \ -e 'foreach(keys(%q))' \ -e '{if($_ eq "join"){$join=$q{$_};}else{$q.="&$_=$q{$_}";}}' \ -e '$r =~ s/ /+$join+/g;' \ -e 'print "&mss=","nl","%2Fsearch&count$q&q=$r"'` case "$ARGS" in error:*) echo "$ARGS" 1>&2; exit 1 ;; esac } case "$catalog" in dummy) # test on local HTTP daemon's 'printenv' (if available) url='http://localhost:4321/cgi-bin/printenv?' 
# I like this, it allows me to test this script when I'm not online ;; val|validate) # submit the argument to W3C's HTML validator url='http://validator.w3.org/check?weblint;pw;outline;sp;uri=' ;; valcss|validcss) # submit the argument to W3C's CSS validator url='http://jigsaw.w3.org/css-validator/validator?uri=' lynx="$lynx -force_html" # helpt geen flikker ;; tranword) url='http://www.tranexp.com:2000/InterTran?type=text&from=dut&to=eng&phrase=' lynx="$lynx -force_html" # helpt geen flikker ;; trannnlword) url='http://www.tranexp.com:2000/InterTran?type=text&from=eng&to=dut&text=' lynx="$lynx -force_html" ;; trannl) # translate document using tranexp - sit back and enjoy url='http://www.tranexp.com:2000/InterTran?type=url&from=eng&to=dut&url=' ;; tran*) # translate document using tranexp - sit back and enjoy url='http://www.tranexp.com:2000/InterTran?type=url&from=dut&to=eng&url=' ;; fm|freshmeat) url='http://freshmeat.net/search.php3?query=' # kijk, dat is tenminste simpel! ;; faq) url=http://www.cs.uu.nl/wais/html/na-bng/ [ -n "$ARGS" -a -z "`echo "$ARGS" | sed 's/[a-z_-+.]//g'`" ] \ || Puke "-faq argument must be the name of a newsgroup or hierarchy" ARGS="$ARGS".html ;; bugcont) #url='http://www.netspace.org/cgi-bin/wa?S2=bugtraq&f=&a=&b=&s=&q=' url='http://www.progressive-comp.com/Lists/?l=bugtraq&w=2&r=1&q=b&s=' ;; bugsubj) #url='http://www.netspace.org/cgi-bin/wa?S2=bugtraq&f=&a=&b=&q=&s=' url='http://www.progressive-comp.com/Lists/?l=bugtraq&w=2&r=1&q=t&s=' ;; rfc2) url=http://jx.online.sh.cn/rfc/ # China ! 
[ -n "$ARGS" -a -z "`echo "$ARGS" | sed 's/[0-9][0-9][0-9]*//'`" ] \ || Puke "-rfc argument must be a number" ARGS=`echo "$ARGS" | sed 's/.*/rfc&.html/'` ;; rfc) url=http://www.dc.luth.se/doc/rfc/ [ -n "$ARGS" -a -z "`echo "$ARGS" | sed 's/[0-9][0-9][0-9]*//'`" ] \ || Puke "-rfc argument must be a number" ARGS=`echo "$ARGS" | sed 's/.*/rfc&.txt/'` ;; g|google) url='http://www.google.com/search?q=' #ARGS=`echo "$ARGS" | sed 's/^[a-zA-Z][a-zA-Z]*$/%2B&/g'` # why ?? ;; i|imlucky) url='http://www.google.com/search?btnI=1&q=' #ARGS=`echo "$ARGS" | sed 's/^[a-zA-Z][a-zA-Z]*$/%2B&/g'` # why ?? ;; nl|northernlight) url='http://www.northernlight.com/nlquery.fcg?cb=0&search.x=0&search.y=0&qr=' ;; t|teoma) url='http://www.teoma.com/gs?terms=' ARGS=`echo "$ARGS" | sed -e '/%22/s/$/\&phrase=1/' -e '/%22/s/%22//g'` ;; sdb|suse) url='http://localhost/cgi-bin/htsearch?method=and&format=builtin-long&config=htdig&restrict=&exclude=&words=' ;; petri*) url='http://www.daimi.au.dk/cgi-petrinet/individuals/fuzzy?name=' ARGS=`echo "$ARGS" | sed -e 's/%22//g'` # no multiword quoting please #ARGS=`echo "$ARGS" | sed 's/^[a-zA-Z][a-zA-Z]*$/%2B&/g'` # why ?? ;; sun) url='http://search.sun.com/query.html?col=www&st=1&nh=10&qt=' ;; sun100) url='http://search.sun.com/query.html?col=www&st=1&nh=100&qt=' ;; sunmanagers) # Dataman's searchable archive of the Sun-managers mailing list #url='http://www.dataman.nl/cgi-bin/sunmanagers?adv=' #url=$url'&fuzzyfactor=Minimal&allabove=on&query=';; url='http://www.dataman.nl/sunman.lp?sessionID=cHfMzY9Dc8xnzo35&doit=1' url=$url'&f=0&allabove=1&string=' ;; fs|ftpsearch) # FTP-search, the famed Archie re-incarnation # only glob searching ... put more options into ProcessFtpsearchArgs url='http://ftpsearch.ntnu.no/cgi-bin/search?' 
url=$url'hits=50&matches=&hitsprmatch=&limpath=&header=none&sort=none' url=$url'&trlen=20&f1=Count&f2=Mode&f3=Size&f4=Date&f5=Host&f6=Path' # treat the special arguments ProcessFtpsearchArgs ;; ftpsearch2) # commercial version :( now seems closed url='http://ftpsearch.com/cgi-bin/search?' url=$url'hits=50&matches=&hitsprmatch=&limpath=&header=none&sort=none' url=$url'&trlen=20&f1=Count&f2=Mode&f3=Size&f4=Date&f5=Host&f6=Path' # treat the special arguments ProcessFtpsearchArgs ;; webster) # English dictionary search #url='http://work.ucsd.edu:5141/cgi-bin/http_webster?method=exact&isindex=' url='http://smac.ucsd.edu/cgi-bin/http_webster?method=exact&isindex=' ;; webster2) # another address for the same service url='http://www2.lib.uchicago.edu/cgi-bin/www2/WEBSTER.sh?WORD=' ;; everything|everything2|e2) # the user-supplied dictionary system at www.everything2.{com,org} url='http://www.everything2.org/index.pl?type=e2node&node=' ARGS="`echo "$ARGS" | perl -pe 's/%27//g'`" ;; ed|eurodicautom) # an actual online translating dictionary case "$browser" in lynxpp) ;; *) echo You must use -lynxpost for this catalog; exit 1;; esac url='http://www2.echo.lu/cgi/edic/edicnew2.pl?c=50&what=term' case "$ARGS" in *\**) url="$url&match=" ;; *+*) url="$url&match=@" ;; *) url="$url&match=@@" ;; esac ProcessEurodicautomArgs ;; ab|answerbook) # SunOS 5.6 answerbook, local copy url='http://svan01:8888/' url=$url'ab2/@Ab2CollSearch?_AB2_SearchAllColl=0&DwebQuery=' ARGS=`echo "$ARGS" | perl -pe 's/%27|['"'"'"]/%22/g'` ;; sunsolve) case "$browser" in lynxpp) ;; *) echo You must use -lynxpost for this catalog; exit 1;; esac # online.sunsolve.sun.co.uk, paying customers only url='http://online.sunsolve.sun.co.uk/private-cgi/uk/query.pl?formname=complex&collections=bug%20enotify%20faqs%20infodoc%20patches%20patchrpts%20secbull%20solqna%20srdb%20stb%20&query-110=&query-128=&datespan=1&dateqty=month&maxdocs=100&sortorder=rel&query-32=' ARGS=`echo "$ARGS" | perl -pe 's/%27|['"'"'"]/%22/g;s/ 
/%20/g'` ;; sunpatch) # provide a SunOS patch number, it will supply the patch case "$ARGS" in [0-9][0-9][0-9][0-9][0-9]|[0-9][0-9][0-9][0-9][0-9][0-9]-[0-9][0-9]) # with revision number: direct download url='http://online.sunsolve.sun.co.uk/private-cgi/senddoc?/' ARGS="$ARGS.tar.Z" ;; [0-9][0-9][0-9][0-9][0-9]|[0-9][0-9][0-9][0-9][0-9][0-9]) # without revision number: HTML overview page url='http://online.sunsolve.sun.co.uk/private-cgi/doc2html?patches/' ;; *) Puke "supply a number of the form \d{6} or \d{6}-\d\d" ;; esac ;; #dejanews) # see excitenews) below deja*) url='http://www.deja.com/dnquery.xp?svcclass=dnserver&QRY=' # that's what Google uses, anyway, except for the svcclass ;; dejanolongerworks) # DejaNews - this probably doesn't work at the moment url='http://search.dejanews.com/dnquery.xp?defaultOp=AND&maxhits=100&format=terse&query=' # now convert arguments from an Altavista-style notation to a query # ARGS=`echo "$ARGS" | perl -n \ # -e 'sub sb{local($p,$r)=@_;(substr($p,0,length($r))eq$r)?$p:undef;}' \ # -e 'sub sa{foreach $p (("subj","author","login","host","organization",' \ # -e '"group","groups","keywords","month"))' \ # -e '{($res=&sb($p,$_[0]))&&return($res);}' \ # -e '"subj";}' \ # -e 'sub sc{local($_)=$_[0];/(.)/?$1:"s";}' \ # -e 'foreach (split) {if(/^([^:]+):(.+)/)' \ # -e '{$k{&sc(&sa($1))}=$2;}else{$k{"querytext"}=$_;}}'\ # -e 'print join("+",grep(($_="~$_+=$k{$_}"),keys(%k)));'` # no, that's too difficult: we've got to create strings like this one: # query=spelling+boekje&filter= # {dnserver.dbapr+{{~g+#nl.*#~a+#reinpost@*#~dc+{{1996/06/01+1996/12/22}}}}}+ # {dnserver.db96q3+{{~g+#nl.*#~a+#reinpost@*#~dc+{{1996/06/01+1996/12/22}}}}}+ # {dnserver.db96q4+{{~g+#nl.*#~a+#reinpost@*#~dc+{{1996/06/01+1996/12/22}}}}}+ # {dnserver.db96q5+{{~g+#nl.*#~a+#reinpost@*#~dc+{{1996/06/01+1996/12/22}}}}} # &defaultOp=AND&maxhits=25&format=terse&threaded=0&showsort=score&agesign=1& # ageweight=1 ;; # many of the following entries may not actually work 
excite) #url='http://www.excite.com/search.gw?collection=web&search=' url='http://www.excite.com/search.gw?search=' # says Google ;; dejanews|excitenews) url='http://www.excite.com/search.gw?collection=news&search=' ;; exciterev) url='http://www.excite.com/search.gw?collection=guide&search=' ;; excitetracker) url='http://www.excite.com/search.gw?collection=timely&search=' ARGS=`echo "$ARGS" | sed 's/ /+/g'` ;; excitenewsclass) url='http://www.excite.com/search.gw?collection=news.class&search=' ;; webcrawler) url='http://www.webcrawler.com/cgi-bin/WebQuery?/' ;; yahoo) url='http://search.yahoo.com/bin/search?p=' ;; amazon) url='http://www.amazon.com/exec/obidos/external-search/?tag=googlinc05&keyword=' # says Google ... ;; opendir|dmoz) url='http://search.dmoz.org/cgi-bin/search?search=' # says Google ;; egroups) ARGS=`echo "$ARGS" | sed 's/[ +]/%2B/g'` url='http://www.egroups.com/info/top?murl=/info/mainarcsearch%3fquery%3d' # says Google ;; m|metacrawler) case "$ARGS" in ' ') mtd=2 ;; *) mtd=0 ;; esac # hack! url='http://www.metacrawler.com/crawler?region=0&rpp=20&timeout=10&hpe=10&format=2&method='$mtd'&general=' ;; hotbot) #hb_huh='IU1HMBk9B371BBEED60AF79402424436D1AD1FD4' # look, it's on the form #url='http://www.search.hotbot.com/'$hb_huh'/hResult.html?' 
#url=$url'search.x=0&search.y=0&DV=7&RG=.com&DC=10&DE=2&OPs=MDRTP&_v=2&DU=days&SW=web&' #ARGS=`echo "$ARGS" | perl -pe \ # 'if(/ /){s|^|SM=MC&MT=|;}else{s| |+|g;s|%27||g;s|^|SM=phrase&MT=|;}'` # all nice, no longer woekrd - try what Google uses: url='http://www.hotbot.com/?MT=' ;; ilse|ilsew*) url='http://www.ilse.nl/cgi/nph-search.cgi?database=WWW&maxoutput=100&extra=on&query=' ;; ilsem*) url='http://www.ilse.nl/cgi/nph-search.cgi/&database=Mail&maxoutput=100&extra=on&query=' ;; ilsen*|ilseu*) url='http://www.ilse.nl/cgi/nph-search.cgi/&database=Usenet&maxoutput=100&extra=on&query=' ;; ilseftp) url='http://www.ilse.nl/cgi/nph-search.cgi/&database=FTP&maxoutput=100&extra=on&query=' ;; tuetel) #url='http://www.win.tue.nl/bin/phf?Jserver=cso.tue.nl&Qname=' #niet meer: #url='http://cgi.tue.nl/cgi-bin/WebPh?DB=csnet-ns&View=all&ns_server=cso.tue.nl&name=' url='http://cso.tue.nl/WebPh?DB=csnet-ns&View=all&ns_server=cso.tue.nl&name=' ARGS=`echo "$ARGS" | perl -pe 's| |+|g;s|^%27||g;s|%27$||;'` ;; opentext) # may no longer work url='http://search.opentext.com/omw/simplesearch?mode=and&SearchFor=';; lycos) # may no longer work #url='http://www.lycos.com/cgi-bin/pursuit?matchmode=and&maxhits=50&query=' url='http://www.lycos.com/cgi-bin/pursuit?query=' # says Google ;; infoseek|wwwseek) #url='http://guide-p.infoseek.com/Titles?sv=IS&ud4=1&lk=lcd&nh=100&col=WW&qt=' url='http://www.infoseek.com/Titles?qt=' # says Google ;; newsseek) url='http://guide-p.infoseek.com/Titles?sv=IS&ud4=1&lk=lcd&nh=100&col=NN&qt=' ;; faqseek) url='http://guide-p.infoseek.com/Titles?sv=IS&ud4=1&lk=lcd&nh=100&col=FQ&qt=' ;; av|altavista) #url='http://www.altavista.telia.com/cgi-bin/query?pg=aq' # discontinued url='http://www.altavista.com/cgi-bin/query?pg=aq' # treat the special arguments # note: parsing isn't perfect, the . 
argument to fmt isn't checked ProcessAltavistaArgs ;; av2) url='http://www.altavista.digital.com/cgi-bin/query?pg=q&kl=XX&q=' # Altavista according to google ;; reference|inref*) url='http://www.reference.com/cgi-bin/pn/go?choice=Search&search=advanced&mode=Extended&ranking=Relevance' # now convert arguments from an Altavista-style notation to a query ARGS=`echo "$ARGS" | perl -n \ -e 'sub sb{local($p,$r)=@_;(substr($p,0,length($r))eq$r)?$p:undef;}' \ -e 'sub sa{foreach $p (("subj","name","login","host","organization",' \ -e '"groups","keywords","startmonth","startday","startyear",' \ -e '"endmonth","endday","endyear")){($res=&sb($p,$_[0]))&&return($res);}' \ -e '"querytext";}' \ -e 'foreach (split) {if(/^([^:]+):(.+)/)' \ -e '{$k{&sa($1)}=$2;}else{$k{"querytext"}=$_;}}'\ -e 'foreach $k (keys(%k)){print"&",$k,"=",$k{$k};}'` ;; userfriendly|uf) site=http://www.userfriendly.org pref=/cartoons/archives date=`expr "$ARGS" : '\([0-9]*\)'` today=`date +%Y%m%d` case "$ARGS" in "") date=$today ;; "$date") date=$ARGS ;; *) Puke supply a date in the format yyyymmdd, for instance, $today ;; esac yy=`expr $date : '..\(..\)'` case `expr $date : '....\(..\)'` in 01) mn=jan ;; 02) mn=feb ;; 03) mn=mar ;; 04) mn=apr ;; 05) mn=may ;; 06) mn=jun ;; 07) mn=jul ;; 08) mn=aug ;; 09) mn=sep ;; 10) mn=oct ;; 11) mn=nov ;; 12) mn=dec ;; esac url=$site/`lynx -source $site$pref/$yy$mn/$date.html | fgrep $pref | awk -F\" '/uf/ {print $2}'` ARGS= case "$url" in "") Puke no User Friendly cartoon found for $date ;; esac ;; allciv) url='http://www.freeciv.org/archsearch.phtml?query=' ;; fc|fciv|freeciv|fd|freedev|freeciv-dev) url='http://arch.freeciv.org/cgi-bin-pub/searchlists?errors=0&maxfiles=10&maxlines=100&lists=freeciv-dev&dates=Jan+to+Mar+2000&dates=Apr+to+Jun+2000&dates=Jul+to+Sep+2000&dates=Oct+to+Dec+2000&dates=Jan+to+Mar+1999&dates=Apr+to+Jun+1999&dates=Jul+to+Sep+1999&dates=Oct+to+Dec+1999&query=' 
#url='http://master-www.complete.org/cgi-bin-pub/searchlists?errors=0&maxfiles=10&maxlines=10&lists=freeciv-dev&query=' ;; travlang|travnlen) url='http://dictionaries.travlang.com/DutchEnglish/dict.cgi?max=100&query=' ;; travennl) url='http://dictionaries.travlang.com/EnglishDutch/dict.cgi?max=100&query=' ;; *) Puke sorry, the WWW index $catalog is not supported ;; esac url=$url`echo "$ARGS" | perl -pe 's| |%20|g'` bg= # start in the background? # set cmdline, later postfixed with $ARGS case "$browser" in lynxpd) input=`echo $url | sed 's/^[^?]*?//'` cmd="lynx $auth -post_data `echo $url | sed 's/?.*//'`" ;; lynxpp) tmpfile=/tmp/`basename $0`-post-$$.html touch $tmpfile || Puke cannot create temporary file $tmpfile trap "rm $tmpfile" 1 2 3 13 14 15 urlclean=`echo "$url" | awk -F\? '{print $1}'` urlqargs=`echo "$url" | awk -F\? '{print $2}' | sed 's/\&/ /g'` cat > $tmpfile <lynx post form

lynx post form

You're seeing this form because we want to do a POST request; the corresponding GET request may or may not work.

Form attributes available:
ZZ for qarg in `echo "$url" | awk -F\? '{print $2}' | sed 's/\&/ /g'` do set `echo "$qarg" | sed 's/=/ /'` name=$1 #value=`perl -e '$_=shift(@ARGV);s/%([\dA-Fa-f][\dA-Fa-f])/pack("C",hex($1))/ge;s/\+/ /g;s/"/\"/g;print' "$2"`; value=`perl -e '$_=shift(@ARGV);s/%([\dA-Fa-f][\dA-Fa-f])/pack("C",hex($1))/ge;s/"/\"/g;print' "$2"`; # jawel, beste kijkbuiskinderen, dit werkt op -sunsolve en -eurodicautom #value=`echo "$2" | sed 's/+/ /g'` echo "
$name
">>$tmpfile done cat >> $tmpfile < ZZ cmd="lynx $auth $tmpfile" ;; w3m) cmd="w3m $auth $url";; links) cmd="links $auth $url";; lynx) cmd="lynx $auth $url";; lynxdump) cmd="lynx -dump $auth $url";; lynxsource) cmd="lynx $auth -source $url";; netscaperemote) cmd='netscape -remote OpenURL('$url')';; netscapestart) cmd='netscape '$url; bg=1;; kfmclient) cmd='kfmclient openURL '$url; bg=1;; *) Puke 'which browser do I use?';; esac # actually call it? if [ "$justshow" = 1 ] then cmd="echo $cmd" fi # go ! if [ -n "$input" ] then if [ "$bg" = 1 ] then echo $input | $cmd & else echo $input | exec $cmd fi else if [ "$bg" = 1 ] then $cmd & else exec $cmd fi fi