#!/usr/local/bin/perl -w
# This script builds a cache of web pages which can be searched by
# sitesearch.pl; it should be run each time the web site content changes.

use strict;
use CGI qw(:standard);
use Data::Dumper;

my $dir   = 'your_file_path';    # directory to search
my $ext   = 'htm';               # page types to search
my $cache = 'sitesearch.dat';    # cache file

my (@Results, $file, $title);

# Optional boundaries for the search area, to avoid searching on
# repeated text (e.g. headers/footers shared by every page):
my $startstring = 'unique_ident_1';
my $endstring   = 'unique_ident_2';

# Fail loudly if the content directory is wrong — otherwise the glob
# below would silently cache whatever the current directory holds.
chdir $dir or die "chdir $dir: $!";

# Get all the relevant pages; strip out title, file name and
# searchable text, and store them in an array of hashes.
while (<*.$ext>) {
    # 3-arg open prevents mode injection via the file name; skip an
    # unreadable file (with a warning) rather than slurping nothing.
    # The bareword handle FILE is kept because later code (beyond this
    # view) may refer to it.
    open FILE, '<', $_ or do { warn "open $_: $!"; next };
    read FILE, $file, -s(FILE);    # slurp whole file (-s FILE = its size)
    # NOTE(review): the original source is truncated at this point —
    # the match below is incomplete in the file and preserved as-is.
    $file =~ m#