#!/bin/bash -e
#
# Libreboot documentation build script
#
# The function build_index() borrows from gendocs.sh, part of gnulib,
# the GNU Portability Library (https://www.gnu.org/software/gnulib/).
# The objective is to generate an HTML document that indexes the
# libreboot documentation in its various formats.
#
# build_index() does not assume that a particular set of formats has
# been generated; it builds the index from whichever files are
# present in $man_dir.
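#
# Example invocation, going by the usage text printed further down
# (the trailing "w" selects web mode for html output):
#   $ ./build docs pdf
#   $ ./build docs html w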
# INITIALIZATION #
##################
# Initialization for manual directory
[ -d docs/manual ] || mkdir docs/manual
# Initialization for building manuals
pkgname="libreboot"
name=$(basename "$1") # Always defined; users do not run this script directly
constants="docs/constants.texi"
man_dir="docs/manual/"
texinfo_src="docs/libreboot.texi"
resources_dir="docs/resources" # for including in html tar.gz
css_path="" # TODO: add css?
# Default variables
prefix="resources/"
preprefix="../"
make_cmd="makeinfo"
comp_cmd="gzip -f -9 -c"
c_ext=".gz"
meta_type="man"
type="reg"
is_html="no"
make_opts=""
use_install=""
# Initialization for generating eps images
images_path="docs/resources/images"
# Initialization for making index.html
template_dir="docs/templates/"
pkg_title="GNU Libreboot manual"
pkg_email="example@libreboot.org" # sample
pkg_name="libreboot"
cur_date=$(date '+%B %d, %Y')
# In case users try to build more than one format at once.
if [ $# -gt 2 ]; then
    echo "ERROR: Only one docs format can be built at once."
    exit 1
fi
case "$2" in
    "w")
        meta_type="web";;
    "")
        :;; # This is the normal usage case
    *)
        echo "ERROR: Only one docs format can be built at once."
        echo "If you are building html files for the libreboot website, use:"
        echo "\$ ./build docs <html-type> w"
        exit 1;;
esac
### FUNCTIONS ####
##################
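# Write the path constants that the texinfo source reads back in
# (presumably via @value{docsdir} and @value{useinstall}).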
set_constants()
{
    echo "@set docsdir $1" > "$constants"
    echo "@set useinstall $2" >> "$constants"
}
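# Convert every .jpg under $images_path that lacks a matching .eps;
# the TeX-based formats (dvi/postscript) need eps images. Relies on
# ImageMagick's convert(1); the eps2: prefix selects Level 2
# Encapsulated PostScript, which is smaller than plain eps.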
check_eps()
{
    printf "INFO: Checking if jpgs have been converted to eps... "
    for i in $(find "$images_path"); do
        tdir_name=$(dirname "$i")
        tfile_name=$(basename "${i,,}")
        tfile_ext=${tfile_name##*.}
        tfile_base=$(basename -s .jpg "${i,,}")
        if [ "$tfile_ext" = "jpg" ] && [ ! -e "${tdir_name}/${tfile_base}.eps" ]; then
            printf "converting %s >>>\n %s ...\n\n" "$i" "${tdir_name}/${tfile_base}.eps"
            convert "$i" "eps2:${tdir_name}/${tfile_base}.eps" # using eps2 reduces file size
        fi
    done
    printf "Done.\n"
}
# FUNCTION COPIED FROM gendocs.sh
# Return size of $1 in something resembling kilobytes.
calcsize()
{
    size=$(ls -ksl "$1" | awk '{print $1}')
    echo "$size"
}
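# e.g. calcsize docs/manual/libreboot.pdf might print "364"
# (a hypothetical size, in 1K blocks)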
# Concatenation function called by build_index()
concat_index()
{
    if [ -e "$man_dir$1" ]; then
        if [ -d "$man_dir$1" ]; then
            echo "<li><a href=\"$1/index.html\">$2</a>$3</li>" >> "${man_dir}index.html"
        else
            echo "<li><a href=\"$1\">$2</a>$3</li>" >> "${man_dir}index.html"
        fi
    fi
}
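# For example (a hypothetical size), a call such as:
#   concat_index "libreboot.pdf" "PDF file (500K bytes)."
# appends this to index.html, if docs/manual/libreboot.pdf exists:
#   <li><a href="libreboot.pdf">PDF file (500K bytes).</a></li>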
# Main index function
build_index()
{
    sed \
        -e "s!%%TITLE%%!$pkg_title!g" \
        -e "s!%%PACKAGE%%!$pkg_name!g" \
        -e "s!%%DATE%%!$cur_date!g" \
        "${template_dir}gendocs_template_header" > "${man_dir}index.html"
    for i in $(ls "$man_dir"); do
        [ -d "$man_dir$i" ] || size=$(calcsize "${man_dir}${i}")
        case $i in
            ${pkg_name}.dvi.gz)
                dvi_gz_size=$size;;
            ${pkg_name}_by-node.tar.gz)
                html_node_tgz_size=$size;;
            ${pkg_name}_by-section.tar.gz)
                html_section_tgz_size=$size;;
            ${pkg_name}_one-page.html)
                html_mono_size=$size;;
            ${pkg_name}_one-page.tar.gz)
                html_mono_gz_size=$size;;
            ${pkg_name}.info.gz)
                info_gz_size=$size;;
            ${pkg_name}.pdf)
                pdf_size=$size;;
            ${pkg_name}.pdf.gz)
                pdf_gz_size=$size;;
            ${pkg_name}.ps.gz)
                ps_gz_size=$size;;
            ${pkg_name}.txt)
                plaintext_size=$size;;
            ${pkg_name}.txt.gz)
                plaintext_gz_size=$size;;
            ${pkg_name}.texi.gz)
                texinfo_gz_size=$size;;
            *)
                :;;
        esac
    done
    concat_index "${pkg_name}_one-page.html" "HTML (${html_mono_size}K bytes)" " - entirely on one web page."
    concat_index "${pkg_name}_by-node" "HTML" " - with one web page per node."
    concat_index "${pkg_name}_by-section" "HTML" " - with one web page per section."
    concat_index "${pkg_name}_one-page.tar.gz" "HTML compressed (${html_mono_gz_size}K gzipped tar file)" " - entirely on one web page."
    concat_index "${pkg_name}_by-node.tar.gz" "HTML compressed (${html_node_tgz_size}K gzipped tar file)" " - with one web page per node."
    concat_index "${pkg_name}_by-section.tar.gz" "HTML compressed (${html_section_tgz_size}K gzipped tar file)" " - with one web page per section."
    concat_index "${pkg_name}.info.gz" "Info document (${info_gz_size}K bytes gzipped)."
    concat_index "${pkg_name}.txt" "ASCII text (${plaintext_size}K bytes)."
    concat_index "${pkg_name}.txt.gz" "ASCII text compressed (${plaintext_gz_size}K bytes gzipped)."
    concat_index "${pkg_name}.dvi.gz" "TeX dvi file (${dvi_gz_size}K bytes gzipped)."
    concat_index "${pkg_name}.pdf" "PDF file (${pdf_size}K bytes)."
    concat_index "${pkg_name}.pdf.gz" "PDF file compressed (${pdf_gz_size}K bytes gzipped)."
    concat_index "${pkg_name}.ps.gz" "PostScript file compressed (${ps_gz_size}K bytes gzipped)."
    concat_index "${pkg_name}.texi.gz" "Texinfo source (${texinfo_gz_size}K bytes gzipped)."
    sed -e "s!%%EMAIL%%!$pkg_email!g" "${template_dir}gendocs_template_footer" >> "${man_dir}index.html"
}
#### OPTIONS #####
##################
# Options to makeinfo/texi2dvi, and output file names
case "$name" in
    "dvi")
        check_eps
        make_opts="-q -c -e -o"
        type="tex"
        basefile="${pkgname}.dvi";;
    "html")
        type="dir"
        is_html="yes"
        make_opts="--html --no-warn --split=node --css-include=$css_path -o"
        basefile="${pkgname}_by-node";;
    "html-by-section")
        type="dir"
        is_html="yes"
        make_opts="--html --no-warn --split=section --css-include=$css_path -o"
        basefile="${pkgname}_by-section";;
    "html-one-page")
        is_html="yes"
        make_opts="--html --no-warn --no-split --css-include=$css_path -o"
        basefile="${pkgname}_one-page.html";;
    "info")
        make_opts="--no-warn --no-split -o"
        basefile="${pkgname}.info";;
    "pdf")
        type="tex"
        make_opts="-q -c -e -p -o"
        basefile="${pkgname}.pdf";;
    "plaintext")
        make_opts="--no-warn --no-split --plaintext -o"
        basefile="${pkgname}.txt";;
    "postscript")
        check_eps
        make_opts="-q -c -e --ps -o"
        type="tex"
        basefile="${pkgname}.ps";;
    "texinfo-gz")
        basefile="${pkgname}.texi"
        type="texi";;
esac
# split html and pdf/postscript need special paths to resources
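# (split html output lives one directory deeper under $man_dir, hence
# the extra "../"; TeX resolves image paths against the texinfo source
# rather than the output file, hence no prefix at all)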
case "$type" in
"dir")
preprefix="../../";;
"tex")
preprefix=""
make_cmd="texi2dvi";;
esac
# compress html using tar
if [ "$is_html" = "yes" ]; then
    comp_cmd="tar czf"
    c_ext=".tar.gz"
fi
# set up special paths for web mode
if [ "$meta_type" = "web" ]; then
    if [ "$is_html" = "yes" ]; then
        echo "INFO: Using web mode for $name"
        prefix="docs/"
        use_install="install/"
    else
        echo "WARNING: Web mode has no effect for non-html output."
    fi
fi
# write pathnames (prefixes) to file
set_constants "$preprefix$prefix" "$use_install"
outfile="$man_dir$basefile"
###### MAIN ######
##################
if [ $type = "texi" ]; then
echo "Making compressed manual: $outfile$c_ext..."
gzip -f -9 -c $texinfo_src > $outfile$c_ext
else # The normal case
echo "Writing manual: $outfile..."
$make_cmd $make_opts $outfile $texinfo_src
# Don't compress anything if in web mode
if [ ! $meta_type = "web" ]; then
echo "Making compressed manual: ${outfile%\.html}$c_ext..."
# tar and gzip have different usages
if [ $is_html = "yes" ]; then
# Include resources in html tar archives, but not .eps
# and remove .html extension for html-one-page
$comp_cmd ${outfile%\.html}$c_ext $outfile $resources_dir --exclude=*eps
else
$comp_cmd $outfile > $outfile$c_ext
fi
echo "Building index..."
build_index # TODO: when building all, do this only once
fi
fi
echo "Done."