Copy of LOOPS 3.3.0
This commit is contained in:
commit
7e8b3b5562
510 changed files with 97978 additions and 0 deletions
2192
etc/convert.pl
Normal file
2192
etc/convert.pl
Normal file
File diff suppressed because it is too large
Load diff
58
etc/gapdoc.txt
Normal file
58
etc/gapdoc.txt
Normal file
|
@ -0,0 +1,58 @@
Producing LOOPS documentation with GAPDoc
------------------------------------------

There are only two source files:

pkg/loops/doc/loops.xml
pkg/loops/doc/loops.bib

Everything else is generated by GAPDoc.

The following "style" files should be present in the pkg/loops/doc folder:

lefttoc.css
manual.css
nocolorprompt.css
ragged.css
times.css
toggless.css
gapdoc.dtd
chooser.html

When files are ready, run the following in GAP:

# path to files, change as needed
path := Directory("c:/cygwin64/opt/gap4r7/pkg/loops/doc");;
main := "loops.xml";;
files := [];;
bookname := "loops";;

# translate bibtex to BibXMLext (only call when loops.bib changes or loops_bib.xml does not exist)
b:=ParseBibFiles( Filename(path, "loops.bib") );;
WriteBibXMLextFile( Filename(path, "loops_bib.xml") ,b);
HeuristicTranslationsLaTeX2XML.ApplyToFile( Filename(path, "loops_bib.xml") );

# produce inline help and latex
doc := ComposedDocument("GAPDoc", path, main, files, true);;
r := ParseTreeXMLString(doc[1], doc[2]);;
CheckAndCleanGapDocTree(r);
t := GAPDoc2Text(r, path);;
GAPDoc2TextPrintTextFiles(t, path);
l := GAPDoc2LaTeX(r);;
FileString(Filename(path, Concatenation(bookname, ".tex")), l);

# now call pdflatex, bibtex, pdflatex, makeindex, pdflatex with argument <loops>

# produce html
AddPageNumbersToSix(r, Filename(path, "loops.pnr"));
PrintSixFile(Filename(path, "manual.six"), r, bookname);
h := GAPDoc2HTML(r, path, "MathJax");;
GAPDoc2HTMLPrintHTMLFiles(h, path);
# optional html without MathJax
# h := GAPDoc2HTML(r, path );;
# GAPDoc2HTMLPrintHTMLFiles(h, path);

# now produce .ps, .dvi from .tex,
# and copy loops.* as manual.* for extensions pdf, ps, dvi

# delete auxiliary files
25
etc/make_docus
Normal file
25
etc/make_docus
Normal file
|
@ -0,0 +1,25 @@
|
|||
#! /bin/bash
# Build the LOOPS package documentation (DVI, PDF, PS and HTML)
# from the TeX sources in the package doc directory.

# Abort if the doc directory is missing, so the rm -f below can
# never run against the caller's current directory.
cd /opt/gap4r5/local/pkg/loops/doc || exit 1

# remove stale output from a previous run
rm -f manual.aux manual.dvi \
      manual.idx manual.ilg \
      manual.ind manual.lab \
      manual.log manual.pdf \
      manual.ps manual.six \
      manual.toc

echo "*** Producing DVI files ***"
# run tex twice so cross-references settle
tex manual &> /dev/null
tex manual &> /dev/null

# build the manual index from the .idx produced by the tex runs
/usr/local/lib/gap4r4/doc/manualindex manual &> /dev/null

# one more pass to pick up the generated index
tex manual &> /dev/null

echo "*** Producing PDF & PS files ***"
pdftex manual &> /dev/null
dvips manual &> /dev/null

echo "*** Producing HTML files ***"
# -f: do not fail when ../htm is already empty
rm -f ../htm/*
../etc/convert.pl -icu -n loops ./ ../htm/
43
etc/manualindex
Normal file
43
etc/manualindex
Normal file
|
@ -0,0 +1,43 @@
|
|||
#!/bin/sh
# Post-process a TeX manual index: lowercase plain entries, sort them
# with makeindex, and insert \- hyphenation hints into overlong entries.
# Usage: manualindex <basename>   (expects <basename>.idx to exist)

# lowercase all lines without an @ symbol
# ($1 quoted so basenames with spaces/globs survive)
awk '$0 !~ /@/ {$0=tolower($0)} {print}' "$1.idx" >"$1.idl"

# call makeindex to sort indexentries and produce an index
# ... this *needs* manual.mst to be in place, otherwise we'll end
# up with a LaTeX style index with \begin{index} .. \end{index}
makeindex -l "$1.idl"

# put hyphenation help into long commands
echo "Hyphenating long index entries"
echo '{a=index($0,"`");\
if (a==0) {print $0;}\
else {\
b=index($0,"'"'"'");\
if (b-a>40) {\
anf=substr($0,1,a);\
mid=substr($0,a+1,b-a-1);\
end=substr($0,b);\
b=length(mid);\
i=1;\
cap=1;\
mid2=substr(mid,1,i-1);\
while (i<=length(mid)) {\
c=substr(mid,i,1);\
if (c!=tolower(c)){\
if (cap==0) mid2 = mid2 "\\-";\
cap=1;\
}\
else cap=0;\
mid2=mid2 c;\
i=i+1;\
}\
print anf mid2 end;\
}\
else print $0;\
}
}' >mh.awk
cp "$1.ind" "$1.idl"
awk -f mh.awk "$1.idl" >"$1.ind"

# clean up
rm "$1.idl" mh.awk
63
etc/pack
Normal file
63
etc/pack
Normal file
|
@ -0,0 +1,63 @@
|
|||
#! /bin/bash
# Package the LOOPS distribution: build the Windows zip (DOS line
# endings) and the Unix tar.gz (Unix line endings) archives.

# Preparations
package_name="loops-1.1.0"
package_dep="/Raktar/LOOPS_Packages"
nagyg_maths="/home/nagyg/Maths/2006"

rm -f "$package_dep/$package_name-win.zip"
rm -f "$package_dep/$package_name.tar.gz"

# determine all ASCII files

echo "Determining all ASCII files"

# abort if the source tree is missing; everything below depends on it
cd "$nagyg_maths/pkg" || exit 1
files=()
# NUL-delimited find so filenames containing whitespace survive intact
while IFS= read -r -d '' i; do
    if file -i "$i" | grep -q text/plain; then
        files+=("$i")
    fi
done < <(find . -print0)

# turn all ASCII files to DOS format

echo "Turning all ASCII files to DOS format"

for i in "${files[@]}"; do
    unix2dos "$i" &> /dev/null
done

# make the -win.zip file

echo "Making the -win.zip file"
zip -r "$package_dep/$package_name-win.zip" \
    README.loops loops/* \
    &> /dev/null

# turn all ASCII files but loops_manual.txt to UNIX format

echo "Turning all ASCII files but loops_manual.txt to UNIX format"
for i in "${files[@]}"; do
    dos2unix "$i" &> /dev/null
done

# chmod clean_docus, make_docus, pack

echo "Change mode for clean_docus, make_docus and pack"
chmod uog+x \
    loops/etc/make_docus \
    loops/etc/convert.pl \
    loops/etc/pack

# make the .tar.gz file

echo "Making the .tar.gz file"
tar -czf "$package_dep/$package_name.tar.gz" \
    README.loops loops/*
Loading…
Add table
Add a link
Reference in a new issue