#compdef xsltproc xmllint -value-,XML_CATALOG_FILES,-default-
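# Handles completion for xsltproc, xmllint, and the value of the
# XML_CATALOG_FILES parameter (the -value-,XML_CATALOG_FILES,-default- service).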
# xmllint: using libxml version 20707
# xsltproc: using libxml 20707, libxslt 10126 and libexslt 815
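# Character encodings offered for xsltproc's --encoding and xmllint's --encode options.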
local -a encoding
encoding=(
UTF-8
UTF-16
ISO-10646-UCS-2
ISO-10646-UCS-4
ISO-8859-1
ISO-8859-2
ISO-8859-3
ISO-8859-4
ISO-8859-5
ISO-8859-6
ISO-8859-7
ISO-8859-8
ISO-8859-9
ISO-2022-JP
SHIFT_JIS
EUC-JP
)
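# $service holds the name of the command (or special context) being completed;
# dispatch to the matching set of completions.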
case $service in
xsltproc)
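# Each spec has the form '(excluded options)option[description]:message:action';
# '(-)' means no further options are offered once that option is on the line.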
_arguments \
'(-)'{--version,-V}'[show libxml and libxslt versions]' \
'(--verbose -v)'{--verbose,-v}"[show logs of what's happening]" \
'(--output -o)'{--output,-o}'[specify output file]:output file:_files' \
'--timing[display the time used]' \
'--repeat[run the transformation 20 times]' \
'--debug[dump the tree of the result instead]' \
'--dumpextensions[dump registered extension elements and functions]' \
'--novalid[skip the DTD loading phase]' \
'--nodtdattr[do not default attributes from the DTD]' \
'--noout[do not dump the result]' \
'--maxdepth[increase the maximum depth]:depth' \
'--maxparserdepth[increase the maximum parser depth]:depth' \
'--html[input document is an HTML file]' \
'--encoding[the input document character encoding]:encoding:(${encoding[@]})' \
'--param[pass a parameter,value pair]:name::value (xpath expression)' \
'--stringparam[pass a parameter]:name::value' \
'--path[provide a set of paths for resources]:paths:_files -/' \
'--nonet[refuse to fetch DTDs or entities over network]' \
'--nowrite[refuse to write to any file or resource]' \
'--nomkdir[refuse to create directories]' \
'--writesubtree[allow file write only with the path subtree]:path:_files -/' \
'--catalogs[use SGML catalogs]' \
'--xinclude[do XInclude processing on document input]' \
'--xincludestyle[do XInclude processing on stylesheets]' \
'--load-trace[print trace of all external entities loaded]' \
'(--profile --norman)'{--profile,--norman}'[dump profiling information]' \
'1:stylesheet:_files -g "*.xsl(-.)"' \
':file:_files -g "*.xml(-.)"' && return
;;
xmllint)
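# xmllint accepts one set of options followed by any number of XML documents,
# completed by the final '*:XML file:_webbrowser' spec.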
_arguments \
'--version[display the version of the XML library used]' \
'--debug[dump a debug tree of the in-memory document]' \
'--shell[run a navigating shell]' \
'--debugent[debug the entities defined in the document]' \
'--copy[used to test the internal copy implementation]' \
'--recover[output what was parsable on broken XML documents]' \
'--huge[remove any internal arbitrary parser limits]' \
'--noent[substitute entity references by their value]' \
"(--output -o)--noout[don't output the result tree]" \
'--path[provide a set of paths for resources]:paths:_files -/' \
'--load-trace[print trace of all external entities loaded]' \
'--nonet[refuse to fetch DTDs or entities over network]' \
'--htmlout[output results as HTML]' \
'--nowrap[do not put HTML doc wrapper]' \
'--nocompact[do not generate compact text nodes]' \
'--valid[validate the document in addition to the standard well-formedness check]' \
'(--dtdvalid --relaxng --schema)--postvalid[do a posteriori validation, i.e. after parsing]' \
'(--postvalid --relaxng --schema --dtdvalidfpi)--dtdvalid[do a posteriori validation against a given DTD]:DTD:_webbrowser' \
'(--postvalid --relaxng --schema --dtdvalid)--dtdvalidfpi[as --dtdvalid but specify DTD with public identifier]:DTD identifier' \
'--timing[print some timings]' \
'(--noout --output -o)'{--output,-o}'[save to a given file]:output file:_files' \
'--repeat[repeat 100 times, for timing or profiling]' \
'--insert[ad-hoc test for valid insertions]' \
'--compress[turn on gzip compression of output]' \
'--html[use the HTML parser]' \
'--xmlout[use the XML serializer when using --html]' \
'--push[use the push mode of the parser]' \
'--memory[parse from memory]' \
'--maxmem[limit memory allocation]:bytes' \
'--nowarning[do not emit warnings from parser/validator]' \
'--noblanks[drop (ignorable?) blank spaces]' \
'--nocdata[replace CDATA sections with text nodes]' \
'--format[reformat/reindent the input]' \
'--encode[output in the given encoding]:encoding:(${encoding[@]})' \
'--dropdtd[remove the DOCTYPE of the input docs]' \
'--c14n[save in W3C canonical format]' \
'--c14n11[save in W3C canonical format v1.1 (with comments)]' \
'--exc-c14n[save in W3C exclusive canonical format]' \
'--nsclean[remove redundant namespace declarations]' \
'--testIO[test user I/O support]' \
'(--nocatalogs)--catalogs[use SGML catalogs]' \
'(--catalogs)--nocatalogs[deactivate all catalogs]' \
'--auto[generate a small doc on the fly]' \
'(--noxincludenode)--xinclude[do XInclude processing]' \
'(--xinclude)--noxincludenode[do XInclude processing but do not generate XInclude nodes]' \
'--loaddtd[fetch external DTD]' \
'--nofixup-base-uris[do not fix up xml:base URIs]' \
'--dtdattr[loaddtd + populate the tree with inherited attributes]' \
'--stream[use the streaming interface to process very large files]' \
'--walker[create a reader and walk through the resulting doc]' \
'--pattern[test the pattern support]:pattern value' \
'--chkregister[verify the node registration code]' \
'(--dtdvalid --postvalid --schema)--relaxng[do RelaxNG validation against specified schema]:schema:_webbrowser' \
'(--dtdvalid --postvalid --relaxng)--schema[do validation against specified WXS schema]:schema:_webbrowser' \
'(--dtdvalid --postvalid --relaxng)--schematron[do validation against specified Schematron]:schema:_webbrowser' \
'--sax1[use the old SAX1 interfaces for processing]' \
'--sax[do not build a tree but work just at the SAX level]' \
'--oldxml10[use XML-1.0 parsing rules before the 5th edition]' \
'--xpath[evaluate the XPath expression, implies --noout]:XPath expression:' \
'*:XML file:_webbrowser' && return
;;
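# Completing the value of $XML_CATALOG_FILES: a whitespace-separated list of
# catalog files and/or URLs.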
*XML_CATALOG_FILES*)
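# The value may contain several (possibly quoted) words; restrict completion to
# the word under the cursor.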
compset -q
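# A word that already contains a URL scheme prefix (e.g. file:) is completed as a URL.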
if [[ -prefix *: ]]; then
_urls
return
fi
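# Otherwise keep any ':...' tail as an ignored suffix and offer either catalog
# files or the file:/// prefix (added without a trailing space).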
compset -S ':*'
_alternative \
'files:catalog file:_files' \
'url-schemas:URL scheme:compadd -S "" file:///' && return
;;
esac
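# Nothing matched or no completions were added: report failure.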
return 1