Kitxuuu commited on
Commit
437da41
·
verified ·
1 Parent(s): a395223

Add files using upload-large-folder tool

Browse files
Files changed (20) hide show
  1. local-test-commons-compress-full-01-vuln_2/afc-commons-compress/src/site/resources/images/compress-logo-white.xcf +0 -0
  2. local-test-commons-compress-full-01-vuln_2/afc-commons-compress/src/site/xdoc/download_compress.xml +158 -0
  3. local-test-commons-compress-full-01-vuln_2/afc-commons-compress/src/site/xdoc/examples.xml +1295 -0
  4. local-test-commons-compress-full-01-vuln_2/afc-commons-compress/src/site/xdoc/limitations.xml +259 -0
  5. local-test-commons-compress-full-01-vuln_2/afc-commons-compress/src/site/xdoc/security.xml +301 -0
  6. local-test-commons-compress-full-01-vuln_2/afc-commons-compress/src/test/resources/COMPRESS-379.jar +0 -0
  7. local-test-commons-compress-full-01-vuln_2/afc-commons-compress/src/test/resources/COMPRESS-382 +0 -0
  8. local-test-commons-compress-full-01-vuln_2/afc-commons-compress/src/test/resources/COMPRESS-386 +1 -0
  9. local-test-commons-compress-full-01-vuln_2/afc-commons-compress/src/test/resources/bla-multi.7z.001 +0 -0
  10. local-test-commons-compress-full-01-vuln_2/afc-commons-compress/src/test/resources/bla.pack +0 -0
  11. local-test-commons-compress-full-01-vuln_2/afc-commons-compress/src/test/resources/bla.unix.arj +0 -0
  12. local-test-commons-compress-full-01-vuln_2/afc-commons-compress/src/test/resources/longfile_bsd.ar +5 -0
  13. local-test-commons-compress-full-01-vuln_2/afc-commons-compress/src/test/resources/longfile_gnu.ar +8 -0
  14. local-test-commons-compress-full-01-vuln_2/afc-commons-compress/src/test/resources/test with spaces.txt +11 -0
  15. local-test-commons-compress-full-01-vuln_2/afc-commons-compress/src/test/resources/test1.xml +4 -0
  16. local-test-commons-compress-full-01-vuln_2/afc-commons-compress/src/test/resources/test3.xml +10 -0
  17. local-test-commons-compress-full-01-vuln_2/afc-commons-compress/src/test/resources/test4.xml +6 -0
  18. local-test-commons-compress-full-01-vuln_2/afc-commons-compress/src/test/resources/testAIFF.aif +0 -0
  19. local-test-commons-compress-full-01-vuln_2/afc-commons-compress/src/test/resources/testCompress209.doc +0 -0
  20. local-test-commons-compress-full-01-vuln_2/afc-commons-compress/src/test/resources/zipbomb.xlsx +0 -0
local-test-commons-compress-full-01-vuln_2/afc-commons-compress/src/site/resources/images/compress-logo-white.xcf ADDED
Binary file (25.8 kB). View file
 
local-test-commons-compress-full-01-vuln_2/afc-commons-compress/src/site/xdoc/download_compress.xml ADDED
@@ -0,0 +1,158 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ <?xml version="1.0"?>
2
+ <!--
3
+ Licensed to the Apache Software Foundation (ASF) under one or more
4
+ contributor license agreements. See the NOTICE file distributed with
5
+ this work for additional information regarding copyright ownership.
6
+ The ASF licenses this file to You under the Apache License, Version 2.0
7
+ (the "License"); you may not use this file except in compliance with
8
+ the License. You may obtain a copy of the License at
9
+
10
+ https://www.apache.org/licenses/LICENSE-2.0
11
+
12
+ Unless required by applicable law or agreed to in writing, software
13
+ distributed under the License is distributed on an "AS IS" BASIS,
14
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15
+ See the License for the specific language governing permissions and
16
+ limitations under the License.
17
+ -->
18
+ <!--
19
+ +======================================================================+
20
+ |**** ****|
21
+ |**** THIS FILE IS GENERATED BY THE COMMONS BUILD PLUGIN ****|
22
+ |**** DO NOT EDIT DIRECTLY ****|
23
+ |**** ****|
24
+ +======================================================================+
25
+ | TEMPLATE FILE: download-page-template.xml |
26
+ | commons-build-plugin/trunk/src/main/resources/commons-xdoc-templates |
27
+ +======================================================================+
28
+ | |
29
+ | 1) Re-generate using: mvn commons-build:download-page |
30
+ | |
31
+ | 2) Set the following properties in the component's pom: |
32
+ | - commons.componentid (required, alphabetic, lower case) |
33
+ | - commons.release.version (required) |
34
+ | - commons.release.name (required) |
35
+ | - commons.binary.suffix (optional) |
36
+ | (defaults to "-bin", set to "" for pre-maven2 releases) |
37
+ | - commons.release.desc (optional) |
38
+ | - commons.release.subdir (optional) |
39
+ | - commons.release.hash (optional, lowercase, default sha512) |
40
+ | |
41
+ | - commons.release.[234].version (conditional) |
42
+ | - commons.release.[234].name (conditional) |
43
+ | - commons.release.[234].binary.suffix (optional) |
44
+ | - commons.release.[234].desc (optional) |
45
+ | - commons.release.[234].subdir (optional) |
46
+ | - commons.release.[234].hash (optional, lowercase, [sha512])|
47
+ | |
48
+ | 3) Example Properties |
49
+ | (commons.release.name inherited by parent: |
50
+ | ${project.artifactId}-${commons.release.version} |
51
+ | |
52
+ | <properties> |
53
+ | <commons.componentid>math</commons.componentid> |
54
+ | <commons.release.version>1.2</commons.release.version> |
55
+ | </properties> |
56
+ | |
57
+ +======================================================================+
58
+ -->
59
+ <document xmlns="http://maven.apache.org/XDOC/2.0"
60
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
61
+ xsi:schemaLocation="http://maven.apache.org/XDOC/2.0 https://maven.apache.org/xsd/xdoc-2.0.xsd">
62
+ <properties>
63
+ <title>Download Apache Commons Compress</title>
64
+ <author email="dev@commons.apache.org">Apache Commons Team</author>
65
+ </properties>
66
+ <body>
67
+ <section name="Download Apache Commons Compress">
68
+ <subsection name="Using a Mirror">
69
+ <p>
70
+ We recommend you use a mirror to download our release
71
+ builds, but you <strong>must</strong> <a href="https://www.apache.org/info/verification.html">verify the integrity</a> of
72
+ the downloaded files using signatures downloaded from our main
73
+ distribution directories. Recent releases (48 hours) may not yet
74
+ be available from all the mirrors.
75
+ </p>
76
+
77
+ <p>
78
+ You are currently using <b>[preferred]</b>. If you
79
+ encounter a problem with this mirror, please select another
80
+ mirror. If all mirrors are failing, there are <i>backup</i>
81
+ mirrors (at the end of the mirrors list) that should be
82
+ available.
83
+ <br></br>
84
+ [if-any logo]<a href="[link]"><img align="right" src="[logo]" border="0" alt="Logo"></img></a>[end]
85
+ </p>
86
+
87
+ <form action="[location]" method="get" id="SelectMirror">
88
+ <p>
89
+ Other mirrors:
90
+ <select name="Preferred">
91
+ [if-any http]
92
+ [for http]<option value="[http]">[http]</option>[end]
93
+ [end]
94
+ [if-any ftp]
95
+ [for ftp]<option value="[ftp]">[ftp]</option>[end]
96
+ [end]
97
+ [if-any backup]
98
+ [for backup]<option value="[backup]">[backup] (backup)</option>[end]
99
+ [end]
100
+ </select>
101
+ <input type="submit" value="Change"></input>
102
+ </p>
103
+ </form>
104
+
105
+ <p>
106
+ It is essential that you
107
+ <a href="https://www.apache.org/info/verification.html">verify the integrity</a>
108
+ of downloaded files, preferably using the <code>PGP</code> signature (<code>*.asc</code> files);
109
+ failing that using the <code>SHA512</code> hash (<code>*.sha512</code> checksum files).
110
+ </p>
111
+ <p>
112
+ The <a href="https://downloads.apache.org/commons/KEYS">KEYS</a>
113
+ file contains the public PGP keys used by Apache Commons developers
114
+ to sign releases.
115
+ </p>
116
+ </subsection>
117
+ </section>
118
+ <section name="Apache Commons Compress 1.28.0 ">
119
+ <subsection name="Binaries">
120
+ <table>
121
+ <tr>
122
+ <td><a href="[preferred]/commons/compress/binaries/commons-compress-1.28.0-bin.tar.gz">commons-compress-1.28.0-bin.tar.gz</a></td>
123
+ <td><a href="https://downloads.apache.org/commons/compress/binaries/commons-compress-1.28.0-bin.tar.gz.sha512">sha512</a></td>
124
+ <td><a href="https://downloads.apache.org/commons/compress/binaries/commons-compress-1.28.0-bin.tar.gz.asc">pgp</a></td>
125
+ </tr>
126
+ <tr>
127
+ <td><a href="[preferred]/commons/compress/binaries/commons-compress-1.28.0-bin.zip">commons-compress-1.28.0-bin.zip</a></td>
128
+ <td><a href="https://downloads.apache.org/commons/compress/binaries/commons-compress-1.28.0-bin.zip.sha512">sha512</a></td>
129
+ <td><a href="https://downloads.apache.org/commons/compress/binaries/commons-compress-1.28.0-bin.zip.asc">pgp</a></td>
130
+ </tr>
131
+ </table>
132
+ </subsection>
133
+ <subsection name="Source">
134
+ <table>
135
+ <tr>
136
+ <td><a href="[preferred]/commons/compress/source/commons-compress-1.28.0-src.tar.gz">commons-compress-1.28.0-src.tar.gz</a></td>
137
+ <td><a href="https://downloads.apache.org/commons/compress/source/commons-compress-1.28.0-src.tar.gz.sha512">sha512</a></td>
138
+ <td><a href="https://downloads.apache.org/commons/compress/source/commons-compress-1.28.0-src.tar.gz.asc">pgp</a></td>
139
+ </tr>
140
+ <tr>
141
+ <td><a href="[preferred]/commons/compress/source/commons-compress-1.28.0-src.zip">commons-compress-1.28.0-src.zip</a></td>
142
+ <td><a href="https://downloads.apache.org/commons/compress/source/commons-compress-1.28.0-src.zip.sha512">sha512</a></td>
143
+ <td><a href="https://downloads.apache.org/commons/compress/source/commons-compress-1.28.0-src.zip.asc">pgp</a></td>
144
+ </tr>
145
+ </table>
146
+ </subsection>
147
+ </section>
148
+ <section name="Archives">
149
+ <p>
150
+ Older releases can be obtained from the archives.
151
+ </p>
152
+ <ul>
153
+ <li class="download"><a href="[preferred]/commons/compress/">browse download area</a></li>
154
+ <li><a href="https://archive.apache.org/dist/commons/compress/">archives...</a></li>
155
+ </ul>
156
+ </section>
157
+ </body>
158
+ </document>
local-test-commons-compress-full-01-vuln_2/afc-commons-compress/src/site/xdoc/examples.xml ADDED
@@ -0,0 +1,1295 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ <?xml version="1.0"?>
2
+ <!--
3
+
4
+ Licensed to the Apache Software Foundation (ASF) under one or more
5
+ contributor license agreements. See the NOTICE file distributed with
6
+ this work for additional information regarding copyright ownership.
7
+ The ASF licenses this file to You under the Apache License, Version 2.0
8
+ (the "License"); you may not use this file except in compliance with
9
+ the License. You may obtain a copy of the License at
10
+
11
+ http://www.apache.org/licenses/LICENSE-2.0
12
+
13
+ Unless required by applicable law or agreed to in writing, software
14
+ distributed under the License is distributed on an "AS IS" BASIS,
15
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16
+ See the License for the specific language governing permissions and
17
+ limitations under the License.
18
+
19
+ -->
20
+ <document xmlns="http://maven.apache.org/XDOC/2.0"
21
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
22
+ xsi:schemaLocation="http://maven.apache.org/XDOC/2.0 https://maven.apache.org/xsd/xdoc-2.0.xsd">
23
+ <properties>
24
+ <title>Commons Compress User Guide</title>
25
+ <author email="dev@commons.apache.org">Apache Commons Team</author>
26
+ </properties>
27
+ <body>
28
+ <section name="General Notes">
29
+
30
+ <subsection name="Archivers and Compressors">
31
+ <p>Commons Compress calls all formats that compress a single
32
+ stream of data compressor formats while all formats that
33
+ collect multiple entries inside a single (potentially
34
+ compressed) archive are archiver formats.</p>
35
+
36
+ <p>The compressor formats supported are gzip, bzip2, XZ, LZMA,
37
+ Pack200, DEFLATE, Brotli, DEFLATE64, ZStandard and Z, the archiver formats are 7z, ar, arj,
38
+ cpio, dump, tar and zip. Pack200 is a special case as it can
39
+ only compress JAR files.</p>
40
+
41
+ <p>We currently only provide read support for arj,
42
+ dump, Brotli, DEFLATE64 and Z. arj can only read uncompressed archives, 7z can read
43
+ archives with many compression and encryption algorithms
44
+ supported by 7z but doesn't support encryption when writing
45
+ archives.</p>
46
+ </subsection>
47
+
48
+ <subsection name="Buffering">
49
+ <p>The stream classes all wrap around streams provided by the
50
+ calling code and they work on them directly without any
51
+ additional buffering. On the other hand most of them will
52
+ benefit from buffering so it is highly recommended that
53
+ users wrap their stream
54
+ in <code>Buffered<em>(In|Out)</em>putStream</code>s before
55
+ using the Commons Compress API.</p>
56
+
57
+ </subsection>
58
+
59
+ <subsection name="Factories">
60
+
61
+ <p>Compress provides factory methods to create input/output
62
+ streams based on the names of the compressor or archiver
63
+ format as well as factory methods that try to guess the
64
+ format of an input stream.</p>
65
+
66
+ <p>To create a compressor writing to a given output by using
67
+ the algorithm name:</p>
68
+ <source><![CDATA[
69
+ CompressorOutputStream gzippedOut = new CompressorStreamFactory()
70
+ .createCompressorOutputStream(CompressorStreamFactory.GZIP, myOutputStream);
71
+ ]]></source>
72
+
73
+ <p>Make the factory guess the input format for a given
74
+ archiver stream:</p>
75
+ <source><![CDATA[
76
+ ArchiveInputStream input = new ArchiveStreamFactory()
77
+ .createArchiveInputStream(originalInput);
78
+ ]]></source>
79
+
80
+ <p>Make the factory guess the input format for a given
81
+ compressor stream:</p>
82
+ <source><![CDATA[
83
+ CompressorInputStream input = new CompressorStreamFactory()
84
+ .createCompressorInputStream(originalInput);
85
+ ]]></source>
86
+
87
+ <p>Note that there is no way to detect the LZMA or Brotli formats so only
88
+ the two-arg version of
89
+ <code>createCompressorInputStream</code> can be used. Prior
90
+ to Compress 1.9 the .Z format hasn't been auto-detected
91
+ either.</p>
92
+
93
+ </subsection>
94
+
95
+ <subsection name="Restricting Memory Usage">
96
+ <p>Starting with Compress 1.14
97
+ <code>CompressorStreamFactory</code> has an optional
98
+ constructor argument that can be used to set an upper limit of
99
+ memory that may be used while decompressing or compressing a
100
+ stream. As of 1.14 this setting only affects decompressing Z,
101
+ XZ and LZMA compressed streams.</p>
102
+ <p>Since Compress 1.19 <code>SevenZFile</code> also has an
103
+ optional constructor to pass an upper memory limit which is supported
104
+ in LZMA compressed streams. Since Compress 1.21 this setting
105
+ also is taken into account when reading the metadata of an archive.</p>
106
+ <p>For the Snappy and LZ4 formats the amount of memory used
107
+ during compression is directly proportional to the window
108
+ size.</p>
109
+ </subsection>
110
+
111
+ <subsection name="Statistics">
112
+ <p>Starting with Compress 1.17 most of the
113
+ <code>CompressorInputStream</code> implementations as well as
114
+ <code>ZipArchiveInputStream</code> and all streams returned by
115
+ <code>ZipFile.getInputStream</code> implement the
116
+ <code>InputStreamStatistics</code>
117
+ interface. <code>SevenZFile</code> provides statistics for the
118
+ current entry via the
119
+ <code>getStatisticsForCurrentEntry</code> method. This
120
+ interface can be used to track progress while extracting a
121
+ stream or to detect potential <a
122
+ href="https://en.wikipedia.org/wiki/Zip_bomb">zip bombs</a>
123
+ when the compression ratio becomes suspiciously large.</p>
124
+ </subsection>
125
+
126
+ </section>
127
+ <section name="Archivers">
128
+
129
+ <subsection name="Unsupported Features">
130
+ <p>Many of the supported formats have developed different
131
+ dialects and extensions and some formats allow for features
132
+ (not yet) supported by Commons Compress.</p>
133
+
134
+ <p>The <code>ArchiveInputStream</code> class provides a method
135
+ <code>canReadEntryData</code> that will return false if
136
+ Commons Compress can detect that an archive uses a feature
137
+ that is not supported by the current implementation. If it
138
+ returns false you should not try to read the entry but skip
139
+ over it.</p>
140
+
141
+ </subsection>
142
+
143
+ <subsection name="Entry Names">
144
+ <p>All archive formats provide meta data about the individual
145
+ archive entries via instances of <code>ArchiveEntry</code> (or
146
+ rather subclasses of it). When reading from an archive the
147
+ information provided the <code>getName</code> method is the
148
+ raw name as stored inside of the archive. There is no
149
+ guarantee the name represents a relative file name or even a
150
+ valid file name on your target operating system at all. You
151
+ should double check the outcome when you try to create file
152
+ names from entry names.</p>
153
+ </subsection>
154
+
155
+ <subsection name="Common Extraction Logic">
156
+ <p>Apart from 7z all formats provide a subclass of
157
+ <code>ArchiveInputStream</code> that can be used to create an
158
+ archive. For 7z <code>SevenZFile</code> provides a similar API
159
+ that does not represent a stream as our implementation
160
+ requires random access to the input and cannot be used for
161
+ general streams. The ZIP implementation can benefit a lot from
162
+ random access as well, see the <a
163
+ href="zip.html#ZipArchiveInputStream_vs_ZipFile">zip
164
+ page</a> for details.</p>
165
+
166
+ <p>Assuming you want to extract an archive to a target
167
+ directory you'd call <code>getNextEntry</code>, verify the
168
+ entry can be read, construct a sane file name from the entry's
169
+ name, create a <code>File</code> and write all contents to
170
+ it - here <code>IOUtils.copy</code> may come handy. You do so
171
+ for every entry until <code>getNextEntry</code> returns
172
+ <code>null</code>.</p>
173
+
174
+ <p>A skeleton might look like:</p>
175
+
176
+ <source><![CDATA[
177
+ File targetDir = ...
178
+ try (ArchiveInputStream i = ... create the stream for your format, use buffering...) {
179
+ ArchiveEntry entry = null;
180
+ while ((entry = i.getNextEntry()) != null) {
181
+ if (!i.canReadEntryData(entry)) {
182
+ // log something?
183
+ continue;
184
+ }
185
+ String name = fileName(targetDir, entry);
186
+ File f = new File(name);
187
+ if (entry.isDirectory()) {
188
+ if (!f.isDirectory() && !f.mkdirs()) {
189
+ throw new IOException("failed to create directory " + f);
190
+ }
191
+ } else {
192
+ File parent = f.getParentFile();
193
+ if (!parent.isDirectory() && !parent.mkdirs()) {
194
+ throw new IOException("failed to create directory " + parent);
195
+ }
196
+ try (OutputStream o = Files.newOutputStream(f.toPath())) {
197
+ IOUtils.copy(i, o);
198
+ }
199
+ }
200
+ }
201
+ }
202
+ ]]></source>
203
+
204
+ <p>where the hypothetical <code>fileName</code> method is
205
+ written by you and provides the absolute name for the file
206
+ that is going to be written on disk. Here you should perform
207
+ checks that ensure the resulting file name actually is a valid
208
+ file name on your operating system or belongs to a file inside
209
+ of <code>targetDir</code> when using the entry's name as
210
+ input.</p>
211
+
212
+ <p>If you want to combine an archive format with a compression
213
+ format - like when reading a "tar.gz" file - you wrap the
214
+ <code>ArchiveInputStream</code> around
215
+ <code>CompressorInputStream</code> for example:</p>
216
+
217
+ <source><![CDATA[
218
+ try (InputStream fi = Files.newInputStream(Paths.get("my.tar.gz"));
219
+ InputStream bi = new BufferedInputStream(fi);
220
+ InputStream gzi = new GzipCompressorInputStream(bi);
221
+ ArchiveInputStream o = new TarArchiveInputStream(gzi)) {
222
+ }
223
+ ]]></source>
224
+
225
+ </subsection>
226
+
227
+ <subsection name="Common Archival Logic">
228
+ <p>Apart from 7z all formats that support writing provide a
229
+ subclass of <code>ArchiveOutputStream</code> that can be used
230
+ to create an archive. For 7z <code>SevenZOutputFile</code>
231
+ provides a similar API that does not represent a stream as our
232
+ implementation requires random access to the output and cannot
233
+ be used for general streams. The
234
+ <code>ZipArchiveOutputStream</code> class will benefit from
235
+ random access as well but can be used for non-seekable streams
236
+ - but not all features will be available and the archive size
237
+ might be slightly bigger, see <a
238
+ href="zip.html#ZipArchiveOutputStream">the zip page</a> for
239
+ details.</p>
240
+
241
+ <p>Assuming you want to add a collection of files to an
242
+ archive, you can first use <code>createArchiveEntry</code> for
243
+ each file. In general this will set a few flags (usually the
244
+ last modified time, the size and the information whether this
245
+ is a file or directory) based on the <code>File</code> or <code>Path</code>
246
+ instance. Alternatively you can create the
247
+ <code>ArchiveEntry</code> subclass corresponding to your
248
+ format directly. Often you may want to set additional flags
249
+ like file permissions or owner information before adding the
250
+ entry to the archive.</p>
251
+
252
+ <p>Next you use <code>putArchiveEntry</code> in order to add
253
+ the entry and then start using <code>write</code> to add the
254
+ content of the entry - here <code>IOUtils.copy</code> may
255
+ come handy. Finally you invoke
256
+ <code>closeArchiveEntry</code> once you've written all content
257
+ and before you add the next entry.</p>
258
+
259
+ <p>Once all entries have been added you'd invoke
260
+ <code>finish</code> and finally <code>close</code> the
261
+ stream.</p>
262
+
263
+ <p>A skeleton might look like:</p>
264
+
265
+ <source><![CDATA[
266
+ Collection<File> filesToArchive = ...
267
+ try (ArchiveOutputStream o = ... create the stream for your format ...) {
268
+ for (File f : filesToArchive) {
269
+ // maybe skip directories for formats like AR that don't store directories
270
+ ArchiveEntry entry = o.createArchiveEntry(f, entryName(f));
271
+ // potentially add more flags to entry
272
+ o.putArchiveEntry(entry);
273
+ if (f.isFile()) {
274
+ try (InputStream i = Files.newInputStream(f.toPath())) {
275
+ IOUtils.copy(i, o);
276
+ }
277
+ }
278
+ o.closeArchiveEntry();
279
+ }
280
+ o.finish();
281
+ }
282
+ ]]></source>
283
+
284
+ <p>where the hypothetical <code>entryName</code> method is
285
+ written by you and provides the name for the entry as it is
286
+ going to be written to the archive.</p>
287
+
288
+ <p>If you want to combine an archive format with a compression
289
+ format - like when creating a "tar.gz" file - you wrap the
290
+ <code>ArchiveOutputStream</code> around a
291
+ <code>CompressorOutputStream</code> for example:</p>
292
+
293
+ <source><![CDATA[
294
+ try (OutputStream fo = Files.newOutputStream(Paths.get("my.tar.gz"));
295
+ OutputStream gzo = new GzipCompressorOutputStream(fo);
296
+ ArchiveOutputStream o = new TarArchiveOutputStream(gzo)) {
297
+ }
298
+ ]]></source>
299
+
300
+ </subsection>
301
+
302
+ <subsection name="7z">
303
+
304
+ <p>Note that Commons Compress currently only supports a subset
305
+ of compression and encryption algorithms used for 7z archives.
306
+ For writing only uncompressed entries, LZMA, LZMA2, BZIP2 and
307
+ Deflate are supported - in addition to those reading supports
308
+ AES-256/SHA-256 and DEFLATE64.</p>
309
+
310
+ <p>Writing multipart archives is not supported at
311
+ all. Multipart archives can be read by concatenating the parts
312
+ for example by using
313
+ <code>MultiReadOnlySeekableByteChannel</code>.</p>
314
+
315
+ <p>7z archives can use multiple compression and encryption
316
+ methods as well as filters combined as a pipeline of methods
317
+ for its entries. Prior to Compress 1.8 you could only specify
318
+ a single method when creating archives - reading archives
319
+ using more than one method has been possible before. Starting
320
+ with Compress 1.8 it is possible to configure the full
321
+ pipeline using the <code>setContentMethods</code> method of
322
+ <code>SevenZOutputFile</code>. Methods are specified in the
323
+ order they appear inside the pipeline when creating the
324
+ archive, you can also specify certain parameters for some of
325
+ the methods - see the Javadocs of
326
+ <code>SevenZMethodConfiguration</code> for details.</p>
327
+
328
+ <p>When reading entries from an archive the
329
+ <code>getContentMethods</code> method of
330
+ <code>SevenZArchiveEntry</code> will properly represent the
331
+ compression/encryption/filter methods but may fail to
332
+ determine the configuration options used. As of Compress 1.8
333
+ only the dictionary size used for LZMA2 can be read.</p>
334
+
335
+ <p>Currently solid compression - compressing multiple files
336
+ as a single block to benefit from patterns repeating across
337
+ files - is only supported when reading archives. This also
338
+ means compression ratio will likely be worse when using
339
+ Commons Compress compared to the native 7z executable.</p>
340
+
341
+ <p>Reading or writing requires a
342
+ <code>SeekableByteChannel</code> that will be obtained
343
+ transparently when reading from or writing to a file. The
344
+ class
345
+ <code>org.apache.commons.compress.utils.SeekableInMemoryByteChannel</code>
346
+ allows you to read from or write to an in-memory archive.</p>
347
+
348
+ <p>Some 7z archives don't contain any names for the archive
349
+ entries. The native 7zip tools derive a default name from the
350
+ name of the archive itself for such entries. Starting with
351
+ Compress 1.19 <code>SevenZFile</code> has an option to mimic
352
+ this behavior, but by default unnamed archive entries will
353
+ return <code>null</code> from
354
+ <code>SevenZArchiveEntry#getName</code>.</p>
355
+
356
+ <p>Adding an entry to a 7z archive:</p>
357
+ <source><![CDATA[
358
+ SevenZOutputFile sevenZOutput = new SevenZOutputFile(file);
359
+ SevenZArchiveEntry entry = sevenZOutput.createArchiveEntry(fileToArchive, name);
360
+ sevenZOutput.putArchiveEntry(entry);
361
+ sevenZOutput.write(contentOfEntry);
362
+ sevenZOutput.closeArchiveEntry();
363
+ ]]></source>
364
+
365
+ <p>Uncompressing a given 7z archive (you would
366
+ certainly add exception handling and make sure all streams
367
+ get closed properly):</p>
368
+ <source><![CDATA[
369
+ SevenZFile sevenZFile = new SevenZFile(new File("archive.7z"));
370
+ SevenZArchiveEntry entry = sevenZFile.getNextEntry();
371
+ byte[] content = new byte[entry.getSize()];
372
+ LOOP UNTIL entry.getSize() HAS BEEN READ {
373
+ sevenZFile.read(content, offset, content.length - offset);
374
+ }
375
+ ]]></source>
376
+
377
+ <p>Uncompressing a given in-memory 7z archive:</p>
378
+ <source><![CDATA[
379
+ byte[] inputData; // 7z archive contents
380
+ SeekableInMemoryByteChannel inMemoryByteChannel = new SeekableInMemoryByteChannel(inputData);
381
+ SevenZFile sevenZFile = new SevenZFile(inMemoryByteChannel);
382
+ SevenZArchiveEntry entry = sevenZFile.getNextEntry();
383
+ sevenZFile.read(); // read current entry's data
384
+ ]]></source>
385
+
386
+ <h4><a name="Encrypted-7z-Archives"></a>Encrypted 7z Archives</h4>
387
+
388
+ <p>Currently Compress supports reading but not writing of
389
+ encrypted archives. When reading an encrypted archive a
390
+ password has to be provided to one of
391
+ <code>SevenZFile</code>'s constructors. If you try to read
392
+ an encrypted archive without specifying a password a
393
+ <code>PasswordRequiredException</code> (a subclass of
394
+ <code>IOException</code>) will be thrown.</p>
395
+
396
+ <p>When specifying the password as a <code>byte[]</code> one
397
+ common mistake is to use the wrong encoding when creating
398
+ the <code>byte[]</code> from a <code>String</code>. The
399
+ <code>SevenZFile</code> class expects the bytes to
400
+ correspond to the UTF16-LE encoding of the password. An
401
+ example of reading an encrypted archive is</p>
402
+
403
+ <source><![CDATA[
404
+ SevenZFile sevenZFile = new SevenZFile(new File("archive.7z"), "secret".getBytes(StandardCharsets.UTF_16LE));
405
+ SevenZArchiveEntry entry = sevenZFile.getNextEntry();
406
+ byte[] content = new byte[entry.getSize()];
407
+ LOOP UNTIL entry.getSize() HAS BEEN READ {
408
+ sevenZFile.read(content, offset, content.length - offset);
409
+ }
410
+ ]]></source>
411
+
412
+ <p>Starting with Compress 1.17 new constructors have been
413
+ added that accept the password as <code>char[]</code> rather
414
+ than a <code>byte[]</code>. We recommend you use these in
415
+ order to avoid the problem above.</p>
416
+
417
+ <source><![CDATA[
418
+ SevenZFile sevenZFile = new SevenZFile(new File("archive.7z"), "secret".toCharArray());
419
+ SevenZArchiveEntry entry = sevenZFile.getNextEntry();
420
+ byte[] content = new byte[entry.getSize()];
421
+ LOOP UNTIL entry.getSize() HAS BEEN READ {
422
+ sevenZFile.read(content, offset, content.length - offset);
423
+ }
424
+ ]]></source>
425
+
426
+ <h4><a name="Random-Access-to-7z-Archives"></a>Random-Access to 7z Archives</h4>
427
+
428
+ <p>Prior to Compress 1.20 7z archives could only be read
429
+ sequentially. The
430
+ <code>getInputStream(SevenZArchiveEntry)</code> method
431
+ introduced with Compress 1.20 now provides random access but
432
+ at least when the archive uses solid compression random access
433
+ will likely be significantly slower than sequential
434
+ access.</p>
435
+
436
+ <h4><a name="Recovering-from-Certain-Broken-7z-Archives"></a>Recovering from Certain Broken 7z Archives</h4>
437
+
438
+ <p><code>SevenZFile</code> tries
439
+ to recover archives that look as if they were part of a
440
+ multi-volume archive where the first volume has been removed
441
+ too early.</p>
442
+
443
+ <p>This option has to be enabled
444
+ explicitly in <code>SevenZFile.Builder</code>. The way recovery
445
+ works is by Compress scanning an archive from the end for
446
+ something that might look like valid 7z metadata and use that,
447
+ if it can successfully parse the block of data. When doing so
448
+ Compress may encounter blocks of metadata that look like the
449
+ metadata of very large archives which in turn may make
450
+ Compress allocate a lot of memory. Therefore we strongly
451
+ recommend you also set a memory limit inside the
452
+ <code>SevenZFile.Builder</code> if you enable recovery.</p>
453
+ </subsection>
454
+
455
+ <subsection name="ar">
456
+
457
+ <p>In addition to the information stored
458
+ in <code>ArchiveEntry</code> a <code>ArArchiveEntry</code>
459
+ stores information about the owner user and group as well as
460
+ Unix permissions.</p>
461
+
462
+ <p>Adding an entry to an ar archive:</p>
463
+ <source><![CDATA[
464
+ ArArchiveEntry entry = new ArArchiveEntry(name, size);
465
+ arOutput.putArchiveEntry(entry);
466
+ arOutput.write(contentOfEntry);
467
+ arOutput.closeArchiveEntry();
468
+ ]]></source>
469
+
470
+ <p>Reading entries from an ar archive:</p>
471
+ <source><![CDATA[
472
+ ArArchiveEntry entry = (ArArchiveEntry) arInput.getNextEntry();
473
+ byte[] content = new byte[entry.getSize()];
474
+ LOOP UNTIL entry.getSize() HAS BEEN READ {
475
+ arInput.read(content, offset, content.length - offset);
476
+ }
477
+ ]]></source>
478
+
479
+ <p>Traditionally the AR format doesn't allow file names longer
480
+ than 16 characters. There are two variants that circumvent
481
+ this limitation in different ways, the GNU/SVR4 and the BSD
482
+ variant. Commons Compress 1.0 to 1.2 can only read archives
483
+ using the GNU/SVR4 variant, support for the BSD variant has
484
+ been added in Commons Compress 1.3. Commons Compress 1.3
485
+ also optionally supports writing archives with file names
486
+ longer than 16 characters using the BSD dialect, writing
487
+ the SVR4/GNU dialect is not supported.</p>
488
+
489
+ <table>
490
+ <thead>
491
+ <tr>
492
+ <th>Version of Apache Commons Compress</th>
493
+ <th>Support for Traditional AR Format</th>
494
+ <th>Support for GNU/SVR4 Dialect</th>
495
+ <th>Support for BSD Dialect</th>
496
+ </tr>
497
+ </thead>
498
+ <tbody>
499
+ <tr>
500
+ <td>1.0 to 1.2</td>
501
+ <td>read/write</td>
502
+ <td>read</td>
503
+ <td>-</td>
504
+ </tr>
505
+ <tr>
506
+ <td>1.3 and later</td>
507
+ <td>read/write</td>
508
+ <td>read</td>
509
+ <td>read/write</td>
510
+ </tr>
511
+ </tbody>
512
+ </table>
513
+
514
+ <p>It is not possible to detect the end of an AR archive in a
515
+ reliable way so <code>ArArchiveInputStream</code> will read
516
+ until it reaches the end of the stream or fails to parse the
517
+ stream's content as AR entries.</p>
518
+
519
+ </subsection>
520
+
521
+ <subsection name="arj">
522
+
523
+ <p>Note that Commons Compress doesn't support compressed,
524
+ encrypted or multi-volume ARJ archives, yet.</p>
525
+
526
+ <p>Uncompressing a given arj archive (you would
527
+ certainly add exception handling and make sure all streams
528
+ get closed properly):</p>
529
+ <source><![CDATA[
530
+ ArjArchiveEntry entry = arjInput.getNextEntry();
531
+ byte[] content = new byte[entry.getSize()];
532
+ LOOP UNTIL entry.getSize() HAS BEEN READ {
533
+ arjInput.read(content, offset, content.length - offset);
534
+ }
535
+ ]]></source>
536
+ </subsection>
537
+
538
+ <subsection name="cpio">
539
+
540
+ <p>In addition to the information stored
541
+ in <code>ArchiveEntry</code> a <code>CpioArchiveEntry</code>
542
+ stores various attributes including information about the
543
+ original owner and permissions.</p>
544
+
545
+ <p>The cpio package supports the "new portable" as well as the
546
+ "old" format of CPIO archives in their binary, ASCII and
547
+ "with CRC" variants.</p>
548
+
549
+ <p>Adding an entry to a cpio archive:</p>
550
+ <source><![CDATA[
551
+ CpioArchiveEntry entry = new CpioArchiveEntry(name, size);
552
+ cpioOutput.putArchiveEntry(entry);
553
+ cpioOutput.write(contentOfEntry);
554
+ cpioOutput.closeArchiveEntry();
555
+ ]]></source>
556
+
557
+ <p>Reading entries from a cpio archive:</p>
558
+ <source><![CDATA[
559
+ CpioArchiveEntry entry = cpioInput.getNextCPIOEntry();
560
+ byte[] content = new byte[entry.getSize()];
561
+ LOOP UNTIL entry.getSize() HAS BEEN READ {
562
+ cpioInput.read(content, offset, content.length - offset);
563
+ }
564
+ ]]></source>
565
+
566
+ <p>Traditionally CPIO archives are written in blocks of 512
567
+ bytes - the block size is a configuration parameter of the
568
+ <code>Cpio*Stream</code>'s constructors. Starting with version
569
+ 1.5 <code>CpioArchiveInputStream</code> will consume the
570
+ padding written to fill the current block when the end of the
571
+ archive is reached. Unfortunately many CPIO implementations
572
+ use larger block sizes so there may be more zero-byte padding
573
+ left inside the original input stream after the archive has
574
+ been consumed completely.</p>
575
+
576
+ </subsection>
577
+
578
+ <subsection name="jar">
579
+ <p>In general, JAR archives are ZIP files, so the JAR package
580
+ supports all options provided by the <a href="#zip">ZIP</a> package.</p>
581
+
582
+ <p>To be interoperable JAR archives should always be created
583
+ using the UTF-8 encoding for file names (which is the
584
+ default).</p>
585
+
586
+ <p>Archives created using <code>JarArchiveOutputStream</code>
587
+ will implicitly add a <code>JarMarker</code> extra field to
588
+ the very first archive entry of the archive which will make
589
+ Solaris recognize them as Java archives and allows them to
590
+ be used as executables.</p>
591
+
592
+ <p>Note that <code>ArchiveStreamFactory</code> doesn't
593
+ distinguish ZIP archives from JAR archives, so if you use
594
+ the one-argument <code>createArchiveInputStream</code>
595
+ method on a JAR archive, it will still return the more
596
+ generic <code>ZipArchiveInputStream</code>.</p>
597
+
598
+ <p>The <code>JarArchiveEntry</code> class contains fields for
599
+ certificates and attributes that are planned to be supported
600
+ in the future but are not supported as of Compress 1.0.</p>
601
+
602
+ <p>Adding an entry to a jar archive:</p>
603
+ <source><![CDATA[
604
+ JarArchiveEntry entry = new JarArchiveEntry(name, size);
605
+ entry.setSize(size);
606
+ jarOutput.putArchiveEntry(entry);
607
+ jarOutput.write(contentOfEntry);
608
+ jarOutput.closeArchiveEntry();
609
+ ]]></source>
610
+
611
+ <p>Reading entries from a jar archive:</p>
612
+ <source><![CDATA[
613
+ JarArchiveEntry entry = jarInput.getNextJarEntry();
614
+ byte[] content = new byte[entry.getSize()];
615
+ LOOP UNTIL entry.getSize() HAS BEEN READ {
616
+ jarInput.read(content, offset, content.length - offset);
617
+ }
618
+ ]]></source>
619
+ </subsection>
620
+
621
+ <subsection name="dump">
622
+
623
+ <p>In addition to the information stored
624
+ in <code>ArchiveEntry</code> a <code>DumpArchiveEntry</code>
625
+ stores various attributes including information about the
626
+ original owner and permissions.</p>
627
+
628
+ <p>As of Commons Compress 1.3 only dump archives using the
629
+ new-fs format - this is the most common variant - are
630
+ supported. Right now this library supports uncompressed and
631
+ ZLIB compressed archives and can not write archives at
632
+ all.</p>
633
+
634
+ <p>Reading entries from a dump archive:</p>
635
+ <source><![CDATA[
636
+ DumpArchiveEntry entry = dumpInput.getNextDumpEntry();
637
+ byte[] content = new byte[entry.getSize()];
638
+ LOOP UNTIL entry.getSize() HAS BEEN READ {
639
+ dumpInput.read(content, offset, content.length - offset);
640
+ }
641
+ ]]></source>
642
+
643
+ <p>Prior to version 1.5 <code>DumpArchiveInputStream</code>
644
+ would close the original input once it had read the last
645
+ record. Starting with version 1.5 it will not close the
646
+ stream implicitly.</p>
647
+
648
+ </subsection>
649
+
650
+ <subsection name="tar">
651
+
652
+ <p>The TAR package has a <a href="tar.html">dedicated
653
+ documentation page</a>.</p>
654
+
655
+ <p>Adding an entry to a tar archive:</p>
656
+ <source><![CDATA[
657
+ TarArchiveEntry entry = new TarArchiveEntry(name);
658
+ entry.setSize(size);
659
+ tarOutput.putArchiveEntry(entry);
660
+ tarOutput.write(contentOfEntry);
661
+ tarOutput.closeArchiveEntry();
662
+ ]]></source>
663
+
664
+ <p>Reading entries from a tar archive:</p>
665
+ <source><![CDATA[
666
+ TarArchiveEntry entry = tarInput.getNextTarEntry();
667
+ byte[] content = new byte[entry.getSize()];
668
+ LOOP UNTIL entry.getSize() HAS BEEN READ {
669
+ tarInput.read(content, offset, content.length - offset);
670
+ }
671
+ ]]></source>
672
+ </subsection>
673
+
674
+ <subsection name="zip">
675
+ <p>The ZIP package has a <a href="zip.html">dedicated
676
+ documentation page</a>.</p>
677
+
678
+ <p>Adding an entry to a zip archive:</p>
679
+ <source><![CDATA[
680
+ ZipArchiveEntry entry = new ZipArchiveEntry(name);
681
+ entry.setSize(size);
682
+ zipOutput.putArchiveEntry(entry);
683
+ zipOutput.write(contentOfEntry);
684
+ zipOutput.closeArchiveEntry();
685
+ ]]></source>
686
+
687
+ <p><code>ZipArchiveOutputStream</code> can use some internal
688
+ optimizations exploiting <code>SeekableByteChannel</code> if it
689
+ knows it is writing to a seekable output rather than a non-seekable
690
+ stream. If you are writing to a file, you should use the
691
+ constructor that accepts a <code>File</code> or
692
+ <code>SeekableByteChannel</code> argument rather
693
+ than the one using an <code>OutputStream</code> or the
694
+ factory method in <code>ArchiveStreamFactory</code>.</p>
695
+
696
+ <p>Reading entries from a zip archive:</p>
697
+ <source><![CDATA[
698
+ ZipArchiveEntry entry = zipInput.getNextZipEntry();
699
+ byte[] content = new byte[entry.getSize()];
700
+ LOOP UNTIL entry.getSize() HAS BEEN READ {
701
+ zipInput.read(content, offset, content.length - offset);
702
+ }
703
+ ]]></source>
704
+
705
+ <p>Reading entries from a zip archive using the
706
+ recommended <code>ZipFile</code> class:</p>
707
+ <source><![CDATA[
708
+ ZipArchiveEntry entry = zipFile.getEntry(name);
709
+ InputStream content = zipFile.getInputStream(entry);
710
+ try {
711
+ READ UNTIL content IS EXHAUSTED
712
+ } finally {
713
+ content.close();
714
+ }
715
+ ]]></source>
716
+
717
+ <p>Reading entries from an in-memory zip archive using
718
+ <code>SeekableInMemoryByteChannel</code> and <code>ZipFile</code> class:</p>
719
+ <source><![CDATA[
720
+ byte[] inputData; // zip archive contents
721
+ SeekableInMemoryByteChannel inMemoryByteChannel = new SeekableInMemoryByteChannel(inputData);
722
+ ZipFile zipFile = new ZipFile(inMemoryByteChannel);
723
+ ZipArchiveEntry archiveEntry = zipFile.getEntry("entryName");
724
+ InputStream inputStream = zipFile.getInputStream(archiveEntry);
725
+ inputStream.read() // read data from the input stream
726
+ ]]></source>
727
+
728
+ <p>Creating a zip file with multiple threads:</p>
729
+ <p>
730
+ A simple implementation to create a zip file might look like this:
731
+ </p>
732
+
733
+ <source><![CDATA[
734
+ public class ScatterSample {
735
+
736
+ ParallelScatterZipCreator scatterZipCreator = new ParallelScatterZipCreator();
737
+ ScatterZipOutputStream dirs = ScatterZipOutputStream.fileBased(File.createTempFile("scatter-dirs", "tmp"));
738
+
739
+ public ScatterSample() throws IOException {
740
+ }
741
+
742
+ public void addEntry(ZipArchiveEntry zipArchiveEntry, InputStreamSupplier streamSupplier) throws IOException {
743
+ if (zipArchiveEntry.isDirectory() && !zipArchiveEntry.isUnixSymlink())
744
+ dirs.addArchiveEntry(ZipArchiveEntryRequest.createZipArchiveEntryRequest(zipArchiveEntry, streamSupplier));
745
+ else
746
+ scatterZipCreator.addArchiveEntry( zipArchiveEntry, streamSupplier);
747
+ }
748
+
749
+ public void writeTo(ZipArchiveOutputStream zipArchiveOutputStream)
750
+ throws IOException, ExecutionException, InterruptedException {
751
+ dirs.writeTo(zipArchiveOutputStream);
752
+ dirs.close();
753
+ scatterZipCreator.writeTo(zipArchiveOutputStream);
754
+ }
755
+ }
756
+ ]]></source>
757
+ </subsection>
758
+
759
+ </section>
760
+ <section name="Compressors">
761
+
762
+ <subsection name="Concatenated Streams">
763
+ <p>For the bzip2, gzip and XZ formats as well as the framed
764
+ lz4 format a single compressed file
765
+ may actually consist of several streams that will be
766
+ concatenated by the command line utilities when decompressing
767
+ them. Starting with Commons Compress 1.4 the
768
+ <code>*CompressorInputStream</code>s for these formats support
769
+ concatenating streams as well, but they won't do so by
770
+ default. You must use the two-arg constructor and explicitly
771
+ enable the support.</p>
772
+ </subsection>
773
+
774
+ <subsection name="Brotli">
775
+
776
+ <p>The implementation of this package is provided by the
777
+ <a href="https://github.com/google/brotli">Google Brotli dec</a> library.</p>
778
+
779
+ <p>Uncompressing a given Brotli compressed file (you would
780
+ certainly add exception handling and make sure all streams
781
+ get closed properly):</p>
782
+ <source><![CDATA[
783
+ InputStream fin = Files.newInputStream(Paths.get("archive.tar.br"));
784
+ BufferedInputStream in = new BufferedInputStream(fin);
785
+ OutputStream out = Files.newOutputStream(Paths.get("archive.tar"));
786
+ BrotliCompressorInputStream brIn = new BrotliCompressorInputStream(in);
787
+ final byte[] buffer = new byte[buffersize];
788
+ int n = 0;
789
+ while (-1 != (n = brIn.read(buffer))) {
790
+ out.write(buffer, 0, n);
791
+ }
792
+ out.close();
793
+ brIn.close();
794
+ ]]></source>
795
+ </subsection>
796
+
797
+ <subsection name="bzip2">
798
+
799
+ <p>Note that <code>BZip2CompressorOutputStream</code> keeps
800
+ hold of some big data structures in memory. While it is
801
+ recommended for <em>any</em> stream that you close it as soon as
802
+ you no longer need it, this is even more important
803
+ for <code>BZip2CompressorOutputStream</code>.</p>
804
+
805
+ <p>Uncompressing a given bzip2 compressed file (you would
806
+ certainly add exception handling and make sure all streams
807
+ get closed properly):</p>
808
+ <source><![CDATA[
809
+ InputStream fin = Files.newInputStream(Paths.get("archive.tar.bz2"));
810
+ BufferedInputStream in = new BufferedInputStream(fin);
811
+ OutputStream out = Files.newOutputStream(Paths.get("archive.tar"));
812
+ BZip2CompressorInputStream bzIn = new BZip2CompressorInputStream(in);
813
+ final byte[] buffer = new byte[buffersize];
814
+ int n = 0;
815
+ while (-1 != (n = bzIn.read(buffer))) {
816
+ out.write(buffer, 0, n);
817
+ }
818
+ out.close();
819
+ bzIn.close();
820
+ ]]></source>
821
+
822
+ <p>Compressing a given file using bzip2 (you would
823
+ certainly add exception handling and make sure all streams
824
+ get closed properly):</p>
825
+ <source><![CDATA[
826
+ InputStream in = Files.newInputStream(Paths.get("archive.tar"));
827
+ OutputStream fout = Files.newOutputStream(Paths.get("archive.tar.bz2"));
828
+ BufferedOutputStream out = new BufferedOutputStream(fout);
829
+ BZip2CompressorOutputStream bzOut = new BZip2CompressorOutputStream(out);
830
+ final byte[] buffer = new byte[buffersize];
831
+ int n = 0;
832
+ while (-1 != (n = in.read(buffer))) {
833
+ bzOut.write(buffer, 0, n);
834
+ }
835
+ bzOut.close();
836
+ in.close();
837
+ ]]></source>
838
+
839
+ </subsection>
840
+
841
+ <subsection name="DEFLATE">
842
+
843
+ <p>The implementation of the DEFLATE/INFLATE code used by this
844
+ package is provided by the <code>java.util.zip</code> package
845
+ of the Java class library.</p>
846
+
847
+ <p>Uncompressing a given DEFLATE compressed file (you would
848
+ certainly add exception handling and make sure all streams
849
+ get closed properly):</p>
850
+ <source><![CDATA[
851
+ InputStream fin = Files.newInputStream(Paths.get("some-file"));
852
+ BufferedInputStream in = new BufferedInputStream(fin);
853
+ OutputStream out = Files.newOutputStream(Paths.get("archive.tar"));
854
+ DeflateCompressorInputStream defIn = new DeflateCompressorInputStream(in);
855
+ final byte[] buffer = new byte[buffersize];
856
+ int n = 0;
857
+ while (-1 != (n = defIn.read(buffer))) {
858
+ out.write(buffer, 0, n);
859
+ }
860
+ out.close();
861
+ defIn.close();
862
+ ]]></source>
863
+
864
+ <p>Compressing a given file using DEFLATE (you would
865
+ certainly add exception handling and make sure all streams
866
+ get closed properly):</p>
867
+ <source><![CDATA[
868
+ InputStream in = Files.newInputStream(Paths.get("archive.tar"));
869
+ OutputStream fout = Files.newOutputStream(Paths.get("some-file"));
870
+ BufferedOutputStream out = new BufferedOutputStream(fout);
871
+ DeflateCompressorOutputStream defOut = new DeflateCompressorOutputStream(out);
872
+ final byte[] buffer = new byte[buffersize];
873
+ int n = 0;
874
+ while (-1 != (n = in.read(buffer))) {
875
+ defOut.write(buffer, 0, n);
876
+ }
877
+ defOut.close();
878
+ in.close();
879
+ ]]></source>
880
+
881
+ </subsection>
882
+
883
+ <subsection name="DEFLATE64">
884
+
885
+ <p>Uncompressing a given DEFLATE64 compressed file (you would
886
+ certainly add exception handling and make sure all streams
887
+ get closed properly):</p>
888
+ <source><![CDATA[
889
+ InputStream fin = Files.newInputStream(Paths.get("some-file"));
890
+ BufferedInputStream in = new BufferedInputStream(fin);
891
+ OutputStream out = Files.newOutputStream(Paths.get("archive.tar"));
892
+ Deflate64CompressorInputStream defIn = new Deflate64CompressorInputStream(in);
893
+ final byte[] buffer = new byte[buffersize];
894
+ int n = 0;
895
+ while (-1 != (n = defIn.read(buffer))) {
896
+ out.write(buffer, 0, n);
897
+ }
898
+ out.close();
899
+ defIn.close();
900
+ ]]></source>
901
+
902
+ </subsection>
903
+
904
+ <subsection name="gzip">
905
+
906
+ <p>The implementation of the DEFLATE/INFLATE code used by this
907
+ package is provided by the <code>java.util.zip</code> package
908
+ of the Java class library.</p>
909
+
910
+ <p>Uncompressing a given gzip compressed file (you would
911
+ certainly add exception handling and make sure all streams
912
+ get closed properly):</p>
913
+ <source><![CDATA[
914
+ InputStream fin = Files.newInputStream(Paths.get("archive.tar.gz"));
915
+ BufferedInputStream in = new BufferedInputStream(fin);
916
+ OutputStream out = Files.newOutputStream(Paths.get("archive.tar"));
917
+ GzipCompressorInputStream gzIn = new GzipCompressorInputStream(in);
918
+ final byte[] buffer = new byte[buffersize];
919
+ int n = 0;
920
+ while (-1 != (n = gzIn.read(buffer))) {
921
+ out.write(buffer, 0, n);
922
+ }
923
+ out.close();
924
+ gzIn.close();
925
+ ]]></source>
926
+
927
+ <p>Compressing a given file using gzip (you would
928
+ certainly add exception handling and make sure all streams
929
+ get closed properly):</p>
930
+ <source><![CDATA[
931
+ InputStream in = Files.newInputStream(Paths.get("archive.tar"));
932
+ OutputStream fout = Files.newOutputStream(Paths.get("archive.tar.gz"));
933
+ BufferedOutputStream out = new BufferedOutputStream(fout);
934
+ GzipCompressorOutputStream gzOut = new GzipCompressorOutputStream(out);
935
+ final byte[] buffer = new byte[buffersize];
936
+ int n = 0;
937
+ while (-1 != (n = in.read(buffer))) {
938
+ gzOut.write(buffer, 0, n);
939
+ }
940
+ gzOut.close();
941
+ in.close();
942
+ ]]></source>
943
+
944
+ </subsection>
945
+
946
+ <subsection name="LZ4">
947
+
948
+ <p>There are two different "formats" used for <a
949
+ href="http://lz4.github.io/lz4/">lz4</a>. The format called
950
+ "block format" only contains the raw compressed data while the
951
+ other provides a higher level "frame format" - Commons
952
+ Compress offers two different stream classes for reading or
953
+ writing either format.</p>
954
+
955
+ <p>Uncompressing a given framed LZ4 file (you would
956
+ certainly add exception handling and make sure all streams
957
+ get closed properly):</p>
958
+ <source><![CDATA[
959
+ InputStream fin = Files.newInputStream(Paths.get("archive.tar.lz4"));
960
+ BufferedInputStream in = new BufferedInputStream(fin);
961
+ OutputStream out = Files.newOutputStream(Paths.get("archive.tar"));
962
+ FramedLZ4CompressorInputStream zIn = new FramedLZ4CompressorInputStream(in);
963
+ final byte[] buffer = new byte[buffersize];
964
+ int n = 0;
965
+ while (-1 != (n = zIn.read(buffer))) {
966
+ out.write(buffer, 0, n);
967
+ }
968
+ out.close();
969
+ zIn.close();
970
+ ]]></source>
971
+
972
+ <p>Compressing a given file using the LZ4 frame format (you would
973
+ certainly add exception handling and make sure all streams
974
+ get closed properly):</p>
975
+ <source><![CDATA[
976
+ InputStream in = Files.newInputStream(Paths.get("archive.tar"));
977
+ OutputStream fout = Files.newOutputStream(Paths.get("archive.tar.lz4"));
978
+ BufferedOutputStream out = new BufferedOutputStream(fout);
979
+ FramedLZ4CompressorOutputStream lzOut = new FramedLZ4CompressorOutputStream(out);
980
+ final byte[] buffer = new byte[buffersize];
981
+ int n = 0;
982
+ while (-1 != (n = in.read(buffer))) {
983
+ lzOut.write(buffer, 0, n);
984
+ }
985
+ lzOut.close();
986
+ in.close();
987
+ ]]></source>
988
+
989
+ </subsection>
990
+
991
+ <subsection name="lzma">
992
+
993
+ <p>The implementation of this package is provided by the
994
+ public domain <a href="https://tukaani.org/xz/java.html">XZ
995
+ for Java</a> library.</p>
996
+
997
+ <p>Uncompressing a given LZMA compressed file (you would
998
+ certainly add exception handling and make sure all streams
999
+ get closed properly):</p>
1000
+ <source><![CDATA[
1001
+ InputStream fin = Files.newInputStream(Paths.get("archive.tar.lzma"));
1002
+ BufferedInputStream in = new BufferedInputStream(fin);
1003
+ OutputStream out = Files.newOutputStream(Paths.get("archive.tar"));
1004
+ LZMACompressorInputStream lzmaIn = new LZMACompressorInputStream(in);
1005
+ final byte[] buffer = new byte[buffersize];
1006
+ int n = 0;
1007
+ while (-1 != (n = lzmaIn.read(buffer))) {
1008
+ out.write(buffer, 0, n);
1009
+ }
1010
+ out.close();
1011
+ lzmaIn.close();
1012
+ ]]></source>
1013
+
1014
+ <p>Compressing a given file using LZMA (you would
1015
+ certainly add exception handling and make sure all streams
1016
+ get closed properly):</p>
1017
+ <source><![CDATA[
1018
+ InputStream in = Files.newInputStream(Paths.get("archive.tar"));
1019
+ OutputStream fout = Files.newOutputStream(Paths.get("archive.tar.lzma"));
1020
+ BufferedOutputStream out = new BufferedOutputStream(fout);
1021
+ LZMACompressorOutputStream lzOut = new LZMACompressorOutputStream(out);
1022
+ final byte[] buffer = new byte[buffersize];
1023
+ int n = 0;
1024
+ while (-1 != (n = in.read(buffer))) {
1025
+ lzOut.write(buffer, 0, n);
1026
+ }
1027
+ lzOut.close();
1028
+ in.close();
1029
+ ]]></source>
1030
+
1031
+ </subsection>
1032
+
1033
+ <subsection name="Pack200">
1034
+
1035
+ <p>The Pack200 package has a <a href="pack200.html">dedicated
1036
+ documentation page</a>.</p>
1037
+
1038
+ <p>The implementation of this package used to be provided by
1039
+ the <code>java.util.zip</code> package of the Java class
1040
+ library. Starting with Compress 1.21 the implementation uses
1041
+ a copy of the pack200 code of the now retired Apache
1042
+ Harmony&#x2122; project that ships with Compress itself.</p>
1043
+
1044
+ <p>Uncompressing a given pack200 compressed file (you would
1045
+ certainly add exception handling and make sure all streams
1046
+ get closed properly):</p>
1047
+ <source><![CDATA[
1048
+ InputStream fin = Files.newInputStream(Paths.get("archive.pack"));
1049
+ BufferedInputStream in = new BufferedInputStream(fin);
1050
+ OutputStream out = Files.newOutputStream(Paths.get("archive.jar"));
1051
+ Pack200CompressorInputStream pIn = new Pack200CompressorInputStream(in);
1052
+ final byte[] buffer = new byte[buffersize];
1053
+ int n = 0;
1054
+ while (-1 != (n = pIn.read(buffer))) {
1055
+ out.write(buffer, 0, n);
1056
+ }
1057
+ out.close();
1058
+ pIn.close();
1059
+ ]]></source>
1060
+
1061
+ <p>Compressing a given jar using pack200 (you would
1062
+ certainly add exception handling and make sure all streams
1063
+ get closed properly):</p>
1064
+ <source><![CDATA[
1065
+ InputStream in = Files.newInputStream(Paths.get("archive.jar"));
1066
+ OutputStream fout = Files.newOutputStream(Paths.get("archive.pack"));
1067
+ BufferedOutputStream out = new BufferedOutputStream(fout);
1068
+ Pack200CompressorOutputStream pOut = new Pack200CompressorOutputStream(out);
1069
+ final byte[] buffer = new byte[buffersize];
1070
+ int n = 0;
1071
+ while (-1 != (n = in.read(buffer))) {
1072
+ pOut.write(buffer, 0, n);
1073
+ }
1074
+ pOut.close();
1075
+ in.close();
1076
+ ]]></source>
1077
+
1078
+ </subsection>
1079
+
1080
+ <subsection name="Snappy">
1081
+
1082
+ <p>There are two different "formats" used for <a
1083
+ href="https://github.com/google/snappy/">Snappy</a>, one only
1084
+ contains the raw compressed data while the other provides a
1085
+ higher level "framing format" - Commons Compress offers two
1086
+ different stream classes for reading either format.</p>
1087
+
1088
+ <p>Starting with 1.12 we've added support for different
1089
+ dialects of the framing format that can be specified when
1090
+ constructing the stream. The <code>STANDARD</code> dialect
1091
+ follows the "framing format" specification while the
1092
+ <code>IWORK_ARCHIVE</code> dialect can be used to parse IWA
1093
+ files that are part of Apple's iWork 13 format. If no dialect
1094
+ has been specified, <code>STANDARD</code> is used. Only the
1095
+ <code>STANDARD</code> format can be detected by
1096
+ <code>CompressorStreamFactory</code>.</p>
1097
+
1098
+ <p>Uncompressing a given framed Snappy file (you would
1099
+ certainly add exception handling and make sure all streams
1100
+ get closed properly):</p>
1101
+ <source><![CDATA[
1102
+ InputStream fin = Files.newInputStream(Paths.get("archive.tar.sz"));
1103
+ BufferedInputStream in = new BufferedInputStream(fin);
1104
+ OutputStream out = Files.newOutputStream(Paths.get("archive.tar"));
1105
+ FramedSnappyCompressorInputStream zIn = new FramedSnappyCompressorInputStream(in);
1106
+ final byte[] buffer = new byte[buffersize];
1107
+ int n = 0;
1108
+ while (-1 != (n = zIn.read(buffer))) {
1109
+ out.write(buffer, 0, n);
1110
+ }
1111
+ out.close();
1112
+ zIn.close();
1113
+ ]]></source>
1114
+
1115
+ <p>Compressing a given file using framed Snappy (you would
1116
+ certainly add exception handling and make sure all streams
1117
+ get closed properly):</p>
1118
+ <source><![CDATA[
1119
+ InputStream in = Files.newInputStream(Paths.get("archive.tar"));
1120
+ OutputStream fout = Files.newOutputStream(Paths.get("archive.tar.sz"));
1121
+ BufferedOutputStream out = new BufferedOutputStream(fout);
1122
+ FramedSnappyCompressorOutputStream snOut = new FramedSnappyCompressorOutputStream(out);
1123
+ final byte[] buffer = new byte[buffersize];
1124
+ int n = 0;
1125
+ while (-1 != (n = in.read(buffer))) {
1126
+ snOut.write(buffer, 0, n);
1127
+ }
1128
+ snOut.close();
1129
+ in.close();
1130
+ ]]></source>
1131
+
1132
+ </subsection>
1133
+
1134
+ <subsection name="XZ">
1135
+
1136
+ <p>The implementation of this package is provided by the
1137
+ public domain <a href="https://tukaani.org/xz/java.html">XZ
1138
+ for Java</a> library.</p>
1139
+
1140
+ <p>When you try to open an XZ stream for reading using
1141
+ <code>CompressorStreamFactory</code>, Commons Compress will
1142
+ check whether the XZ for Java library is available. Starting
1143
+ with Compress 1.9 the result of this check will be cached
1144
+ unless Compress finds OSGi classes in its classpath. You can
1145
+ use <code>XZUtils#setCacheXZAvailability</code> to override
1146
+ this default behavior.</p>
1147
+
1148
+ <p>Uncompressing a given XZ compressed file (you would
1149
+ certainly add exception handling and make sure all streams
1150
+ get closed properly):</p>
1151
+ <source><![CDATA[
1152
+ InputStream fin = Files.newInputStream(Paths.get("archive.tar.xz"));
1153
+ BufferedInputStream in = new BufferedInputStream(fin);
1154
+ OutputStream out = Files.newOutputStream(Paths.get("archive.tar"));
1155
+ XZCompressorInputStream xzIn = new XZCompressorInputStream(in);
1156
+ final byte[] buffer = new byte[buffersize];
1157
+ int n = 0;
1158
+ while (-1 != (n = xzIn.read(buffer))) {
1159
+ out.write(buffer, 0, n);
1160
+ }
1161
+ out.close();
1162
+ xzIn.close();
1163
+ ]]></source>
1164
+
1165
+ <p>Compressing a given file using XZ (you would
1166
+ certainly add exception handling and make sure all streams
1167
+ get closed properly):</p>
1168
+ <source><![CDATA[
1169
+ InputStream in = Files.newInputStream(Paths.get("archive.tar"));
1170
+ OutputStream fout = Files.newOutputStream(Paths.get("archive.tar.xz"));
1171
+ BufferedOutputStream out = new BufferedOutputStream(fout);
1172
+ XZCompressorOutputStream xzOut = new XZCompressorOutputStream(out);
1173
+ final byte[] buffer = new byte[buffersize];
1174
+ int n = 0;
1175
+ while (-1 != (n = in.read(buffer))) {
1176
+ xzOut.write(buffer, 0, n);
1177
+ }
1178
+ xzOut.close();
1179
+ in.close();
1180
+ ]]></source>
1181
+
1182
+ </subsection>
1183
+
1184
+ <subsection name="Z">
1185
+
1186
+ <p>Uncompressing a given Z compressed file (you would
1187
+ certainly add exception handling and make sure all streams
1188
+ get closed properly):</p>
1189
+ <source><![CDATA[
1190
+ InputStream fin = Files.newInputStream(Paths.get("archive.tar.Z"));
1191
+ BufferedInputStream in = new BufferedInputStream(fin);
1192
+ OutputStream out = Files.newOutputStream(Paths.get("archive.tar"));
1193
+ ZCompressorInputStream zIn = new ZCompressorInputStream(in);
1194
+ final byte[] buffer = new byte[buffersize];
1195
+ int n = 0;
1196
+ while (-1 != (n = zIn.read(buffer))) {
1197
+ out.write(buffer, 0, n);
1198
+ }
1199
+ out.close();
1200
+ zIn.close();
1201
+ ]]></source>
1202
+
1203
+ </subsection>
1204
+
1205
+ <subsection name="Zstandard">
1206
+
1207
+ <p>The implementation of this package is provided by the
1208
+ <a href="https://github.com/luben/zstd-jni">Zstandard JNI</a> library.</p>
1209
+
1210
+ <p>Uncompressing a given Zstandard compressed file (you would
1211
+ certainly add exception handling and make sure all streams
1212
+ get closed properly):</p>
1213
+ <source><![CDATA[
1214
+ InputStream fin = Files.newInputStream(Paths.get("archive.tar.zstd"));
1215
+ BufferedInputStream in = new BufferedInputStream(fin);
1216
+ OutputStream out = Files.newOutputStream(Paths.get("archive.tar"));
1217
+ ZstdCompressorInputStream zsIn = new ZstdCompressorInputStream(in);
1218
+ final byte[] buffer = new byte[buffersize];
1219
+ int n = 0;
1220
+ while (-1 != (n = zsIn.read(buffer))) {
1221
+ out.write(buffer, 0, n);
1222
+ }
1223
+ out.close();
1224
+ zsIn.close();
1225
+ ]]></source>
1226
+
1227
+ <p>Compressing a given file using the Zstandard format (you
1228
+ would certainly add exception handling and make sure all
1229
+ streams get closed properly):</p>
1230
+ <source><![CDATA[
1231
+ InputStream in = Files.newInputStream(Paths.get("archive.tar"));
1232
+ OutputStream fout = Files.newOutputStream(Paths.get("archive.tar.zstd"));
1233
+ BufferedOutputStream out = new BufferedOutputStream(fout);
1234
+ ZstdCompressorOutputStream zOut = new ZstdCompressorOutputStream(out);
1235
+ final byte[] buffer = new byte[buffersize];
1236
+ int n = 0;
1237
+ while (-1 != (n = in.read(buffer))) {
1238
+ zOut.write(buffer, 0, n);
1239
+ }
1240
+ zOut.close();
1241
+ in.close();
1242
+ ]]></source>
1243
+
1244
+ </subsection>
1245
+ </section>
1246
+
1247
+ <section name="Extending Commons Compress">
1248
+
1249
+ <p>
1250
+ Starting in release 1.13, it is now possible to add Compressor- and ArchiverStream implementations using the
1251
+ Java's <a href="https://docs.oracle.com/javase/7/docs/api/java/util/ServiceLoader.html">ServiceLoader</a>
1252
+ mechanism.
1253
+ </p>
1254
+
1255
+ <subsection name="Extending Commons Compress Compressors">
1256
+
1257
+ <p>
1258
+ To provide your own compressor, you must make available on the classpath a file called
1259
+ <code>META-INF/services/org.apache.commons.compress.compressors.CompressorStreamProvider</code>.
1260
+ </p>
1261
+ <p>
1262
+ This file MUST contain one fully-qualified class name per line.
1263
+ </p>
1264
+ <p>
1265
+ For example:
1266
+ </p>
1267
+ <pre>org.apache.commons.compress.compressors.TestCompressorStreamProvider</pre>
1268
+ <p>
1269
+ This class MUST implement the Commons Compress interface
1270
+ <a href="apidocs/org/apache/commons/compress/compressors/CompressorStreamProvider.html">org.apache.commons.compress.compressors.CompressorStreamProvider</a>.
1271
+ </p>
1272
+ </subsection>
1273
+
1274
+ <subsection name="Extending Commons Compress Archivers">
1275
+
1276
+ <p>
1277
+ To provide your own archiver, you must make available on the classpath a file called
1278
+ <code>META-INF/services/org.apache.commons.compress.archivers.ArchiveStreamProvider</code>.
1279
+ </p>
1280
+ <p>
1281
+ This file MUST contain one fully-qualified class name per line.
1282
+ </p>
1283
+ <p>
1284
+ For example:
1285
+ </p>
1286
+ <pre>org.apache.commons.compress.archivers.TestArchiveStreamProvider</pre>
1287
+ <p>
1288
+ This class MUST implement the Commons Compress interface
1289
+ <a href="apidocs/org/apache/commons/compress/archivers/ArchiveStreamProvider.html">org.apache.commons.compress.archivers.ArchiveStreamProvider</a>.
1290
+ </p>
1291
+ </subsection>
1292
+
1293
+ </section>
1294
+ </body>
1295
+ </document>
local-test-commons-compress-full-01-vuln_2/afc-commons-compress/src/site/xdoc/limitations.xml ADDED
@@ -0,0 +1,259 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ <?xml version="1.0"?>
2
+ <!--
3
+ Licensed to the Apache Software Foundation (ASF) under one or more
4
+ contributor license agreements. See the NOTICE file distributed with
5
+ this work for additional information regarding copyright ownership.
6
+ The ASF licenses this file to You under the Apache License, Version 2.0
7
+ (the "License"); you may not use this file except in compliance with
8
+ the License. You may obtain a copy of the License at
9
+
10
+ http://www.apache.org/licenses/LICENSE-2.0
11
+
12
+ Unless required by applicable law or agreed to in writing, software
13
+ distributed under the License is distributed on an "AS IS" BASIS,
14
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15
+ See the License for the specific language governing permissions and
16
+ limitations under the License.
17
+ -->
18
+ <document xmlns="http://maven.apache.org/XDOC/2.0"
19
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
20
+ xsi:schemaLocation="http://maven.apache.org/XDOC/2.0 https://maven.apache.org/xsd/xdoc-2.0.xsd">
21
+ <properties>
22
+ <title>Known Limitations and Problems</title>
23
+ </properties>
24
+
25
+ <body>
26
+ <section name="General">
27
+ <p>
28
+ This page lists the known limitations and problems of Apache
29
+ Commons Compress&#x2122; grouped by the archiving/compression
30
+ format they apply to.
31
+ </p>
32
+ <ul>
33
+ <li>Several implementations of decompressors and unarchivers will
34
+ invoke <a
35
+ href="https://docs.oracle.com/javase/10/docs/api/java/io/InputStream.html#skip(long)"><code>skip</code></a>
36
+ on the underlying <code>InputStream</code> which may throw an
37
+ <code>IOException</code> in some stream implementations. One
38
+ known case where this happens is when using
39
+ <code>System.in</code> as input. If you encounter an
40
+ exception with a message like "Illegal seek" we recommend you
41
+ wrap your stream in a <code>SkipShieldingInputStream</code>
42
+ from our utils package before passing it to Compress.</li>
43
+ <li>Commons Compress prior to 1.21 cannot be built on JDK 14 or newer.</li>
44
+ </ul>
45
+ </section>
46
+
47
+ <section name="7Z">
48
+ <ul>
49
+ <li>the format requires the otherwise optional <a
50
+ href="https://tukaani.org/xz/java.html">XZ for Java</a>
51
+ library.</li>
52
+ <li>only <code>File</code>s are supported as input/output,
53
+ not streams. Starting with Compress 1.13
54
+ <code>SeekableByteChannel</code> is supported as well.</li>
55
+ <li>In Compress 1.7
56
+ <code>ArchiveStreamFactory</code> will not auto-detect 7z
57
+ archives, starting with 1.8 it will throw a
58
+ <code>StreamingNotSupportedException</code> when reading from
59
+ a 7z archive.</li>
60
+ <li>Encryption, solid compression and header compression
61
+ are only supported when reading archives</li>
62
+ <li>Commons Compress 1.12 and earlier didn't support writing
63
+ LZMA.</li>
64
+ <li>Several of the "methods" supported by 7z are not
65
+ implemented in Compress.</li>
66
+ <li>No support for writing multi-volume archives. Such
67
+ archives can be read by simply concatenating the parts, for
68
+ example by using
69
+ <code>MultiReadOnlySeekableByteChannel</code>.</li>
70
+ <li>Support for some BCJ filters and the DELTA filter has
71
+ been added with Compress 1.8. Because of a known bug in
72
+ version 1.4 of the <a
73
+ href="https://tukaani.org/xz/java.html">XZ for Java</a>
74
+ library, archives using BCJ filters will cause an
75
+ <code>AssertionError</code> when read. If you need support
76
+ for BCJ filters you must use XZ for Java 1.5 or later.</li>
77
+ </ul>
78
+ </section>
79
+ <section name="AR">
80
+ <ul>
81
+ <li>AR archives can not contain directories - this is a
82
+ limitation of the format rather than one of Compress'
83
+ implementation.</li>
84
+ <li>file names longer than 16 characters are only fully
85
+ supported using the BSD dialect; the GNU/SVR4 dialect is only
86
+ supported when reading archives.</li>
87
+ </ul>
88
+ </section>
89
+ <section name="ARJ">
90
+ <ul>
91
+ <li>read-only support</li>
92
+ <li>no support for compression, encryption or multi-volume
93
+ archives</li>
94
+ </ul>
95
+ </section>
96
+ <section name="Brotli">
97
+ <ul>
98
+ <li>the format requires the otherwise optional <a
99
+ href="https://github.com/google/brotli">Google Brotli dec</a>
100
+ library.</li>
101
+ <li>read-only support</li>
102
+ <li><code>CompressorStreamFactory</code> is not able to auto-detect
103
+ streams using Brotli compression.</li>
104
+ </ul>
105
+ </section>
106
+ <section name="BZIP2">
107
+ <p>Versions of Compress prior to 1.4.1 are vulnerable to a
108
+ possible denial of service attack, see the <a
109
+ href="security.html">Security Reports</a> page for details.</p>
110
+ </section>
111
+ <section name="CPIO">
112
+ <p>We are not aware of any problems.</p>
113
+ </section>
114
+ <section name="DEFLATE">
115
+ <ul>
116
+ <li><code>CompressorStreamFactory</code> is not able to auto-detect
117
+ streams using DEFLATE compression.</li>
118
+ </ul>
119
+ </section>
120
+ <section name="DEFLATE64">
121
+ <ul>
122
+ <li><code>CompressorStreamFactory</code> is not able to auto-detect
123
+ streams using DEFLATE64 compression.</li>
124
+ <li>read-only support</li>
125
+ </ul>
126
+ </section>
127
+ <section name="DUMP">
128
+ <ul>
129
+ <li>read-only support</li>
130
+ <li>only the new-fs format is supported</li>
131
+ <li>the only compression algorithm supported is zlib</li>
132
+ </ul>
133
+ </section>
134
+ <section name="GZIP">
135
+ <p>We are not aware of any problems.</p>
136
+ </section>
137
+ <section name="JAR">
138
+ <p>JAR archives are special ZIP archives, all limitations of <a
139
+ href="#ZIP">ZIP</a> apply to JAR as well.</p>
140
+ <ul>
141
+ <li><code>ArchiveStreamFactory</code> cannot tell JAR
142
+ archives from ZIP archives and will not auto-detect
143
+ JARs.</li>
144
+ <li>Compress doesn't provide special access to the archive's
145
+ MANIFEST</li>
146
+ </ul>
147
+ </section>
148
+ <section name="LZ4">
149
+ <ul>
150
+ <li>In theory LZ4 compressed streams can contain literals and
151
+ copies of arbitrary length while Commons Compress only
152
+ supports sizes up to 2<sup>63</sup> - 1 (i.e. &#x2248; 9.2
153
+ EB).</li>
154
+ </ul>
155
+ </section>
156
+ <section name="LZMA">
157
+ <ul>
158
+ <li>the format requires the otherwise optional <a
159
+ href="https://tukaani.org/xz/java.html">XZ for Java</a>
160
+ library.</li>
161
+ <li>Commons Compress 1.12 and earlier only support reading
162
+ the format</li>
163
+ </ul>
164
+ </section>
165
+ <section name="PACK200">
166
+ <ul>
167
+ <li><p>Pack200 support in Commons Compress prior to 1.21 relies on the
168
+ <code>Pack200</code> class of the Java classlib. Java 14
169
+ removed support and thus Pack200 will not work at all when
170
+ running on Java 14 or later.</p>
171
+ <p>Starting with Commons Compress 1.21 the classlib
172
+ implementation is no longer used at all, instead Commons
173
+ Compress contains the pack200 code of the retired Apache
174
+ Harmony&#x2122; project.</p></li>
175
+ </ul>
176
+ </section>
177
+ <section name="SNAPPY">
178
+ <ul>
179
+ <li>Commons Compress 1.13 and earlier only support reading
180
+ the format</li>
181
+ </ul>
182
+ </section>
183
+ <section name="TAR">
184
+ <ul>
185
+ <li>sparse files could not be read in version prior to
186
+ Compress 1.20</li>
187
+ <li>sparse files can not be written</li>
188
+ <li>only a subset of the GNU and POSIX extensions are
189
+ supported</li>
190
+ <li>In Compress 1.6 <code>TarArchiveInputStream</code> could
191
+ fail to read the full contents of an entry unless the stream
192
+ was wrapped in a buffering stream.</li>
193
+ </ul>
194
+ </section>
195
+ <section name="XZ">
196
+ <ul>
197
+ <li>the format requires the otherwise optional <a
198
+ href="https://tukaani.org/xz/java.html">XZ for Java</a>
199
+ library.</li>
200
+ </ul>
201
+ </section>
202
+ <section name="Z">
203
+ <ul>
204
+ <li>Prior to Compress 1.8.1
205
+ <code>CompressorStreamFactory</code> was not able to
206
+ auto-detect streams using .Z compression.</li>
207
+ <li>read-only support</li>
208
+ </ul>
209
+ </section>
210
+ <section name="ZIP">
211
+ <ul>
212
+ <li><code>ZipArchiveInputStream</code> is limited and may
213
+ even return false contents in some cases, use
214
+ <code>ZipFile</code> whenever possible. See <a
215
+ href="zip.html#ZipArchiveInputStream_vs_ZipFile">the ZIP
216
+ documentation page</a> for details. This limitation is a
217
+ result of streaming data vs using random access and not a
218
+ limitation of Compress' specific implementation.</li>
219
+ <li>only a subset of compression methods are supported,
220
+ including the most common STORED and DEFLATEd. IMPLODE,
221
+ SHRINK, DEFLATE64 and BZIP2 support is read-only.</li>
222
+ <li>no support for encryption</li>
223
+ <li>no support for multi-volume archives prior to Compress 1.20</li>
224
+ <li>It is currently not possible to write split archives with
225
+ more than 64k segments. When creating split archives with more
226
+ than 100 segments you will need to adjust the file names as
227
+ <code>ZipArchiveOutputStream</code> assumes extensions will be
228
+ three characters long.</li>
229
+ <li>In versions prior to Compress 1.6
230
+ <code>ZipArchiveEntries</code> read from an archive will
231
+ contain non-zero millisecond values when using Java 8 or later rather
232
+ than the expected two-second granularity.</li>
233
+ <li>Compress 1.7 has a known bug where the very first entry
234
+ of an archive will not be read correctly by
235
+ <code>ZipArchiveInputStream</code> if it used the STORED
236
+ method.</li>
237
+ <li><code>ZipArchiveEntry#getLastModifiedDate</code> uses
238
+ <code>ZipEntry#getTime</code> under the covers which may
239
+ return different times for the same archive when using
240
+ different versions of Java.</li>
241
+ <li>In versions of Compress prior to 1.16 a specially crafted
242
+ ZIP archive can be used to cause an infinite loop inside of
243
+ Compress' extra field parser used by the <code>ZipFile</code>
244
+ and <code>ZipArchiveInputStream</code> classes. This can be
245
+ used to mount a denial of service attack against services
246
+ that use Compress' zip package. See the <a
247
+ href="security.html">Security Reports</a> page for
248
+ details.</li>
249
+ </ul>
250
+ </section>
251
+ <section name="Zstandard">
252
+ <ul>
253
+ <li>the format requires the otherwise optional <a
254
+ href="https://github.com/luben/zstd-jni">Zstandard JNI</a>
255
+ library.</li>
256
+ </ul>
257
+ </section>
258
+ </body>
259
+ </document>
local-test-commons-compress-full-01-vuln_2/afc-commons-compress/src/site/xdoc/security.xml ADDED
@@ -0,0 +1,301 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ <?xml version="1.0"?>
2
+ <!--
3
+
4
+ Licensed to the Apache Software Foundation (ASF) under one or more
5
+ contributor license agreements. See the NOTICE file distributed with
6
+ this work for additional information regarding copyright ownership.
7
+ The ASF licenses this file to You under the Apache License, Version 2.0
8
+ (the "License"); you may not use this file except in compliance with
9
+ the License. You may obtain a copy of the License at
10
+
11
+ http://www.apache.org/licenses/LICENSE-2.0
12
+
13
+ Unless required by applicable law or agreed to in writing, software
14
+ distributed under the License is distributed on an "AS IS" BASIS,
15
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16
+ See the License for the specific language governing permissions and
17
+ limitations under the License.
18
+ -->
19
+ <document xmlns="http://maven.apache.org/XDOC/2.0"
20
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
21
+ xsi:schemaLocation="http://maven.apache.org/XDOC/2.0 https://maven.apache.org/xsd/xdoc-2.0.xsd">
22
+ <properties>
23
+ <title>Commons Compress Security Reports</title>
24
+ <author email="dev@commons.apache.org">Commons Team</author>
25
+ </properties>
26
+ <body>
27
+ <section name="General Information">
28
+ <p>For information about reporting or asking questions about
29
+ security problems, please see the <a
30
+ href="https://commons.apache.org/security.html">security page
31
+ of the Commons project</a>.</p>
32
+ </section>
33
+
34
+ <section name="Apache Commons Compress Security Vulnerabilities">
35
+ <p>This page lists all security vulnerabilities fixed in
36
+ released versions of Apache Commons Compress. Each
37
+ vulnerability is given a security impact rating by the
38
+ development team - please note that this rating may vary from
39
+ platform to platform. We also list the versions of Commons
40
+ Compress the flaw is known to affect, and where a flaw has not
41
+ been verified list the version with a question mark.</p>
42
+
43
+ <p>Please note that binary patches are never provided. If you
44
+ need to apply a source code patch, use the building
45
+ instructions for the Commons Compress version that you are
46
+ using.</p>
47
+
48
+ <p>If you need help on building Commons Compress or other help
49
+ on following the instructions to mitigate the known
50
+ vulnerabilities listed here, please send your questions to the
51
+ public <a href="mail-lists.html">Compress Users mailing
52
+ list</a>.</p>
53
+
54
+ <p>If you have encountered an unlisted security vulnerability
55
+ or other unexpected behavior that has security impact, or if
56
+ the descriptions here are incomplete, please report them
57
+ privately to the Apache Security Team. Thank you.</p>
58
+
59
+ <subsection name="Fixed in Apache Commons Compress 1.26.0">
60
+ <p><b>Important: Denial of Service</b> <a
61
+ href="https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2024-25710">CVE-2024-25710</a></p>
62
+ <p>This affects version 1.3 through 1.25.0.</p>
63
+ <p>This denial of service is caused by an infinite loop reading a corrupted DUMP file.</p>
64
+ <p>Users are recommended to upgrade to version 1.26.0 which fixes the issue.</p>
65
+ <p>Credit to Yakov Shafranovich, Amazon Web Services (reporter).</p>
66
+
67
+ <p><b>Moderate: Denial of Service</b> <a
68
+ href="https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2024-26308">CVE-2024-26308</a></p>
69
+ <p>You can get an OutOfMemoryError unpacking a broken Pack200 file.</p>
70
+ <p>This issue affects Commons Compress 1.21 before 1.26.0.</p>
71
+ <p>Users are recommended to upgrade to version 1.26.0 which fixes the issue.</p>
72
+ <p>Credit to Yakov Shafranovich, Amazon Web Services (reporter).</p>
73
+ </subsection>
74
+
75
+ <subsection name="Fixed in Apache Commons Compress 1.24.0">
76
+ <p><b>Moderate: Denial of Service</b> <a
77
+ href="https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2023-42503">CVE-2023-42503</a></p>
78
+
79
+ <p>Improper Input Validation, Uncontrolled Resource Consumption vulnerability in Apache Commons Compress in TAR parsing.</p>
80
+ <p>This issue affects Apache Commons Compress: from 1.22 before 1.24.0.</p>
81
+ <p>Users are recommended to upgrade to version 1.24.0, which fixes the issue.</p>
82
+ <p>A third party can create a malformed TAR file by manipulating file modification times headers,
83
+ which when parsed with Apache Commons Compress, will cause a denial of service issue via CPU consumption.</p>
84
+ <p>In version 1.22 of Apache Commons Compress, support was added for file modification times with higher precision
85
+ (issue # COMPRESS-612<sup><a href="#Ref-1-24-1">[1]</a></sup>).
86
+ The format for the PAX extended headers carrying this data consists of two numbers separated by a period<sup><a href="#Ref-1-24-2">[2]</a></sup>,
87
+ indicating seconds and subsecond precision (for example “1647221103.5998539”). The impacted fields are “atime”, “ctime”, “mtime” and
88
+ “LIBARCHIVE.creationtime”. No input validation is performed prior to the parsing of header values.</p>
89
+ <p>Parsing of these numbers uses the BigDecimal<sup><a href="#Ref-1-24-3">[3]</a></sup> class from the JDK which has a publicly known algorithmic complexity issue when doing
90
+ operations on large numbers, causing denial of service (see issue # JDK-6560193<sup><a href="#Ref-1-24-4">[4]</a></sup>). A third party can manipulate file time headers
91
+ in a TAR file by placing a number with a very long fraction (300,000 digits) or a number with exponent notation (such as “9e9999999”)
92
+ within a file modification time header, and the parsing of files with these headers will take hours instead of seconds, leading to a
93
+ denial of service via exhaustion of CPU resources. This issue is similar to CVE-2012-2098<sup><a href="#Ref-1-24-5">[5]</a></sup>.</p>
94
+ <ul>
95
+ <li id="Ref-1-24-1">[1]: <a href="https://issues.apache.org/jira/browse/COMPRESS-612">COMPRESS-612</a></li>
96
+ <li id="Ref-1-24-2">[2]: <a href="https://pubs.opengroup.org/onlinepubs/9699919799/utilities/pax.html#tag_20_92_13_05">PAX extended headers</a></li>
97
+ <li id="Ref-1-24-3">[3]: <a href="https://docs.oracle.com/javase/8/docs/api/java/math/BigDecimal.html">BigDecimal</a></li>
98
+ <li id="Ref-1-24-4">[4]: <a href="https://bugs.openjdk.org/browse/JDK-6560193">JDK-6560193</a></li>
99
+ <li id="Ref-1-24-5">[5]: <a href="https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2012-2098">CVE-2012-2098</a></li>
100
+ </ul>
101
+ <p>Only applications using CompressorStreamFactory class (with auto-detection of file types), TarArchiveInputStream and TarFile
102
+ classes to parse TAR files are impacted. Since this code was introduced in v1.22, only that version and later versions are impacted.</p>
103
+ </subsection>
104
+
105
+ <subsection name="Fixed in Apache Commons Compress 1.21">
106
+ <p><b>Low: Denial of Service</b> <a
107
+ href="https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2021-35515">CVE-2021-35515</a></p>
108
+
109
+ <p>When reading a specially crafted 7Z archive, the construction of the
110
+ list of codecs that decompress an entry can result in an infinite
111
+ loop. This could be used to mount a denial of service attack against
112
+ services that use Compress' sevenz package.</p>
113
+
114
+ <p>This was fixed in revision <a
115
+ href="https://gitbox.apache.org/repos/asf?p=commons-compress.git;a=commit;h=3fe6b42110dc56d0d6fe0aaf80cfecb8feea5321">3fe6b42</a>.</p>
116
+
117
+ <p>This issue was discovered by OSS Fuzz.</p>
118
+
119
+ <p>Affects: 1.6 - 1.20</p>
120
+
121
+ <p><b>Low: Denial of Service</b> <a
122
+ href="https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2021-35516">CVE-2021-35516</a></p>
123
+
124
+ <p>When reading a specially crafted 7Z archive, Compress can be made to
125
+ allocate large amounts of memory that finally leads to an out of memory
126
+ error even for very small inputs. This could be used to mount a denial
127
+ of service attack against services that use Compress' sevenz package.</p>
128
+
129
+ <p>This was fixed in revisions
130
+ <a
131
+ href="https://gitbox.apache.org/repos/asf?p=commons-compress.git;a=commit;h=26924e96c7730db014c310757e11c9359db07f3e">26924e9</a>,
132
+ <a
133
+ href="https://gitbox.apache.org/repos/asf?p=commons-compress.git;a=commit;h=c51de6cfaec75b21566374158f25e1734c3a94cb">c51de6c</a>,
134
+ <a
135
+ href="https://gitbox.apache.org/repos/asf?p=commons-compress.git;a=commit;h=0aba8b8fd8053ae323f15d736d1762b2161c76a6">0aba8b8</a>,
136
+ <a
137
+ href="https://gitbox.apache.org/repos/asf?p=commons-compress.git;a=commit;h=60d551a748236d7f4651a4ae88d5a351f7c5754b">60d551a</a>,
138
+ <a
139
+ href="https://gitbox.apache.org/repos/asf?p=commons-compress.git;a=commit;h=bf5a5346ae04b9d2a5b0356ca75f11dcc8d94789">bf5a534</a>,
140
+ <a
141
+ href="https://gitbox.apache.org/repos/asf?p=commons-compress.git;a=commit;h=5761493cbaf7a7d608a3b68f4d61aaa822dbeb4f">5761493</a>,
142
+ and <a
143
+ href="https://gitbox.apache.org/repos/asf?p=commons-compress.git;a=commit;h=ae2b27cc011f47f0289cb24a11f2d4f1db711f8a">ae2b27c</a>
144
+ .</p>
145
+
146
+ <p>This issue was first reported to the project's issue tracker as
147
+ <a href="https://issues.apache.org/jira/browse/COMPRESS-542">COMPRESS-542</a>
148
+ by Robin Schimpf.
149
+ Later OSS Fuzz detected ways to exploit this issue which managed to
150
+ escape the initial attempt to fix it.</p>
151
+
152
+ <p>Affects: 1.6 - 1.20</p>
153
+
154
+ <p><b>Low: Denial of Service</b> <a
155
+ href="https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2021-35517">CVE-2021-35517</a></p>
156
+
157
+ <p>When reading a specially crafted TAR archive, Compress
158
+ can be made to allocate large amounts of memory that finally
159
+ leads to an out of memory error even for very small
160
+ inputs. This could be used to mount a denial of service
161
+ attack against services that use Compress' tar package.</p>
162
+
163
+ <p>This was fixed in revisions
164
+ <a
165
+ href="https://gitbox.apache.org/repos/asf?p=commons-compress.git;a=commit;h=d0af873e77d16f41edfef7b69da5c8c35c96a650">d0af873</a>,
166
+ <a
167
+ href="https://gitbox.apache.org/repos/asf?p=commons-compress.git;a=commit;h=7ce1b0796d6cbe1f41b969583bd49f33ae0efef0">7ce1b07</a>
168
+ and <a
169
+ href="https://gitbox.apache.org/repos/asf?p=commons-compress.git;a=commit;h=80124dd9fe4b0a0b2e203ca19aacac8cd0afc96f">80124dd</a>.</p>
170
+
171
+ <p>This issue was discovered by OSS Fuzz.</p>
172
+
173
+ <p>Affects: 1.1 - 1.20</p>
174
+
175
+ <p><b>Low: Denial of Service</b> <a
176
+ href="https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2021-36090">CVE-2021-36090</a></p>
177
+
178
+ <p>When reading a specially crafted ZIP archive, Compress
179
+ can be made to allocate large amounts of memory that finally
180
+ leads to an out of memory error even for very small
181
+ inputs. This could be used to mount a denial of service
182
+ attack against services that use Compress' zip package.</p>
183
+
184
+ <p>This was fixed in revisions
185
+ <a
186
+ href="https://gitbox.apache.org/repos/asf?p=commons-compress.git;a=commit;h=ef5d70b625000e38404194aaab311b771c44efda">ef5d70b</a>
187
+ and <a
188
+ href="https://gitbox.apache.org/repos/asf?p=commons-compress.git;a=commit;h=80124dd9fe4b0a0b2e203ca19aacac8cd0afc96f">80124dd</a>.</p>
189
+
190
+ <p>This issue was discovered by OSS Fuzz.</p>
191
+
192
+ <p>Affects: 1.0 - 1.20</p>
193
+
194
+ </subsection>
195
+
196
+ <subsection name="Fixed in Apache Commons Compress 1.19">
197
+ <p><b>Low: Denial of Service</b> <a
198
+ href="https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2019-12402">CVE-2019-12402</a></p>
199
+
200
+ <p>The file name encoding algorithm used internally in Apache Commons
201
+ Compress can get into an infinite loop when faced with specially
202
+ crafted inputs. This can lead to a denial of service attack if an
203
+ attacker can choose the file names inside of an archive created by
204
+ Compress.</p>
205
+
206
+ <p>This was fixed in revision <a
207
+ href="https://gitbox.apache.org/repos/asf?p=commons-compress.git;a=commitdiff;h=4ad5d80a6272e007f64a6ac66829ca189a8093b9;hp=16a0c84e84b93cc8c107b7ff3080bd11317ab581">4ad5d80a</a>.</p>
208
+
209
+ <p>This was first reported to the Commons Security Team on 22 August
210
+ 2019 and made public on 27 August 2019.</p>
211
+
212
+ <p>Affects: 1.15 - 1.18</p>
213
+
214
+ </subsection>
215
+
216
+ <subsection name="Fixed in Apache Commons Compress 1.18">
217
+ <p><b>Low: Denial of Service</b> <a
218
+ href="https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2018-11771">CVE-2018-11771</a></p>
219
+
220
+ <p>When reading a specially crafted ZIP archive, the read
221
+ method of <code>ZipArchiveInputStream</code> can fail to
222
+ return the correct EOF indication after the end of the
223
+ stream has been reached. When combined with a
224
+ <code>java.io.InputStreamReader</code> this can lead to an
225
+ infinite stream, which can be used to mount a denial of
226
+ service attack against services that use Compress' zip
227
+ package</p>
228
+
229
+ <p>This was fixed in revision <a
230
+ href="https://gitbox.apache.org/repos/asf?p=commons-compress.git;a=blobdiff;f=src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveInputStream.java;h=e1995d7aa51dfac6ae933987fb0b7760c607582b;hp=0a2c1aa0063c620c867715119eae2013c87b5e70;hb=a41ce6892cb0590b2e658704434ac0dbcb6834c8;hpb=64ed6dde03afbef6715fdfdeab5fc04be6192899">a41ce68</a>.</p>
231
+
232
+ <p>This was first reported to the Security Team on 14 June
233
+ 2018 and made public on 16 August 2018.</p>
234
+
235
+ <p>Affects: 1.7 - 1.17</p>
236
+
237
+ </subsection>
238
+
239
+ <subsection name="Fixed in Apache Commons Compress 1.16">
240
+ <p><b>Low: Denial of Service</b> <a
241
+ href="https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2018-1324">CVE-2018-1324</a></p>
242
+
243
+ <p>A specially crafted ZIP archive can be used to cause an
244
+ infinite loop inside of Compress' extra field parser used by
245
+ the <code>ZipFile</code> and
246
+ <code>ZipArchiveInputStream</code> classes. This can be
247
+ used to mount a denial of service attack against services
248
+ that use Compress' zip package.</p>
249
+
250
+ <p>This was fixed in revision <a
251
+ href="https://gitbox.apache.org/repos/asf?p=commons-compress.git;a=blobdiff;f=src/main/java/org/apache/commons/compress/archivers/zip/X0017_StrongEncryptionHeader.java;h=acc3b22346b49845e85b5ef27a5814b69e834139;hp=0feb9c98cc622cde1defa3bbd268ef82b4ae5c18;hb=2a2f1dc48e22a34ddb72321a4db211da91aa933b;hpb=dcb0486fb4cb2b6592c04d6ec2edbd3f690df5f2">2a2f1dc4</a>.</p>
252
+
253
+ <p>This was first reported to the project's JIRA on <a
254
+ href="https://issues.apache.org/jira/browse/COMPRESS-432">19
255
+ December 2017</a>.</p>
256
+
257
+ <p>Affects: 1.11 - 1.15</p>
258
+
259
+ </subsection>
260
+
261
+ <subsection name="Fixed in Apache Commons Compress 1.4.1">
262
+ <p><b>Low: Denial of Service</b> <a
263
+ href="https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2012-2098">CVE-2012-2098</a></p>
264
+
265
+ <p>The bzip2 compressing streams in Apache Commons Compress
266
+ internally use sorting algorithms with unacceptable
267
+ worst-case performance on very repetitive inputs. A
268
+ specially crafted input to Compress'
269
+ <code>BZip2CompressorOutputStream</code> can be used to make
270
+ the process spend a very long time while using up all
271
+ available processing time effectively leading to a denial of
272
+ service.</p>
273
+
274
+ <p>This was fixed in revisions
275
+ <a href="https://svn.apache.org/viewvc?view=revision&amp;revision=1332540">1332540</a>,
276
+ <a href="https://svn.apache.org/viewvc?view=revision&amp;revision=1332552">1332552</a>,
277
+ <a href="https://svn.apache.org/viewvc?view=revision&amp;revision=1333522">1333522</a>,
278
+ <a href="https://svn.apache.org/viewvc?view=revision&amp;revision=1337444">1337444</a>,
279
+ <a href="https://svn.apache.org/viewvc?view=revision&amp;revision=1340715">1340715</a>,
280
+ <a href="https://svn.apache.org/viewvc?view=revision&amp;revision=1340723">1340723</a>,
281
+ <a href="https://svn.apache.org/viewvc?view=revision&amp;revision=1340757">1340757</a>,
282
+ <a href="https://svn.apache.org/viewvc?view=revision&amp;revision=1340786">1340786</a>,
283
+ <a href="https://svn.apache.org/viewvc?view=revision&amp;revision=1340787">1340787</a>,
284
+ <a href="https://svn.apache.org/viewvc?view=revision&amp;revision=1340790">1340790</a>,
285
+ <a href="https://svn.apache.org/viewvc?view=revision&amp;revision=1340795">1340795</a> and
286
+ <a href="https://svn.apache.org/viewvc?view=revision&amp;revision=1340799">1340799</a>.</p>
287
+
288
+ <p>This was first reported to the Security Team on 12 April
289
+ 2012 and made public on 23 May 2012.</p>
290
+
291
+ <p>Affects: 1.0 - 1.4</p>
292
+
293
+ </subsection>
294
+ </section>
295
+
296
+ <section name="Errors and Omissions">
297
+ <p>Please report any errors or omissions to <a
298
+ href="mail-lists.html">the dev mailing list</a>.</p>
299
+ </section>
300
+ </body>
301
+ </document>
local-test-commons-compress-full-01-vuln_2/afc-commons-compress/src/test/resources/COMPRESS-379.jar ADDED
Binary file (222 Bytes). View file
 
local-test-commons-compress-full-01-vuln_2/afc-commons-compress/src/test/resources/COMPRESS-382 ADDED
Binary file (19 Bytes). View file
 
local-test-commons-compress-full-01-vuln_2/afc-commons-compress/src/test/resources/COMPRESS-386 ADDED
@@ -0,0 +1 @@
 
 
1
+ �B
local-test-commons-compress-full-01-vuln_2/afc-commons-compress/src/test/resources/bla-multi.7z.001 ADDED
Binary file (512 Bytes). View file
 
local-test-commons-compress-full-01-vuln_2/afc-commons-compress/src/test/resources/bla.pack ADDED
Binary file (1.65 kB). View file
 
local-test-commons-compress-full-01-vuln_2/afc-commons-compress/src/test/resources/bla.unix.arj ADDED
Binary file (257 Bytes). View file
 
local-test-commons-compress-full-01-vuln_2/afc-commons-compress/src/test/resources/longfile_bsd.ar ADDED
@@ -0,0 +1,5 @@
 
 
 
 
 
 
1
+ !<arch>
2
+ #1/28 1311256511 1000 1000 100644 42 `
3
+ this_is_a_long_file_name.txtHello, world!
4
+ #1/36 1454694016 1000 1000 100664 40 `
5
+ this_is_a_long_file_name_as_well.txtBye
local-test-commons-compress-full-01-vuln_2/afc-commons-compress/src/test/resources/longfile_gnu.ar ADDED
@@ -0,0 +1,8 @@
 
 
 
 
 
 
 
 
 
1
+ !<arch>
2
+ // 68 `
3
+ this_is_a_long_file_name.txt/
4
+ this_is_a_long_file_name_as_well.txt/
5
+ /0 1454693980 1000 1000 100664 14 `
6
+ Hello, world!
7
+ /30 1454694016 1000 1000 100664 4 `
8
+ Bye
local-test-commons-compress-full-01-vuln_2/afc-commons-compress/src/test/resources/test with spaces.txt ADDED
@@ -0,0 +1,11 @@
 
 
 
 
 
 
 
 
 
 
 
 
1
+ TEST WITH SPACES IN FILENAME
2
+ 111111111111111111111111111000101011
3
+ 111111111111111111111111111000101011
4
+ 111111111111111111111111111000101011
5
+ 111111111111111111111111111000101011
6
+ 111111111111111111111111111000101011
7
+ 111111111111111111111111111000101011
8
+ 111111111111111111111111111000101011
9
+ 111111111111111111111111111000101011
10
+ 111111111111111111111111111000101011
11
+ 111111111111111111111111111000101011
local-test-commons-compress-full-01-vuln_2/afc-commons-compress/src/test/resources/test1.xml ADDED
@@ -0,0 +1,4 @@
 
 
 
 
 
1
+ <?xml version = '1.0'?>
2
+ <!DOCTYPE connections>
3
+ <connections>
4
+ </connections>
local-test-commons-compress-full-01-vuln_2/afc-commons-compress/src/test/resources/test3.xml ADDED
@@ -0,0 +1,10 @@
 
 
 
 
 
 
 
 
 
 
 
1
+ <?xml version = '1.0'?>
2
+ <!DOCTYPE connections>
3
+ <text>
4
+ Lorem ipsum dolor sit amet, consetetur sadipscing elitr,
5
+ sed diam nonumy eirmod tempor invidunt ut labore et dolore magna aliquyam erat,
6
+ sed diam voluptua.
7
+ At vero eos et accusam et justo duo dolores et ea rebum.
8
+ Stet clita kasd gubergren, no sea takimata sanctus est
9
+ Lorem ipsum dolor sit amet.
10
+ </text>
local-test-commons-compress-full-01-vuln_2/afc-commons-compress/src/test/resources/test4.xml ADDED
@@ -0,0 +1,6 @@
 
 
 
 
 
 
 
1
+ <?xml version = '1.0'?>
2
+ <!DOCTYPE connections>
3
+ <connections>
4
+ German Umlauts: ÜÄÖß
5
+ Stored as UTF-8 (Mac OSX 10.4.x)
6
+ </connections>
local-test-commons-compress-full-01-vuln_2/afc-commons-compress/src/test/resources/testAIFF.aif ADDED
Binary file (3.89 kB). View file
 
local-test-commons-compress-full-01-vuln_2/afc-commons-compress/src/test/resources/testCompress209.doc ADDED
Binary file (23.6 kB). View file
 
local-test-commons-compress-full-01-vuln_2/afc-commons-compress/src/test/resources/zipbomb.xlsx ADDED
Binary file (18.3 kB). View file