Skip to content

Commit 9a743bd

Browse files
authored
HADOOP-19315. Upgrade Apache Avro to 1.11.4 (#7128)
* All field access is now via setter/getter methods.
* To use Avro to marshal Serializable objects, the packages they are in must be declared in the system property "org.apache.avro.SERIALIZABLE_PACKAGES".

This is required to address:
- CVE-2024-47561
- CVE-2023-39410

This change is not backwards compatible.

Contributed by Dominik Diedrich
1 parent 9657276 commit 9a743bd

File tree

12 files changed

+93
-28
lines changed

12 files changed

+93
-28
lines changed

LICENSE-binary

+1-1
Original file line numberDiff line numberDiff line change
@@ -297,7 +297,7 @@ io.swagger:swagger-annotations:1.5.4
297297
javax.inject:javax.inject:1
298298
net.java.dev.jna:jna:5.2.0
299299
net.minidev:accessors-smart:1.2
300-
org.apache.avro:avro:1.9.2
300+
org.apache.avro:avro:1.11.4
301301
org.apache.avro:avro:1.11.3
302302
org.apache.commons:commons-compress:1.26.1
303303
org.apache.commons:commons-configuration2:2.10.1
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,35 @@
1+
/**
2+
* Licensed to the Apache Software Foundation (ASF) under one
3+
* or more contributor license agreements. See the NOTICE file
4+
* distributed with this work for additional information
5+
* regarding copyright ownership. The ASF licenses this file
6+
* to you under the Apache License, Version 2.0 (the
7+
* "License"); you may not use this file except in compliance
8+
* with the License. You may obtain a copy of the License at
9+
*
10+
* http://www.apache.org/licenses/LICENSE-2.0
11+
*
12+
* Unless required by applicable law or agreed to in writing, software
13+
* distributed under the License is distributed on an "AS IS" BASIS,
14+
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15+
* See the License for the specific language governing permissions and
16+
* limitations under the License.
17+
*/
18+
19+
package org.apache.hadoop.constants;
20+
21+
/**
22+
* Evolving config constants class used in various hadoop tests.
23+
*/
24+
public final class ConfigConstants {
25+
26+
private ConfigConstants() {}
27+
28+
/**
29+
* System property name for the avro dependency.
30+
* This property is used to configure trusted packages,
31+
* which the avro dependency can use for serialization.
32+
*/
33+
public static final String CONFIG_AVRO_SERIALIZABLE_PACKAGES =
34+
"org.apache.avro.SERIALIZABLE_PACKAGES";
35+
}
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,22 @@
1+
/**
2+
* Licensed to the Apache Software Foundation (ASF) under one
3+
* or more contributor license agreements. See the NOTICE file
4+
* distributed with this work for additional information
5+
* regarding copyright ownership. The ASF licenses this file
6+
* to you under the Apache License, Version 2.0 (the
7+
* "License"); you may not use this file except in compliance
8+
* with the License. You may obtain a copy of the License at
9+
*
10+
* http://www.apache.org/licenses/LICENSE-2.0
11+
*
12+
* Unless required by applicable law or agreed to in writing, software
13+
* distributed under the License is distributed on an "AS IS" BASIS,
14+
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15+
* See the License for the specific language governing permissions and
16+
* limitations under the License.
17+
*/
18+
19+
/**
20+
* Evolving config constants class used in various hadoop tests.
21+
*/
22+
package org.apache.hadoop.constants;

hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestPath.java

+4
Original file line numberDiff line numberDiff line change
@@ -17,6 +17,7 @@
1717
*/
1818

1919
package org.apache.hadoop.fs;
20+
2021
import org.junit.Assert;
2122
import org.junit.Test;
2223

@@ -30,6 +31,7 @@
3031
import java.util.Arrays;
3132

3233
import org.apache.hadoop.conf.Configuration;
34+
import org.apache.hadoop.constants.ConfigConstants;
3335
import org.apache.hadoop.io.AvroTestUtil;
3436
import org.apache.hadoop.test.GenericTestUtils;
3537
import org.apache.hadoop.util.Shell;
@@ -404,6 +406,8 @@ public void testGetName() {
404406

405407
@Test (timeout = 30000)
406408
public void testAvroReflect() throws Exception {
409+
// Avro expects explicitly stated, trusted packages used for (de-)serialization
410+
System.setProperty(ConfigConstants.CONFIG_AVRO_SERIALIZABLE_PACKAGES, "org.apache.hadoop.fs");
407411
AvroTestUtil.testReflect
408412
(new Path("foo"),
409413
"{\"type\":\"string\",\"java-class\":\"org.apache.hadoop.fs.Path\"}");

hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestText.java

+4
Original file line numberDiff line numberDiff line change
@@ -24,6 +24,8 @@
2424
import java.nio.charset.CharacterCodingException;
2525
import java.nio.charset.StandardCharsets;
2626
import java.util.Random;
27+
28+
import org.apache.hadoop.constants.ConfigConstants;
2729
import org.apache.hadoop.thirdparty.com.google.common.primitives.Bytes;
2830
import org.junit.Test;
2931

@@ -344,6 +346,8 @@ public void testConcurrentEncodeDecode() throws Exception{
344346

345347
@Test
346348
public void testAvroReflect() throws Exception {
349+
// Avro expects explicitly stated, trusted packages used for (de-)serialization
350+
System.setProperty(ConfigConstants.CONFIG_AVRO_SERIALIZABLE_PACKAGES, "org.apache.hadoop.io");
347351
AvroTestUtil.testReflect
348352
(new Text("foo"),
349353
"{\"type\":\"string\",\"java-class\":\"org.apache.hadoop.io.Text\"}");

hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/serializer/avro/TestAvroSerialization.java

+1-1
Original file line numberDiff line numberDiff line change
@@ -33,7 +33,7 @@ public class TestAvroSerialization {
3333
@Test
3434
public void testSpecific() throws Exception {
3535
AvroRecord before = new AvroRecord();
36-
before.intField = 5;
36+
before.setIntField(5);
3737
AvroRecord after = SerializationTestUtil.testSerialization(conf, before);
3838
assertEquals(before, after);
3939
}

hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobQueueChangeEvent.java

+6-6
Original file line numberDiff line numberDiff line change
@@ -20,7 +20,6 @@
2020

2121
import java.util.Set;
2222

23-
import org.apache.avro.util.Utf8;
2423
import org.apache.hadoop.mapreduce.JobID;
2524
import org.apache.hadoop.util.StringUtils;
2625
import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEvent;
@@ -31,8 +30,8 @@ public class JobQueueChangeEvent implements HistoryEvent {
3130
private JobQueueChange datum = new JobQueueChange();
3231

3332
public JobQueueChangeEvent(JobID id, String queueName) {
34-
datum.jobid = new Utf8(id.toString());
35-
datum.jobQueueName = new Utf8(queueName);
33+
datum.setJobid(id.toString());
34+
datum.setJobQueueName(queueName);
3635
}
3736

3837
JobQueueChangeEvent() { }
@@ -54,13 +53,14 @@ public void setDatum(Object datum) {
5453

5554
/** Get the Job ID */
5655
public JobID getJobId() {
57-
return JobID.forName(datum.jobid.toString());
56+
return JobID.forName(datum.getJobid().toString());
5857
}
5958

6059
/** Get the new Job queue name */
6160
public String getJobQueueName() {
62-
if (datum.jobQueueName != null) {
63-
return datum.jobQueueName.toString();
61+
java.lang.CharSequence jobQueueName = datum.getJobQueueName();
62+
if (jobQueueName != null) {
63+
return jobQueueName.toString();
6464
}
6565
return null;
6666
}

hadoop-project/pom.xml

+1-1
Original file line numberDiff line numberDiff line change
@@ -63,7 +63,7 @@
6363
<java.security.egd>file:///dev/urandom</java.security.egd>
6464

6565
<!-- avro version -->
66-
<avro.version>1.9.2</avro.version>
66+
<avro.version>1.11.4</avro.version>
6767

6868
<!-- jersey version -->
6969
<jersey.version>1.19.4</jersey.version>

hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JobBuilder.java

+8-8
Original file line numberDiff line numberDiff line change
@@ -460,7 +460,7 @@ private void processTaskFinishedEvent(TaskFinishedEvent event) {
460460
}
461461
task.setFinishTime(event.getFinishTime());
462462
task.setTaskStatus(getPre21Value(event.getTaskStatus()));
463-
task.incorporateCounters(((TaskFinished) event.getDatum()).counters);
463+
task.incorporateCounters(((TaskFinished) event.getDatum()).getCounters());
464464
}
465465

466466
private void processTaskFailedEvent(TaskFailedEvent event) {
@@ -472,7 +472,7 @@ private void processTaskFailedEvent(TaskFailedEvent event) {
472472
task.setFinishTime(event.getFinishTime());
473473
task.setTaskStatus(getPre21Value(event.getTaskStatus()));
474474
TaskFailed t = (TaskFailed)(event.getDatum());
475-
task.putDiagnosticInfo(t.error.toString());
475+
task.putDiagnosticInfo(t.getError().toString());
476476
// killed task wouldn't have failed attempt.
477477
if (t.getFailedDueToAttempt() != null) {
478478
task.putFailedDueToAttemptId(t.getFailedDueToAttempt().toString());
@@ -542,7 +542,7 @@ private void processTaskAttemptFinishedEvent(TaskAttemptFinishedEvent event) {
542542
}
543543
attempt.setFinishTime(event.getFinishTime());
544544
attempt
545-
.incorporateCounters(((TaskAttemptFinished) event.getDatum()).counters);
545+
.incorporateCounters(((TaskAttemptFinished) event.getDatum()).getCounters());
546546
}
547547

548548
private void processReduceAttemptFinishedEvent(
@@ -568,7 +568,7 @@ private void processReduceAttemptFinishedEvent(
568568
attempt.setShuffleFinished(event.getShuffleFinishTime());
569569
attempt.setSortFinished(event.getSortFinishTime());
570570
attempt
571-
.incorporateCounters(((ReduceAttemptFinished) event.getDatum()).counters);
571+
.incorporateCounters(((ReduceAttemptFinished) event.getDatum()).getCounters());
572572
attempt.arraySetClockSplits(event.getClockSplits());
573573
attempt.arraySetCpuUsages(event.getCpuUsages());
574574
attempt.arraySetVMemKbytes(event.getVMemKbytes());
@@ -596,7 +596,7 @@ private void processMapAttemptFinishedEvent(MapAttemptFinishedEvent event) {
596596
// is redundant, but making this will add future-proofing.
597597
attempt.setFinishTime(event.getFinishTime());
598598
attempt
599-
.incorporateCounters(((MapAttemptFinished) event.getDatum()).counters);
599+
.incorporateCounters(((MapAttemptFinished) event.getDatum()).getCounters());
600600
attempt.arraySetClockSplits(event.getClockSplits());
601601
attempt.arraySetCpuUsages(event.getCpuUsages());
602602
attempt.arraySetVMemKbytes(event.getVMemKbytes());
@@ -661,11 +661,11 @@ private void processJobFinishedEvent(JobFinishedEvent event) {
661661

662662
JobFinished job = (JobFinished)event.getDatum();
663663
Map<String, Long> countersMap =
664-
JobHistoryUtils.extractCounters(job.totalCounters);
664+
JobHistoryUtils.extractCounters(job.getTotalCounters());
665665
result.putTotalCounters(countersMap);
666-
countersMap = JobHistoryUtils.extractCounters(job.mapCounters);
666+
countersMap = JobHistoryUtils.extractCounters(job.getMapCounters());
667667
result.putMapCounters(countersMap);
668-
countersMap = JobHistoryUtils.extractCounters(job.reduceCounters);
668+
countersMap = JobHistoryUtils.extractCounters(job.getReduceCounters());
669669
result.putReduceCounters(countersMap);
670670
}
671671

hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JobHistoryUtils.java

+3-3
Original file line numberDiff line numberDiff line change
@@ -157,9 +157,9 @@ static boolean isJobConfXml(String fileName) {
157157
static Map<String, Long> extractCounters(JhCounters counters) {
158158
Map<String, Long> countersMap = new HashMap<String, Long>();
159159
if (counters != null) {
160-
for (JhCounterGroup group : counters.groups) {
161-
for (JhCounter counter : group.counts) {
162-
countersMap.put(counter.name.toString(), counter.value);
160+
for (JhCounterGroup group : counters.getGroups()) {
161+
for (JhCounter counter : group.getCounts()) {
162+
countersMap.put(counter.getName().toString(), counter.getValue());
163163
}
164164
}
165165
}

hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedTask.java

+4-4
Original file line numberDiff line numberDiff line change
@@ -268,11 +268,11 @@ private static void incorporateCounter(SetField thunk, JhCounters counters,
268268
String counterName) {
269269
counterName = canonicalizeCounterName(counterName);
270270

271-
for (JhCounterGroup group : counters.groups) {
272-
for (JhCounter counter : group.counts) {
271+
for (JhCounterGroup group : counters.getGroups()) {
272+
for (JhCounter counter : group.getCounts()) {
273273
if (counterName
274-
.equals(canonicalizeCounterName(counter.name.toString()))) {
275-
thunk.set(counter.value);
274+
.equals(canonicalizeCounterName(counter.getName().toString()))) {
275+
thunk.set(counter.getValue());
276276
return;
277277
}
278278
}

hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedTaskAttempt.java

+4-4
Original file line numberDiff line numberDiff line change
@@ -636,11 +636,11 @@ private static void incorporateCounter(SetField thunk, JhCounters counters,
636636
String counterName) {
637637
counterName = canonicalizeCounterName(counterName);
638638

639-
for (JhCounterGroup group : counters.groups) {
640-
for (JhCounter counter : group.counts) {
639+
for (JhCounterGroup group : counters.getGroups()) {
640+
for (JhCounter counter : group.getCounts()) {
641641
if (counterName
642-
.equals(canonicalizeCounterName(counter.name.toString()))) {
643-
thunk.set(counter.value);
642+
.equals(canonicalizeCounterName(counter.getName().toString()))) {
643+
thunk.set(counter.getValue());
644644
return;
645645
}
646646
}

0 commit comments

Comments (0)