<!--
   Licensed to the Apache Software Foundation (ASF) under one or more
   contributor license agreements.  See the NOTICE file distributed with
   this work for additional information regarding copyright ownership.
   The ASF licenses this file to You under the Apache License, Version 2.0
   (the "License"); you may not use this file except in compliance with
   the License.  You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
-->
<FindBugsFilter>
  <Match>
    <Package name="org.apache.hadoop.record.compiler.generated" />
  </Match>
  <Match>
    <Package name="org.apache.hadoop.security.proto" />
  </Match>
  <Match>
    <Package name="org.apache.hadoop.tools.proto" />
  </Match>
  <Match>
    <Bug pattern="EI_EXPOSE_REP" />
  </Match>
  <Match>
    <Bug pattern="EI_EXPOSE_REP2" />
  </Match>
  <Match>
    <Bug pattern="SE_COMPARATOR_SHOULD_BE_SERIALIZABLE" />
  </Match>
  <Match>
    <Class name="~.*_jsp" />
    <Bug pattern="DLS_DEAD_LOCAL_STORE" />
  </Match>
  <Match>
    <Field name="_jspx_dependants" />
    <Bug pattern="UWF_UNWRITTEN_FIELD" />
  </Match>
  <!--
    The inconsistent synchronization on Client.Connection.ipcStreams is
    intentional, so that a connection can be closed instantly.
  -->
  <Match>
    <Class name="org.apache.hadoop.ipc.Client$Connection" />
    <Field name="ipcStreams" />
    <Bug pattern="IS2_INCONSISTENT_SYNC" />
  </Match>
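  <!--
    Illustrative sketch of the pattern suppressed above (hypothetical names,
    not the actual Client.Connection code): normal writes hold the lock, while
    an abort path closes the stream without taking it so the close is not
    delayed behind an in-flight write. FindBugs reports the mixed locking as
    IS2_INCONSISTENT_SYNC.

      class ConnectionSketch {
        private final Object sendLock = new Object();
        private java.io.OutputStream out;            // accessed with and without the lock

        void send(byte[] data) throws java.io.IOException {
          synchronized (sendLock) {                   // regular writes are synchronized
            out.write(data);
          }
        }

        void abort() throws java.io.IOException {
          out.close();                                // deliberately unsynchronized: close at once
        }
      }
  -->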
  <!--
    Any further SaslException raised during cleanup should be ignored, and the
    original exception should be re-thrown.
  -->
  <Match>
    <Class name="org.apache.hadoop.security.SaslRpcClient" />
    <Bug pattern="DE_MIGHT_IGNORE" />
  </Match>
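  <!--
    Illustrative sketch of that cleanup pattern (hypothetical names, not the
    actual SaslRpcClient code): a secondary SaslException thrown while
    disposing the client is swallowed so the original failure reaches the
    caller, which is what DE_MIGHT_IGNORE flags.

      class CleanupSketch {
        void connect(javax.security.sasl.SaslClient sasl) throws java.io.IOException {
          try {
            throw new java.io.IOException("handshake failed");   // stand-in for a real failure
          } catch (java.io.IOException original) {
            try {
              sasl.dispose();                                     // best-effort cleanup
            } catch (javax.security.sasl.SaslException ignored) {
              // deliberately ignored; the original exception is more useful
            }
            throw original;                                       // re-throw the original failure
          }
        }
      }
  -->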
  <!--
    Ignore cross-site scripting (XSS) vulnerability warnings.
  -->
  <Match>
    <Package name="~org.apache.hadoop.mapred.*" />
    <Bug code="XSS" />
  </Match>
  <Match>
    <Class name="org.apache.hadoop.mapred.taskdetails_jsp" />
    <Bug code="HRS" />
  </Match>
  <!--
    Ignore warnings where a child class has the same name as its superclass.
    Classes based on the old API shadow names from the new API. These
    exclusions should be removed after HADOOP-1.0.
  -->
  <Match>
    <Class name="~org.apache.hadoop.mapred.*" />
    <Bug pattern="NM_SAME_SIMPLE_NAME_AS_SUPERCLASS" />
  </Match>
  <!--
    Ignore warnings about the use of System.exit. These calls are required
    and have been well thought out.
  -->
  <Match>
    <Class name="org.apache.hadoop.mapred.Child$2" />
    <Method name="run" />
    <Bug pattern="DM_EXIT" />
  </Match>
  <Match>
    <Class name="org.apache.hadoop.mapred.JobTracker" />
    <Method name="addHostToNodeMapping" />
    <Bug pattern="DM_EXIT" />
  </Match>
  <Match>
    <Class name="org.apache.hadoop.mapred.Task" />
    <Or>
      <Method name="done" />
      <Method name="commit" />
      <Method name="statusUpdate" />
    </Or>
    <Bug pattern="DM_EXIT" />
  </Match>
  <Match>
    <Class name="org.apache.hadoop.mapred.Task$TaskReporter" />
    <Method name="run" />
    <Bug pattern="DM_EXIT" />
  </Match>
  <Match>
    <Class name="org.apache.hadoop.util.ProgramDriver" />
    <Method name="driver" />
    <Bug pattern="DM_EXIT" />
  </Match>
  <Match>
    <Class name="org.apache.hadoop.util.RunJar" />
    <Method name="run" />
    <Bug pattern="DM_EXIT" />
  </Match>
  <!--
    We need to cast between old-API and new-API objects.
  -->
  <Match>
    <Class name="org.apache.hadoop.mapred.OutputCommitter" />
    <Bug pattern="BC_UNCONFIRMED_CAST" />
  </Match>
  <!--
    Invoking the name getter from the inner class is intentional.
  -->
  <Match>
    <Class name="org.apache.hadoop.mapred.TaskTracker$MapEventsFetcherThread" />
    <Method name="run" />
    <Bug pattern="IA_AMBIGUOUS_INVOCATION_OF_INHERITED_OR_OUTER_METHOD" />
  </Match>
  <Match>
    <Class name="org.apache.hadoop.mapred.FileOutputCommitter" />
    <Bug pattern="NM_WRONG_PACKAGE_INTENTIONAL" />
  </Match>
  <!--
    Ignoring this warning, as resolving it would require a non-trivial code change.
  -->
  <Match>
    <Class name="org.apache.hadoop.mapred.lib.aggregate.ValueAggregatorBaseDescriptor" />
    <Method name="configure" />
    <Field name="maxNumItems" />
    <Bug pattern="ST_WRITE_TO_STATIC_FROM_INSTANCE_METHOD" />
  </Match>
  <!--
    Comes from org.apache.jasper.runtime.ResourceInjector; not much can be done about it.
  -->
  <Match>
    <Class name="org.apache.hadoop.mapred.jobqueue_005fdetails_jsp" />
    <Field name="_jspx_resourceInjector" />
    <Bug pattern="SE_BAD_FIELD" />
  </Match>
  <!--
    Storing textInputFormat and then passing it as a parameter. Safe to ignore.
  -->
  <Match>
    <Class name="org.apache.hadoop.mapred.lib.aggregate.ValueAggregatorJob" />
    <Method name="createValueAggregatorJob" />
    <Bug pattern="DLS_DEAD_STORE_OF_CLASS_LITERAL" />
  </Match>
  <!--
    This can be removed after upgrading to FindBugs 1.3.8.
  -->
  <Match>
    <Class name="org.apache.hadoop.mapred.lib.db.DBInputFormat" />
    <Method name="getSplits" />
    <Bug pattern="DLS_DEAD_LOCAL_STORE" />
  </Match>
  <!--
    This is a spurious warning; just ignore it.
  -->
  <Match>
    <Class name="org.apache.hadoop.mapred.MapTask$MapOutputBuffer" />
    <Field name="kvindex" />
    <Bug pattern="IS2_INCONSISTENT_SYNC" />
  </Match>
  <!--
    core changes
  -->
  <Match>
    <Class name="~org.apache.hadoop.*" />
    <Bug code="MS" />
  </Match>
  <Match>
    <Class name="org.apache.hadoop.fs.FileSystem" />
    <Method name="checkPath" />
    <Bug pattern="ES_COMPARING_STRINGS_WITH_EQ" />
  </Match>
  <Match>
    <Class name="org.apache.hadoop.io.Closeable" />
    <Bug pattern="NM_SAME_SIMPLE_NAME_AS_INTERFACE" />
  </Match>
  <Match>
    <Class name="org.apache.hadoop.security.AccessControlException" />
    <Bug pattern="NM_SAME_SIMPLE_NAME_AS_SUPERCLASS" />
  </Match>
  <Match>
    <Class name="org.apache.hadoop.record.meta.Utils" />
    <Method name="skip" />
    <Bug pattern="BC_UNCONFIRMED_CAST" />
  </Match>
  <!--
    The compareTo method is a dummy that just throws exceptions, so there is
    no need to override equals. Ignore.
  -->
  <Match>
    <Class name="org.apache.hadoop.record.meta.RecordTypeInfo" />
    <Bug pattern="EQ_COMPARETO_USE_OBJECT_EQUALS" />
  </Match>
  <Match>
    <Class name="org.apache.hadoop.util.ProcfsBasedProcessTree" />
    <Bug pattern="DMI_HARDCODED_ABSOLUTE_FILENAME" />
  </Match>
  <!--
    Streaming, Examples
  -->
  <Match>
    <Class name="org.apache.hadoop.streaming.StreamUtil$TaskId" />
    <Bug pattern="URF_UNREAD_FIELD" />
  </Match>
  <Match>
    <Class name="org.apache.hadoop.examples.DBCountPageView" />
    <Method name="verify" />
    <Bug pattern="OBL_UNSATISFIED_OBLIGATION" />
  </Match>
  <Match>
    <Class name="org.apache.hadoop.examples.ContextFactory" />
    <Method name="setAttributes" />
    <Bug pattern="OBL_UNSATISFIED_OBLIGATION" />
  </Match>
  <!--
    TFile
  -->
  <Match>
    <Class name="org.apache.hadoop.io.file.tfile.Chunk$ChunkDecoder" />
    <Method name="close" />
    <Bug pattern="SR_NOT_CHECKED" />
  </Match>
  <!--
    This applies to the ChunkDecoder match above: the purpose of skip() is to
    drain the remaining bytes of the chunk-encoded stream (one chunk at a
    time), and the termination condition is checked by checkEOF(), so the
    return value of skip() does not need to be checked.
  -->
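  <!--
    Minimal sketch of that drain loop (hypothetical helper names, not the
    actual ChunkDecoder code): skip() is called for its side effect and the
    loop terminates through an EOF check rather than through the count that
    skip() returns, which is the SR_NOT_CHECKED pattern.

      class DrainSketch {
        void close(java.io.InputStream in) throws java.io.IOException {
          boolean eof = false;
          while (!eof) {
            in.skip(remainingBytesInChunk());   // return value deliberately ignored
            eof = checkEOF(in);                 // termination decided here
          }
        }
        private long remainingBytesInChunk() { return 0L; }
        private boolean checkEOF(java.io.InputStream in) { return true; }
      }
  -->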
  <Match>
    <Class name="org.apache.hadoop.io.file.tfile.Utils" />
    <Method name="writeVLong" />
    <Bug pattern="SF_SWITCH_FALLTHROUGH" />
  </Match>
  <Match>
    <Class name="org.apache.hadoop.io.Text" />
    <Method name="bytesToCodePoint" />
    <Bug pattern="SF_SWITCH_NO_DEFAULT" />
  </Match>
  <Match>
    <Class name="org.apache.hadoop.util.PureJavaCrc32C" />
    <Method name="update" />
    <Bug pattern="SF_SWITCH_NO_DEFAULT" />
  </Match>
  <!--
    The switch fall-through in the matches above is intentional, for
    performance reasons.
  -->
  <Match>
    <Class name="org.apache.hadoop.log.EventCounter" />
    <!-- backward compatibility -->
    <Bug pattern="NM_SAME_SIMPLE_NAME_AS_SUPERCLASS" />
  </Match>
  <Match>
    <Class name="org.apache.hadoop.metrics.jvm.EventCounter" />
    <!-- backward compatibility -->
    <Bug pattern="NM_SAME_SIMPLE_NAME_AS_SUPERCLASS" />
  </Match>
  <Match>
    <!-- protobuf generated code -->
    <Class name="~org\.apache\.hadoop\.ipc\.protobuf\.ProtobufRpcEngineProtos.*" />
  </Match>
  <Match>
    <!-- protobuf generated code -->
    <Class name="~org\.apache\.hadoop\.ipc\.protobuf\.ProtocolInfoProtos.*" />
  </Match>
  <Match>
    <!-- protobuf generated code -->
    <Class name="~org\.apache\.hadoop\.ipc\.protobuf\.IpcConnectionContextProtos.*" />
  </Match>
  <Match>
    <!-- protobuf generated code -->
    <Class name="~org\.apache\.hadoop\.ipc\.protobuf\.RpcHeaderProtos.*" />
  </Match>
  <Match>
    <!-- protobuf generated code -->
    <Class name="~org\.apache\.hadoop\.ha\.proto\.HAServiceProtocolProtos.*" />
  </Match>
  <Match>
    <!-- protobuf generated code -->
    <Class name="~org\.apache\.hadoop\.ha\.proto\.ZKFCProtocolProtos.*" />
  </Match>
  <Match>
    <!-- protobuf generated code -->
    <Class name="~org\.apache\.hadoop\.security\.proto\.SecurityProtos.*" />
  </Match>
  <Match>
    <!-- protobuf generated code -->
    <Class name="~org\.apache\.hadoop\.ipc\.protobuf\.TestProtos.*" />
  </Match>
  <Match>
    <!-- protobuf generated code -->
    <Class name="~org\.apache\.hadoop\.ipc\.proto\.RefreshCallQueueProtocolProtos.*" />
  </Match>
  <Match>
    <!-- protobuf generated code -->
    <Class name="~org\.apache\.hadoop\.ipc\.proto\.GenericRefreshProtocolProtos.*" />
  </Match>
  <Match>
    <!-- protobuf generated code -->
    <Class name="~org\.apache\.hadoop\.tracing\.TraceAdminPB.*" />
  </Match>
  <!--
    Manually checked; FindBugs misses that the child thread explicitly
    synchronizes on the parent's intrinsic lock.
  -->
  <Match>
    <Class name="org.apache.hadoop.metrics2.lib.MutableQuantiles" />
    <Field name="previousSnapshot" />
    <Bug pattern="IS2_INCONSISTENT_SYNC" />
  </Match>
  <!--
    The method uses a generic type T that extends two other types, T1 and T2.
    FindBugs complains about the cast from T1 to T2.
  -->
  <Match>
    <Class name="org.apache.hadoop.fs.DelegationTokenRenewer" />
    <Method name="removeRenewAction" />
    <Bug pattern="BC_UNCONFIRMED_CAST" />
  </Match>
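  <!--
    Illustrative sketch of that generics pattern (hypothetical names, not the
    actual DelegationTokenRenewer code): T is bounded by both T1 and T2, so a
    reference seen through the first bound can safely be cast to the second,
    but FindBugs reports BC_UNCONFIRMED_CAST because it does not track the
    double bound.

      class GenericCastSketch {
        interface Renewable { void renew(); }

        static <T extends java.io.Closeable & Renewable> void remove(T item) {
          java.io.Closeable viaFirstBound = item;     // viewed through T1
          ((Renewable) viaFirstBound).renew();        // cast to T2; safe, but flagged
        }
      }
  -->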
  <!--
    The switch case for INITIATE is expected to fall through to RESPONSE in
    order to process the initial SASL response token included in the INITIATE
    message.
  -->
  <Match>
    <Class name="org.apache.hadoop.ipc.Server$Connection" />
    <Method name="processSaslMessage" />
    <Bug pattern="SF_SWITCH_FALLTHROUGH" />
  </Match>
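  <!--
    Minimal sketch of that fall-through (hypothetical names, not the actual
    Server$Connection code): the INITIATE case does its own setup and then
    deliberately continues into the RESPONSE case, which FindBugs reports as
    SF_SWITCH_FALLTHROUGH.

      class SaslSwitchSketch {
        enum SaslState { NEGOTIATE, INITIATE, RESPONSE, SUCCESS }

        void process(SaslState state, byte[] token) {
          switch (state) {
            case INITIATE:
              chooseMechanism(token);
              // intentional fall-through: INITIATE carries the first response token
            case RESPONSE:
              evaluateToken(token);
              break;
            default:
              break;
          }
        }
        private void chooseMechanism(byte[] token) { }
        private void evaluateToken(byte[] token) { }
      }
  -->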
  <!-- WA_NOT_IN_LOOP is a false positive for util.concurrent.AsyncGet$Util.wait. -->
  <Match>
    <Class name="org.apache.hadoop.util.concurrent.AsyncGet$Util" />
    <Method name="wait" />
    <Bug pattern="WA_NOT_IN_LOOP" />
  </Match>
  <Match>
    <Class name="org.apache.hadoop.service.AbstractService" />
    <Method name="stop" />
    <Bug code="JLM" />
  </Match>
  <Match>
    <Class name="org.apache.hadoop.service.AbstractService" />
    <Method name="waitForServiceToStop" />
    <Bug code="JLM" />
  </Match>
  <!--
    The OpenStack Swift FS module closes streams in a different method from
    the one in which they are opened.
  -->
  <Match>
    <Class name="org.apache.hadoop.fs.swift.snative.SwiftNativeOutputStream" />
    <Method name="uploadFileAttempt" />
    <Bug pattern="OBL_UNSATISFIED_OBLIGATION" />
  </Match>
  <Match>
    <Class name="org.apache.hadoop.fs.swift.snative.SwiftNativeOutputStream" />
    <Method name="uploadFilePartAttempt" />
    <Bug pattern="OBL_UNSATISFIED_OBLIGATION" />
  </Match>
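  <!--
    Illustrative sketch of that split lifecycle (hypothetical names and body,
    not the actual SwiftNativeOutputStream code): the stream obligation is
    created in one method and discharged in another, which the method-local
    OBL_UNSATISFIED_OBLIGATION analysis does not see.

      class SplitLifecycleSketch {
        private java.io.InputStream upload;

        void openForUpload(java.io.File f) throws java.io.IOException {
          upload = new java.io.FileInputStream(f);   // obligation created here
        }

        void finishUpload() throws java.io.IOException {
          upload.close();                            // obligation discharged elsewhere
        }
      }
  -->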
  <!-- Code comes from the Maven sources; the null value is checked on the callee side. -->
  <Match>
    <Class name="org.apache.hadoop.util.ComparableVersion$ListItem" />
    <Method name="compareTo" />
    <Bug code="NP" />
  </Match>
  <Match>
    <Class name="org.apache.hadoop.util.HttpExceptionUtils" />
    <Method name="validateResponse" />
    <Bug pattern="REC_CATCH_EXCEPTION" />
  </Match>
  <Match>
    <Class name="org.apache.hadoop.conf.Configuration" />
    <Method name="loadProperty" />
    <Bug pattern="NP_NULL_PARAM_DEREF" />
  </Match>
  <Match>
    <Class name="org.apache.hadoop.ipc.ExternalCall" />
    <Field name="done" />
    <Bug pattern="JLM_JSR166_UTILCONCURRENT_MONITORENTER" />
  </Match>
</FindBugsFilter>