/*
 * Copyright (C) 2018 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.android.tradefed.result;

import com.android.ddmlib.testrunner.TestResult.TestStatus;
import com.android.tradefed.metrics.proto.MetricMeasurement.Measurements;
import com.android.tradefed.metrics.proto.MetricMeasurement.Metric;
import com.android.tradefed.retry.MergeStrategy;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;

/** Container for a result of a single test. */
public class TestResult {
    // Key that marks that an aggregation is hiding a failure.
    public static final String IS_FLAKY = "is_flaky";

    private TestStatus mStatus;
    private FailureDescription mFailureDescription;
    private Map<String, String> mMetrics;
    private HashMap<String, Metric> mProtoMetrics;
    private Map<String, LogFile> mLoggedFiles;
    // the start and end time of the test, measured via {@link System#currentTimeMillis()}
    private long mStartTime = 0;
    private long mEndTime = 0;

    public TestResult() {
        mStatus = TestStatus.INCOMPLETE;
        mStartTime = System.currentTimeMillis();
        mLoggedFiles = new LinkedHashMap<String, LogFile>();
        mMetrics = new HashMap<>();
        mProtoMetrics = new HashMap<>();
    }
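
    // Illustrative lifecycle sketch: a result is typically created when a test starts and
    // filled in as events arrive, for example roughly:
    //   TestResult result = new TestResult(); // status starts as INCOMPLETE, start time is set
    //   result.setStatus(TestStatus.PASSED);
    //   result.setEndTime(System.currentTimeMillis());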

    /** Get the {@link TestStatus} result of the test. */
    public TestStatus getStatus() {
        return mStatus;
    }

    /**
     * Get the associated {@link String} stack trace. Should be <code>null</code> if {@link
     * #getStatus()} is {@link TestStatus#PASSED}.
     */
    public String getStackTrace() {
        if (mFailureDescription == null) {
            return null;
        }
        return mFailureDescription.toString();
    }

    /**
     * Get the associated {@link FailureDescription}. Should be <code>null</code> if {@link
     * #getStatus()} is {@link TestStatus#PASSED}.
     */
    public FailureDescription getFailure() {
        return mFailureDescription;
    }

    /** Get the associated test metrics. */
    public Map<String, String> getMetrics() {
        return mMetrics;
    }

    /** Get the associated test metrics in proto format. */
    public HashMap<String, Metric> getProtoMetrics() {
        return mProtoMetrics;
    }

    /** Set the test metrics, overriding any previous values. */
    public void setMetrics(Map<String, String> metrics) {
        mMetrics = metrics;
    }

    /** Set the test metrics in proto format, overriding any previous values. */
    public void setProtoMetrics(HashMap<String, Metric> metrics) {
        mProtoMetrics = metrics;
    }

    /** Add a logged file associated with that test case. */
    public void addLoggedFile(String dataName, LogFile loggedFile) {
        mLoggedFiles.put(dataName, loggedFile);
    }

    /** Returns a copy of the map containing all the logged files associated with that test case. */
    public Map<String, LogFile> getLoggedFiles() {
        return new LinkedHashMap<>(mLoggedFiles);
    }

    /**
     * Return the {@link System#currentTimeMillis()} time that the {@link
     * ITestInvocationListener#testStarted(TestDescription)} event was received.
     */
    public long getStartTime() {
        return mStartTime;
    }

    /**
     * Sets the time when the test was started, to be used with {@link
     * ITestInvocationListener#testStarted(TestDescription, long)}.
     */
    public void setStartTime(long startTime) {
        mStartTime = startTime;
    }

    /**
     * Return the {@link System#currentTimeMillis()} time that the {@link
     * ITestInvocationListener#testEnded(TestDescription, Map)} event was received.
     */
    public long getEndTime() {
        return mEndTime;
    }

    /** Set the {@link TestStatus}. */
    public TestResult setStatus(TestStatus status) {
        mStatus = status;
        return this;
    }

    /** Set the stack trace. */
    public void setStackTrace(String stackTrace) {
        mFailureDescription = FailureDescription.create(stackTrace);
    }

    /** Set the {@link FailureDescription}. */
    public void setFailure(FailureDescription failureDescription) {
        mFailureDescription = failureDescription;
    }

    /** Sets the end time. */
    public void setEndTime(long currentTimeMillis) {
        mEndTime = currentTimeMillis;
    }

    @Override
    public int hashCode() {
        return Arrays.hashCode(new Object[] {mMetrics, mFailureDescription, mStatus});
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null) {
            return false;
        }
        if (getClass() != obj.getClass()) {
            return false;
        }
        TestResult other = (TestResult) obj;
        return Objects.equals(mMetrics, other.mMetrics)
                && Objects.equals(
                        String.valueOf(mFailureDescription),
                        String.valueOf(other.mFailureDescription))
                && Objects.equals(mStatus, other.mStatus);
    }

    private void markFlaky() {
        mProtoMetrics.put(
                IS_FLAKY,
                Metric.newBuilder()
                        .setMeasurements(Measurements.newBuilder().setSingleString("true").build())
                        .build());
    }
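
    // Illustrative sketch: markFlaky() records IS_FLAKY in the proto metrics, so a consumer of
    // a merged result could detect a pass that hid failures roughly like this (where
    // "mergedResult" stands for a result returned by merge() below):
    //   boolean wasFlaky = mergedResult.getProtoMetrics().containsKey(TestResult.IS_FLAKY);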

    /**
     * Merge the attempts for the same test case based on the merging strategy.
     *
     * @param results List of {@link TestResult} that will be merged
     * @param strategy the {@link MergeStrategy} to be used to determine the merging outcome.
     * @return the merged {@link TestResult} or null if there is nothing to merge.
     */
    public static TestResult merge(List<TestResult> results, MergeStrategy strategy) {
        if (results.isEmpty()) {
            return null;
        }
        if (MergeStrategy.NO_MERGE.equals(strategy)) {
            throw new IllegalArgumentException(
                    "TestResult#merge cannot be called with NO_MERGE strategy.");
        }
        TestResult mergedResult = new TestResult();

        long earliestStartTime = Long.MAX_VALUE;
        long latestEndTime = Long.MIN_VALUE;

        List<FailureDescription> errors = new ArrayList<>();
        int pass = 0;
        int fail = 0;
        int assumption_failure = 0;
        int ignored = 0;
        int incomplete = 0;

        for (TestResult attempt : results) {
            mergedResult.mProtoMetrics.putAll(attempt.getProtoMetrics());
            mergedResult.mMetrics.putAll(attempt.getMetrics());
            mergedResult.mLoggedFiles.putAll(attempt.getLoggedFiles());
            earliestStartTime = Math.min(attempt.getStartTime(), earliestStartTime);
            latestEndTime = Math.max(attempt.getEndTime(), latestEndTime);
            switch (attempt.getStatus()) {
                case PASSED:
                    pass++;
                    break;
                case FAILURE:
                    fail++;
                    if (attempt.getFailure() != null) {
                        errors.add(attempt.getFailure());
                    }
                    break;
                case INCOMPLETE:
                    incomplete++;
                    errors.add(FailureDescription.create("incomplete test case result."));
                    break;
                case ASSUMPTION_FAILURE:
                    assumption_failure++;
                    if (attempt.getFailure() != null) {
                        errors.add(attempt.getFailure());
                    }
                    break;
                case IGNORED:
                    ignored++;
                    break;
            }
        }

        switch (strategy) {
            case ANY_PASS_IS_PASS:
            case ONE_TESTCASE_PASS_IS_PASS:
                // We prioritize passing the test due to the merging strategy.
                if (pass > 0) {
                    mergedResult.setStatus(TestStatus.PASSED);
                    if (fail > 0) {
                        mergedResult.markFlaky();
                    }
                } else if (fail == 0) {
                    if (ignored > 0) {
                        mergedResult.setStatus(TestStatus.IGNORED);
                    } else if (assumption_failure > 0) {
                        mergedResult.setStatus(TestStatus.ASSUMPTION_FAILURE);
                    } else if (incomplete > 0) {
                        mergedResult.setStatus(TestStatus.INCOMPLETE);
                    }
                } else {
                    mergedResult.setStatus(TestStatus.FAILURE);
                }
                break;
            default:
                // By default, any single failing attempt means the merged result is reported as a failure.
                if (fail > 0) {
                    mergedResult.setStatus(TestStatus.FAILURE);
                } else {
                    if (ignored > 0) {
                        mergedResult.setStatus(TestStatus.IGNORED);
                    } else if (assumption_failure > 0) {
                        mergedResult.setStatus(TestStatus.ASSUMPTION_FAILURE);
                    } else if (incomplete > 0) {
                        mergedResult.setStatus(TestStatus.INCOMPLETE);
                    } else {
                        mergedResult.setStatus(TestStatus.PASSED);
                    }
                }
                break;
        }
        if (errors.isEmpty()) {
            mergedResult.mFailureDescription = null;
        } else if (errors.size() == 1) {
            mergedResult.mFailureDescription = errors.get(0);
        } else {
            mergedResult.mFailureDescription = new MultiFailureDescription(errors);
        }
        mergedResult.setStartTime(earliestStartTime);
        mergedResult.setEndTime(latestEndTime);
        return mergedResult;
    }
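
    // Illustrative usage sketch: merging two attempts where a retry passed after an initial
    // failure, assuming the ANY_PASS_IS_PASS strategy, would look roughly like
    //   TestResult first = new TestResult().setStatus(TestStatus.FAILURE);
    //   TestResult retry = new TestResult().setStatus(TestStatus.PASSED);
    //   TestResult merged =
    //           TestResult.merge(Arrays.asList(first, retry), MergeStrategy.ANY_PASS_IS_PASS);
    // and the merged result would report PASSED while carrying the IS_FLAKY proto metric.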
}