summaryrefslogtreecommitdiff
path: root/test/correctness/tester.cc
blob: 4a179718acbfa16046a895c8c0ca78ea2fcef2fd (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
// =================================================================================================
// This file is part of the CLBlast project. The project is licensed under Apache Version 2.0. This
// project loosely follows the Google C++ styleguide and uses a tab-size of two spaces and a max-
// width of 100 characters per line.
//
// Author(s):
//   Cedric Nugteren <www.cedricnugteren.nl>
//
// This file implements the Tester class (see the header for information about the class).
//
// =================================================================================================

#include "correctness/tester.h"

#include <string>
#include <vector>
#include <iostream>
#include <cmath>

namespace clblast {
// =================================================================================================

// General constructor for all CLBlast testers. It prints out the test header to stdout and sets-up
// the clBLAS library for reference.
template <typename T>
Tester<T>::Tester(int argc, char *argv[], const bool silent,
                  const std::string &name, const std::vector<std::string> &options):
    help_("Options given/available:\n"),
    // OpenCL platform/device selection from the command line (both default to index 0)
    platform_(Platform(GetArgument(argc, argv, help_, kArgPlatform, size_t{0}))),
    device_(Device(platform_, kDeviceType, GetArgument(argc, argv, help_, kArgDevice, size_t{0}))),
    context_(Context(device_)),
    queue_(CommandQueue(context_, device_)),
    // When given, testers run over a larger set of argument combinations
    full_test_(CheckArgument(argc, argv, help_, kArgFullTest)),
    error_log_{},
    // Per-test counters (reset in TestStart)
    num_passed_{0},
    num_skipped_{0},
    num_failed_{0},
    print_count_{0},
    // Whole-routine totals (accumulated in TestEnd, reported in the destructor)
    tests_passed_{0},
    tests_skipped_{0},
    tests_failed_{0},
    options_{options} {

  // Prints the help message (command-line arguments)
  if (!silent) { fprintf(stdout, "\n* %s\n", help_.c_str()); }

  // Prints the header
  fprintf(stdout, "* Running on OpenCL device '%s'.\n", device_.Name().c_str());
  fprintf(stdout, "* Starting tests for the %s'%s'%s routine.",
          kPrintMessage.c_str(), name.c_str(), kPrintEnd.c_str());

  // Checks whether the precision is supported; if not, returns early. Note that in this case
  // clblasSetup() below is never reached.
  if (!PrecisionSupported()) {
    fprintf(stdout, "\n* All tests skipped: %sUnsupported precision%s\n",
            kPrintWarning.c_str(), kPrintEnd.c_str());
    return;
  }

  // Prints the legend explaining the per-sub-test result symbols
  fprintf(stdout, " Legend:\n");
  fprintf(stdout, "   %s -> Test produced correct results\n", kSuccessData.c_str());
  fprintf(stdout, "   %s -> Test returned the correct error code\n", kSuccessStatus.c_str());
  fprintf(stdout, "   %s -> Test produced incorrect results\n", kErrorData.c_str());
  fprintf(stdout, "   %s -> Test returned an incorrect error code\n", kErrorStatus.c_str());
  fprintf(stdout, "   %s -> Test not executed: OpenCL-kernel compilation error\n",
          kSkippedCompilation.c_str());
  fprintf(stdout, "   %s -> Test not executed: Unsupported precision\n",
          kUnsupportedPrecision.c_str());

  // Initializes clBLAS, the reference implementation tested against
  auto status = clblasSetup();
  if (status != CL_SUCCESS) {
    throw std::runtime_error("clBLAS setup error: "+ToString(static_cast<int>(status)));
  }
}

// Destructor prints the summary of the test cases and cleans-up the clBLAS library
template <typename T>
Tester<T>::~Tester() {
  if (PrecisionSupported()) {
    fprintf(stdout, "* Completed all test-cases for this routine. Results:\n");
    fprintf(stdout, "   %lu test(s) passed\n", tests_passed_);
    if (tests_skipped_ > 0) { fprintf(stdout, "%s", kPrintWarning.c_str()); }
    fprintf(stdout, "   %lu test(s) skipped%s\n", tests_skipped_, kPrintEnd.c_str());
    if (tests_failed_ > 0) { fprintf(stdout, "%s", kPrintError.c_str()); }
    fprintf(stdout, "   %lu test(s) failed%s\n", tests_failed_, kPrintEnd.c_str());

    // Tears down clBLAS. Only done when the precision is supported, since the constructor
    // returned early (before clblasSetup()) in the unsupported case.
    clblasTeardown();
  }
  fprintf(stdout, "\n");
}

// =================================================================================================

// Function called at the start of each test. This resets all per-test data-structures and then
// prints a header with information about the test.
template <typename T>
void Tester<T>::TestStart(const std::string &test_name, const std::string &test_configuration) {

  // Clears the error log and resets all per-test counters
  error_log_.clear();
  print_count_ = 0;
  num_passed_ = 0;
  num_skipped_ = 0;
  num_failed_ = 0;

  // Announces which test and configuration is about to run
  fprintf(stdout, "* Testing %s'%s'%s for %s'%s'%s:\n",
          kPrintMessage.c_str(), test_name.c_str(), kPrintEnd.c_str(),
          kPrintMessage.c_str(), test_configuration.c_str(), kPrintEnd.c_str());
  fprintf(stdout, "   ");
}

// Function called at the end of each test. This prints errors if any occured. It also prints a
// summary of the number of sub-tests passed/failed.
template <typename T>
void Tester<T>::TestEnd() {
  fprintf(stdout, "\n");

  // Accumulates the per-test counters into the whole-routine totals. Note: skipped sub-tests
  // are counted in 'tests_skipped_' (previously they were incorrectly added to 'tests_failed_',
  // leaving 'tests_skipped_' always zero and inflating the failure count in the summary).
  tests_passed_ += num_passed_;
  tests_skipped_ += num_skipped_;
  tests_failed_ += num_failed_;

  // Prints details of all error occurences for these tests
  for (const auto &entry: error_log_) {
    if (entry.error_percentage != kStatusError) {
      // Data mismatch: report how many values deviated
      fprintf(stdout, "   Error rate %.1lf%%: ", entry.error_percentage);
    }
    else {
      // Status-code mismatch: report the found versus the expected code
      fprintf(stdout, "   Status code %d (expected %d): ", entry.status_found, entry.status_expect);
    }
    // Prints only the arguments relevant to this routine (as listed in 'options_')
    for (const auto &o: options_) {
      if (o == kArgM)        { fprintf(stdout, "%s=%lu ", kArgM, entry.args.m); }
      if (o == kArgN)        { fprintf(stdout, "%s=%lu ", kArgN, entry.args.n); }
      if (o == kArgK)        { fprintf(stdout, "%s=%lu ", kArgK, entry.args.k); }
      if (o == kArgLayout)   { fprintf(stdout, "%s=%d ", kArgLayout, entry.args.layout);}
      if (o == kArgATransp)  { fprintf(stdout, "%s=%d ", kArgATransp, entry.args.a_transpose);}
      if (o == kArgBTransp)  { fprintf(stdout, "%s=%d ", kArgBTransp, entry.args.b_transpose);}
      if (o == kArgSide)     { fprintf(stdout, "%s=%d ", kArgSide, entry.args.side);}
      if (o == kArgTriangle) { fprintf(stdout, "%s=%d ", kArgTriangle, entry.args.triangle);}
      if (o == kArgXInc)     { fprintf(stdout, "%s=%lu ", kArgXInc, entry.args.x_inc);}
      if (o == kArgYInc)     { fprintf(stdout, "%s=%lu ", kArgYInc, entry.args.y_inc);}
      if (o == kArgXOffset)  { fprintf(stdout, "%s=%lu ", kArgXOffset, entry.args.x_offset);}
      if (o == kArgYOffset)  { fprintf(stdout, "%s=%lu ", kArgYOffset, entry.args.y_offset);}
      if (o == kArgALeadDim) { fprintf(stdout, "%s=%lu ", kArgALeadDim, entry.args.a_ld);}
      if (o == kArgBLeadDim) { fprintf(stdout, "%s=%lu ", kArgBLeadDim, entry.args.b_ld);}
      if (o == kArgCLeadDim) { fprintf(stdout, "%s=%lu ", kArgCLeadDim, entry.args.c_ld);}
      if (o == kArgAOffset)  { fprintf(stdout, "%s=%lu ", kArgAOffset, entry.args.a_offset);}
      if (o == kArgBOffset)  { fprintf(stdout, "%s=%lu ", kArgBOffset, entry.args.b_offset);}
      if (o == kArgCOffset)  { fprintf(stdout, "%s=%lu ", kArgCOffset, entry.args.c_offset);}
    }
    fprintf(stdout, "\n");
  }

  // Prints a test summary. Guards against a division by zero (NaN pass-rate) when no sub-tests
  // were executed at all.
  const auto num_tests = num_passed_ + num_skipped_ + num_failed_;
  const auto pass_rate = (num_tests == 0) ? 100.0f
                       : 100*num_passed_ / static_cast<float>(num_tests);
  fprintf(stdout, "   Pass rate %s%5.1lf%%%s:", kPrintMessage.c_str(), pass_rate, kPrintEnd.c_str());
  fprintf(stdout, " %lu passed /", num_passed_);
  if (num_skipped_ != 0) {
    fprintf(stdout, " %s%lu skipped%s /", kPrintWarning.c_str(), num_skipped_, kPrintEnd.c_str());
  }
  else {
    fprintf(stdout, " %lu skipped /", num_skipped_);
  }
  if (num_failed_ != 0) {
    fprintf(stdout, " %s%lu failed%s\n", kPrintError.c_str(), num_failed_, kPrintEnd.c_str());
  }
  else {
    fprintf(stdout, " %lu failed\n", num_failed_);
  }
}

// =================================================================================================

// Compares two floating point values and returns whether they are within an acceptable error
// margin. This replaces GTest's EXPECT_NEAR().
template <typename T>
bool Tester<T>::TestSimilarity(const T val1, const T val2) {
  const auto absolute_margin = static_cast<T>(kErrorMarginAbsolute);
  const auto relative_margin = static_cast<T>(kErrorMarginRelative);
  const auto difference = std::fabs(val1 - val2);

  // Exact match: also handles two equal infinities
  if (val1 == val2) { return true; }

  // One of the values is zero or the difference is tiny: a relative error is less meaningful
  // here, so fall back to an absolute error margin
  if (val1 == 0 || val2 == 0 || difference < absolute_margin) {
    return difference < absolute_margin;
  }

  // General case: compares the difference relative to the magnitudes of the two values
  return (difference / (std::fabs(val1) + std::fabs(val2))) < relative_margin;
}

// Specialisation for single-precision complex: similar when both components are similar
template <>
bool Tester<float2>::TestSimilarity(const float2 val1, const float2 val2) {
  return Tester<float>::TestSimilarity(val1.real(), val2.real()) &&
         Tester<float>::TestSimilarity(val1.imag(), val2.imag());
}
// Specialisation for double-precision complex: similar when both components are similar
template <>
bool Tester<double2>::TestSimilarity(const double2 val1, const double2 val2) {
  return Tester<double>::TestSimilarity(val1.real(), val2.real()) &&
         Tester<double>::TestSimilarity(val1.imag(), val2.imag());
}

// =================================================================================================

// Handles a 'pass' or 'error' depending on whether there are any errors
template <typename T>
void Tester<T>::TestErrorCount(const size_t errors, const size_t size, const Arguments<T> &args) {

  // No deviations: the sub-test passes
  if (errors == 0) {
    PrintTestResult(kSuccessData);
    ReportPass();
    return;
  }

  // At least one value deviated: logs the failure together with its error percentage
  const auto error_percentage = 100*errors / static_cast<float>(size);
  PrintTestResult(kErrorData);
  ReportError({StatusCode::kSuccess, StatusCode::kSuccess, error_percentage, args});
}

// Compares two status codes for equality. The outcome can be a pass (they are the same), a warning
// (CLBlast reported a compilation error), or an error (they are different).
template <typename T>
void Tester<T>::TestErrorCodes(const StatusCode clblas_status, const StatusCode clblast_status,
                               const Arguments<T> &args) {

  // Both libraries report the same status: a pass
  if (clblast_status == clblas_status) {
    PrintTestResult(kSuccessStatus);
    ReportPass();
    return;
  }

  // CLBlast reports that this precision is not supported on the device: skip
  const auto precision_unsupported = (clblast_status == StatusCode::kNoDoublePrecision) ||
                                     (clblast_status == StatusCode::kNoHalfPrecision);
  if (precision_unsupported) {
    PrintTestResult(kUnsupportedPrecision);
    ReportSkipped();
    return;
  }

  // CLBlast could not build the kernel or has not implemented it: skip
  const auto kernel_unavailable = (clblast_status == StatusCode::kBuildProgramFailure) ||
                                  (clblast_status == StatusCode::kNotImplemented);
  if (kernel_unavailable) {
    PrintTestResult(kSkippedCompilation);
    ReportSkipped();
    return;
  }

  // The status codes genuinely differ: an error
  PrintTestResult(kErrorStatus);
  ReportError({clblas_status, clblast_status, kStatusError, args});
}

// =================================================================================================

// Retrieves a list of example scalar values, used for the alpha and beta arguments for the various
// routines. This function is specialised for the different data-types.
template <>
const std::vector<float> Tester<float>::GetExampleScalars() {
  if (!full_test_) { return {3.14f}; }
  return {0.0f, 1.0f, 3.14f};
}
// Double-precision specialisation of the example alpha/beta scalars
template <>
const std::vector<double> Tester<double>::GetExampleScalars() {
  if (!full_test_) { return {3.14}; }
  return {0.0, 1.0, 3.14};
}
// Single-precision complex specialisation of the example alpha/beta scalars
template <>
const std::vector<float2> Tester<float2>::GetExampleScalars() {
  if (!full_test_) { return {{2.42f, 3.14f}}; }
  return {{0.0f, 0.0f}, {1.0f, 1.3f}, {2.42f, 3.14f}};
}
// Double-precision complex specialisation of the example alpha/beta scalars
template <>
const std::vector<double2> Tester<double2>::GetExampleScalars() {
  if (!full_test_) { return {{2.42, 3.14}}; }
  return {{0.0, 0.0}, {1.0, 1.3}, {2.42, 3.14}};
}

// Retrieves the buffer-offset values to test with; the full test adds a non-zero offset
template <typename T>
const std::vector<size_t> Tester<T>::GetOffsets() {
  if (!full_test_) { return {0}; }
  return {0, 10};
}

// =================================================================================================

// Whether the tested precision is supported by the device. Single-precision (real and complex)
// is always available; double-precision requires the Khronos 'cl_khr_fp64' extension.
template <> bool Tester<float>::PrecisionSupported() const { return true; }
template <> bool Tester<float2>::PrecisionSupported() const { return true; }
template <> bool Tester<double>::PrecisionSupported() const {
  const auto extensions = device_.Extensions();
  return extensions.find(kKhronosDoublePrecision) != std::string::npos;
}
template <> bool Tester<double2>::PrecisionSupported() const {
  const auto extensions = device_.Extensions();
  return extensions.find(kKhronosDoublePrecision) != std::string::npos;
}

// =================================================================================================

// A test can either pass, be skipped, or fail
// Counts a passing sub-test
template <typename T>
void Tester<T>::ReportPass() {
  num_passed_++;
}
// Counts a sub-test that could not be executed (e.g. unsupported precision or build failure)
template <typename T>
void Tester<T>::ReportSkipped() {
  num_skipped_++;
}
// Counts a failing sub-test and stores its details for printing in TestEnd()
template <typename T>
void Tester<T>::ReportError(const ErrorLogEntry &error_log_entry) {
  error_log_.push_back(error_log_entry);
  num_failed_++;
}

// =================================================================================================

// Prints the test-result symbol to screen. This function limits the maximum number of symbols per
// line by printing newlines once every so many calls.
template <typename T>
void Tester<T>::PrintTestResult(const std::string &message) {
  // Wraps to a new (indented) line after 'kResultsPerLine' symbols
  if (print_count_ == kResultsPerLine) {
    print_count_ = 0;
    fprintf(stdout, "\n   ");
  }
  fprintf(stdout, "%s", message.c_str());
  // Flushes 'stdout' itself so the symbol appears immediately. Flushing std::cout (as done
  // previously) only empties the C++ stream's own buffer and does not flush the C 'stdout'
  // buffer that holds the fprintf output.
  fflush(stdout);
  print_count_++;
}

// =================================================================================================

// Compiles the templated class
// (explicit instantiations for the four supported precisions: S, D, C, Z)
template class Tester<float>;
template class Tester<double>;
template class Tester<float2>;
template class Tester<double2>;

// =================================================================================================
} // namespace clblast