summaryrefslogtreecommitdiff
path: root/src/tuning/transpose.cc
blob: 9039286659535cda5b36900a30aaec7faeeb652a (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
// =================================================================================================
// This file is part of the CLBlast project. The project is licensed under Apache Version 2.0. This
// project loosely follows the Google C++ styleguide and uses a tab-size of two spaces and a max-
// width of 100 characters per line.
//
// Author(s):
//   Cedric Nugteren <www.cedricnugteren.nl>
//
// This file implements an auto-tuner to tune the transpose OpenCL kernels. It uses CLTune.
//
// =================================================================================================

#include <string>
#include <vector>
#include <stdexcept>

#include "internal/utilities.h"
#include "internal/tuning.h"

namespace clblast {
// =================================================================================================

// The transpose auto-tuner
// The transpose auto-tuner: registers the kernel, its tunable parameters, constraints, and
// arguments with the supplied CLTune tuner. Input matrix `a_mat` is transposed into `b_mat`.
template <typename T>
void TransposeTune(const Arguments<T> &args,
                   const std::vector<T> &a_mat, std::vector<T> &b_mat,
                   cltune::Tuner &tuner) {

  // This points to the TransposeMatrix kernel as found in the CLBlast library. This is just one
  // example of a transpose kernel. However, all kernels use the same tuning parameters, so one has
  // to be chosen as a representative.
  // NOTE: the kernel sources are embedded as raw string literals via the preprocessor, so the
  // #include directives below complete the std::string assignments on the preceding lines.
  std::string common_source =
  #include "../src/kernels/common.opencl"
  std::string kernel_source =
  #include "../src/kernels/transpose.opencl"
  auto sources = common_source + kernel_source;
  // Global size is the full matrix; the base local size {1, 1} (resp. {8, 8} for the reference)
  // is scaled later by MulLocalSize below.
  auto id = tuner.AddKernelFromString(sources, "TransposeMatrix", {args.m, args.n}, {1, 1});
  tuner.SetReferenceFromString(sources, "TransposeMatrix", {args.m, args.n}, {8, 8});

  // Sets the tunable parameters and their possible values:
  //   TRA_DIM: square work-group dimension, TRA_WPT: work-per-thread, TRA_PAD: local-memory padding
  tuner.AddParameter(id, "TRA_DIM", {4, 8, 16, 32, 64});
  tuner.AddParameter(id, "TRA_WPT", {1, 2, 4, 8, 16});
  tuner.AddParameter(id, "TRA_PAD", {0, 1});

  // Tests for a specific precision: compiles the kernel (and reference) for this precision only
  tuner.AddParameter(id, "PRECISION", {static_cast<size_t>(args.precision)});
  tuner.AddParameterReference("PRECISION", static_cast<size_t>(args.precision));

  // Sets the constraints for local memory size limitations: a tile of
  // (TRA_DIM*TRA_WPT) x (TRA_DIM*TRA_WPT + TRA_PAD) elements must fit in local memory.
  // v = {TRA_DIM, TRA_WPT, TRA_PAD} per the parameter-name list passed below.
  auto LocalMemorySize = [args] (std::vector<size_t> v) {
    return ((v[0]*v[1]*(v[0]*v[1]+v[2]))*GetBytes(args.precision));
  };
  tuner.SetLocalMemoryUsage(id, LocalMemorySize, {"TRA_DIM", "TRA_WPT", "TRA_PAD"});

  // Modifies the thread-sizes (both global and local) based on the parameters: each thread
  // handles TRA_WPT x TRA_WPT elements, and work-groups are TRA_DIM x TRA_DIM threads.
  tuner.DivGlobalSize(id, {"TRA_WPT", "TRA_WPT"});
  tuner.MulLocalSize(id, {"TRA_DIM", "TRA_DIM"});

  // Sets the function's arguments: leading dimension, input matrix, output matrix
  tuner.AddArgumentScalar(static_cast<int>(args.m));
  tuner.AddArgumentInput(a_mat);
  tuner.AddArgumentOutput(b_mat);
}

// =================================================================================================

// Main function which calls the common client code with the routine-specific function as argument.
void TunerTranspose(int argc, char *argv[]) {
  switch(GetPrecision(argc, argv)) {
    case Precision::kHalf: throw std::runtime_error("Unsupported precision mode");
    case Precision::kSingle: TunerAB<float>(argc, argv, TransposeTune<float>); break;
    case Precision::kDouble: TunerAB<double>(argc, argv, TransposeTune<double>); break;
    case Precision::kComplexSingle: TunerAB<float2>(argc, argv, TransposeTune<float2>); break;
    case Precision::kComplexDouble: TunerAB<double2>(argc, argv, TransposeTune<double2>); break;
  }
}

// =================================================================================================
} // namespace clblast

// Main function (not within the clblast namespace)
// Main function (not within the clblast namespace): forwards the command-line
// arguments to the transpose tuner and reports success on completion.
int main(int argc, char *argv[]) {
  clblast::TunerTranspose(argc, argv);
  return 0;
}

// =================================================================================================