24 changes: 21 additions & 3 deletions RecoTracker/LSTCore/interface/alpaka/Common.h
@@ -94,9 +94,27 @@ namespace ALPAKA_ACCELERATOR_NAMESPACE::lst {

namespace pt3dnn {
HOST_DEVICE_CONSTANT float kEta_norm = 2.5f;
HOST_DEVICE_CONSTANT float kWp[kEtaBins] = {
0.189f, 0.1805f, 0.2267f, 0.3104f, 0.4719f, 0.3159f, 0.1372f, 0.1571f, 0.3198f, 0.186f};
HOST_DEVICE_CONSTANT float kWpHigh = 0.0473f;

// 95% sig-efficiency for abs(eta) <= 1.25, 84% for abs(eta) > 1.25
HOST_DEVICE_CONSTANT float kWp_pT3[kEtaBins] = {
0.6288f, 0.8014f, 0.7218f, 0.743f, 0.7519f, 0.8633f, 0.6934f, 0.6983f, 0.6502f, 0.7037f};
// 95% sig-efficiency for high pT bin
HOST_DEVICE_CONSTANT float kWpHigh_pT3 = 0.657f;
// 99.5% sig-efficiency for abs(eta) <= 1.25, 99% for abs(eta) > 1.25
HOST_DEVICE_CONSTANT float kWp_pT5[kEtaBins] = {
0.1227f, 0.1901f, 0.218f, 0.3438f, 0.1011f, 0.1502f, 0.0391f, 0.0471f, 0.1444f, 0.1007f};
// 99.5% signal efficiency for high pT bin
HOST_DEVICE_CONSTANT float kWpHigh_pT5 = 0.1498f;

// kWp's must be defined with inline static in the structs to compile.
struct pT3WP {
ALPAKA_FN_ACC static inline float wp(unsigned i) { return kWp_pT3[i]; }
ALPAKA_FN_ACC static inline float wpHigh() { return kWpHigh_pT3; }
};
struct pT5WP {
ALPAKA_FN_ACC static inline float wp(unsigned i) { return kWp_pT5[i]; }
ALPAKA_FN_ACC static inline float wpHigh() { return kWpHigh_pT5; }
};
} // namespace pt3dnn

} // namespace dnn
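For context on the struct-based layout above: pT3WP and pT5WP act as compile-time policy types, so a templated consumer (such as runInference below) picks the matching working-point table without any runtime selection of which array to read. A minimal standalone sketch of the pattern, with placeholder names (WpA, WpB, passesCut) and placeholder thresholds rather than the tuned LST values:

```cpp
#include <cstdio>

// Placeholder working-point tables; the real values live in kWp_pT3 / kWp_pT5.
constexpr unsigned kBins = 3;
constexpr float kTableA[kBins] = {0.63f, 0.80f, 0.72f};
constexpr float kTableB[kBins] = {0.12f, 0.19f, 0.22f};

// Policy structs with static inline accessors, mirroring the pT3WP / pT5WP idea.
struct WpA {
  static inline float wp(unsigned i) { return kTableA[i]; }
};
struct WpB {
  static inline float wp(unsigned i) { return kTableB[i]; }
};

// Templated consumer: the table is chosen at compile time via the WP parameter.
template <typename WP>
bool passesCut(float score, unsigned bin) {
  return score > WP::wp(bin);
}

int main() {
  std::printf("%d %d\n", passesCut<WpA>(0.5f, 0), passesCut<WpB>(0.5f, 0));  // prints "0 1"
  return 0;
}
```

In the actual change, the same mechanism lets runPixelTripletDefaultAlgo default to the pT3 working points while the pixel-quintuplet caller passes dnn::pt3dnn::pT5WP explicitly.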
26 changes: 15 additions & 11 deletions RecoTracker/LSTCore/src/alpaka/NeuralNetwork.h
@@ -140,25 +140,29 @@ namespace ALPAKA_ACCELERATOR_NAMESPACE::lst {

namespace pt3dnn {

template <typename TAcc>
template <typename WP, typename TAcc>
ALPAKA_FN_ACC ALPAKA_FN_INLINE bool runInference(TAcc const& acc,
const float rPhiChiSquared,
const float tripletRadius,
const float pixelRadius,
const float pixRadiusError,
const float rzChiSquared,
const float pixelEta,
const float pixelPt) {
constexpr unsigned int kInputFeatures = 6;
const float pixelPt,
const int moduleType3) {
constexpr unsigned int kInputFeatures = 7;
constexpr unsigned int kHiddenFeatures = 32;
constexpr unsigned int kOutputFeatures = 1;

float x[kInputFeatures] = {alpaka::math::log10(acc, rPhiChiSquared),
alpaka::math::log10(acc, tripletRadius),
alpaka::math::log10(acc, pixelRadius),
alpaka::math::log10(acc, pixRadiusError),
alpaka::math::log10(acc, rzChiSquared),
alpaka::math::abs(acc, pixelEta) / dnn::pt3dnn::kEta_norm};
float x[kInputFeatures] = {
alpaka::math::log10(acc, rPhiChiSquared),
alpaka::math::log10(acc, tripletRadius),
alpaka::math::log10(acc, pixelRadius),
alpaka::math::log10(acc, pixRadiusError),
alpaka::math::log10(acc, rzChiSquared),
alpaka::math::abs(acc, pixelEta) / dnn::pt3dnn::kEta_norm,
static_cast<float>(moduleType3),
};

float x1[kHiddenFeatures];
float x2[kHiddenFeatures];
@@ -179,9 +183,9 @@ namespace ALPAKA_ACCELERATOR_NAMESPACE::lst {
: static_cast<unsigned int>(alpaka::math::abs(acc, pixelEta) / dnn::kEtaSize);

if (pixelPt > 5.0f)
return output > dnn::pt3dnn::kWpHigh;
return output > WP::wpHigh();

return output > dnn::pt3dnn::kWp[bin_index];
return output > WP::wp(bin_index);
}

} // namespace pt3dnn
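To make the threshold selection at the end of runInference easier to follow: when pixelPt exceeds 5 GeV the network output is compared against the single high-pT working point, otherwise the comparison uses an |eta|-binned table, with the bin index derived from |eta| and the bin width dnn::kEtaSize. A rough standalone sketch of that selection logic (the constants and the passWorkingPoint helper are illustrative, not the LST API):

```cpp
#include <algorithm>
#include <cmath>

// Illustrative stand-ins for kEtaBins, kEta_norm and the tuned working points.
constexpr unsigned kEtaBins = 10;
constexpr float kEtaNorm = 2.5f;                 // |eta| range covered by the binned table
constexpr float kEtaSize = kEtaNorm / kEtaBins;  // width of one |eta| bin
constexpr float kWp[kEtaBins] = {0.63f, 0.80f, 0.72f, 0.74f, 0.75f, 0.86f, 0.69f, 0.70f, 0.65f, 0.70f};
constexpr float kWpHigh = 0.66f;

// Decide whether a network score passes, given the candidate's eta and pT.
inline bool passWorkingPoint(float score, float eta, float pt) {
  if (pt > 5.0f)
    return score > kWpHigh;  // one working point for the high-pT regime
  // Clamp the bin index so very large |eta| falls into the last bin.
  const unsigned bin = std::min<unsigned>(kEtaBins - 1, static_cast<unsigned>(std::fabs(eta) / kEtaSize));
  return score > kWp[bin];  // eta-binned working point otherwise
}
```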
42 changes: 21 additions & 21 deletions RecoTracker/LSTCore/src/alpaka/PixelQuintuplet.h
@@ -491,27 +491,27 @@ namespace ALPAKA_ACCELERATOR_NAMESPACE::lst {
float pixelRadiusTemp, tripletRadius, rPhiChiSquaredTemp, rzChiSquaredTemp, rPhiChiSquaredInwardsTemp, centerXTemp,
centerYTemp, pixelRadiusErrorTemp;

if (not runPixelTripletDefaultAlgo(acc,
modules,
ranges,
mds,
segments,
pixelSeeds,
pixelSegments,
triplets,
pixelSegmentIndex,
t5InnerT3Index,
pixelRadiusTemp,
tripletRadius,
centerXTemp,
centerYTemp,
rzChiSquaredTemp,
rPhiChiSquaredTemp,
rPhiChiSquaredInwardsTemp,
pixelRadiusErrorTemp,
ptCut,
true,
false))
if (not runPixelTripletDefaultAlgo<dnn::pt3dnn::pT5WP>(acc,
modules,
ranges,
mds,
segments,
pixelSeeds,
pixelSegments,
triplets,
pixelSegmentIndex,
t5InnerT3Index,
pixelRadiusTemp,
tripletRadius,
centerXTemp,
centerYTemp,
rzChiSquaredTemp,
rPhiChiSquaredTemp,
rPhiChiSquaredInwardsTemp,
pixelRadiusErrorTemp,
ptCut,
true,
false))
return false;

unsigned int firstSegmentIndex = triplets.segmentIndices()[t5InnerT3Index][0];
132 changes: 13 additions & 119 deletions RecoTracker/LSTCore/src/alpaka/PixelTriplet.h
@@ -312,107 +312,6 @@ namespace ALPAKA_ACCELERATOR_NAMESPACE::lst {
return chiSquared;
}

//90pc threshold
ALPAKA_FN_ACC ALPAKA_FN_INLINE bool passPT3RPhiChiSquaredCuts(ModulesConst modules,
uint16_t lowerModuleIndex1,
uint16_t lowerModuleIndex2,
uint16_t lowerModuleIndex3,
float chiSquared) {
const int layer1 =
modules.layers()[lowerModuleIndex1] + 6 * (modules.subdets()[lowerModuleIndex1] == Endcap) +
5 * (modules.subdets()[lowerModuleIndex1] == Endcap and modules.moduleType()[lowerModuleIndex1] == TwoS);
const int layer2 =
modules.layers()[lowerModuleIndex2] + 6 * (modules.subdets()[lowerModuleIndex2] == Endcap) +
5 * (modules.subdets()[lowerModuleIndex2] == Endcap and modules.moduleType()[lowerModuleIndex2] == TwoS);
const int layer3 =
modules.layers()[lowerModuleIndex3] + 6 * (modules.subdets()[lowerModuleIndex3] == Endcap) +
5 * (modules.subdets()[lowerModuleIndex3] == Endcap and modules.moduleType()[lowerModuleIndex3] == TwoS);

if (layer1 == 8 and layer2 == 9 and layer3 == 10) {
return chiSquared < 7.003f;
} else if (layer1 == 8 and layer2 == 9 and layer3 == 15) {
return chiSquared < 0.5f;
} else if (layer1 == 7 and layer2 == 8 and layer3 == 9) {
return chiSquared < 8.046f;
} else if (layer1 == 7 and layer2 == 8 and layer3 == 14) {
return chiSquared < 0.575f;
} else if (layer1 == 1 and layer2 == 2 and layer3 == 7) {
return chiSquared < 5.304f;
} else if (layer1 == 1 and layer2 == 2 and layer3 == 3) {
return chiSquared < 10.6211f;
} else if (layer1 == 1 and layer2 == 7 and layer3 == 8) {
return chiSquared < 4.617f;
} else if (layer1 == 2 and layer2 == 7 and layer3 == 8) {
return chiSquared < 8.046f;
} else if (layer1 == 2 and layer2 == 7 and layer3 == 13) {
return chiSquared < 0.435f;
} else if (layer1 == 2 and layer2 == 3 and layer3 == 7) {
return chiSquared < 9.244f;
} else if (layer1 == 2 and layer2 == 3 and layer3 == 12) {
return chiSquared < 0.287f;
} else if (layer1 == 2 and layer2 == 3 and layer3 == 4) {
return chiSquared < 18.509f;
}

return true;
}

ALPAKA_FN_ACC ALPAKA_FN_INLINE bool passPT3RPhiChiSquaredInwardsCuts(ModulesConst modules,
uint16_t lowerModuleIndex1,
uint16_t lowerModuleIndex2,
uint16_t lowerModuleIndex3,
float chiSquared) {
const int layer1 =
modules.layers()[lowerModuleIndex1] + 6 * (modules.subdets()[lowerModuleIndex1] == Endcap) +
5 * (modules.subdets()[lowerModuleIndex1] == Endcap and modules.moduleType()[lowerModuleIndex1] == TwoS);
const int layer2 =
modules.layers()[lowerModuleIndex2] + 6 * (modules.subdets()[lowerModuleIndex2] == Endcap) +
5 * (modules.subdets()[lowerModuleIndex2] == Endcap and modules.moduleType()[lowerModuleIndex2] == TwoS);
const int layer3 =
modules.layers()[lowerModuleIndex3] + 6 * (modules.subdets()[lowerModuleIndex3] == Endcap) +
5 * (modules.subdets()[lowerModuleIndex3] == Endcap and modules.moduleType()[lowerModuleIndex3] == TwoS);

if (layer1 == 7 and layer2 == 8 and layer3 == 9) // endcap layer 1,2,3, ps
{
return chiSquared < 22016.8055f;
} else if (layer1 == 7 and layer2 == 8 and layer3 == 14) // endcap layer 1,2,3 layer3->2s
{
return chiSquared < 935179.56807f;
} else if (layer1 == 8 and layer2 == 9 and layer3 == 10) // endcap layer 2,3,4
{
return chiSquared < 29064.12959f;
} else if (layer1 == 8 and layer2 == 9 and layer3 == 15) // endcap layer 2,3,4, layer3->2s
{
return chiSquared < 935179.5681f;
} else if (layer1 == 1 and layer2 == 2 and layer3 == 3) // barrel 1,2,3
{
return chiSquared < 1370.0113195101474f;
} else if (layer1 == 1 and layer2 == 2 and layer3 == 7) // barrel 1,2 endcap 1
{
return chiSquared < 5492.110048314815f;
} else if (layer1 == 2 and layer2 == 3 and layer3 == 4) // barrel 2,3,4
{
return chiSquared < 4160.410806470067f;
} else if (layer1 == 1 and layer2 == 7 and layer3 == 8) // barrel 1, endcap 1,2
{
return chiSquared < 29064.129591225726f;
} else if (layer1 == 2 and layer2 == 3 and layer3 == 7) // barrel 2,3 endcap 1
{
return chiSquared < 12634.215376250893f;
} else if (layer1 == 2 and layer2 == 3 and layer3 == 12) // barrel 2,3, endcap 1->2s
{
return chiSquared < 353821.69361145404f;
} else if (layer1 == 2 and layer2 == 7 and layer3 == 8) // barrel2, endcap 1,2
{
return chiSquared < 33393.26076341235f;
} else if (layer1 == 2 and layer2 == 7 and layer3 == 13) //barrel 2, endcap 1, endcap2->2s
{
return chiSquared < 935179.5680742573f;
}

return true;
}

ALPAKA_FN_ACC ALPAKA_FN_INLINE bool checkIntervalOverlappT3(float firstMin,
float firstMax,
float secondMin,
@@ -630,7 +529,7 @@ namespace ALPAKA_ACCELERATOR_NAMESPACE::lst {
return RMSE;
}

template <typename TAcc>
template <typename WP = dnn::pt3dnn::pT3WP, typename TAcc>
ALPAKA_FN_ACC ALPAKA_FN_INLINE bool runPixelTripletDefaultAlgo(TAcc const& acc,
ModulesConst modules,
ObjectRangesConst ranges,
@@ -771,30 +670,25 @@

rPhiChiSquared =
computePT3RPhiChiSquared(acc, modules, lowerModuleIndices, pixelG, pixelF, pixelRadiusPCA, xs, ys);
if (runChiSquaredCuts && pixelSegmentPt < 5.0f) {
if (!passPT3RPhiChiSquaredCuts(modules, lowerModuleIndex, middleModuleIndex, upperModuleIndex, rPhiChiSquared))
return false;
}

rPhiChiSquaredInwards = computePT3RPhiChiSquaredInwards(g, f, tripletRadius, xPix, yPix);
if (runChiSquaredCuts && pixelSegmentPt < 5.0f) {
if (!passPT3RPhiChiSquaredInwardsCuts(
modules, lowerModuleIndex, middleModuleIndex, upperModuleIndex, rPhiChiSquaredInwards))
[Reviewer] ah, rPhiChiSquaredInwards would be another variable not used in the dnn

[Member Author] Yes, I tried to add it a while ago but it didn't improve performance. It is still included in the "score" variable, however, which is why I had to leave it for now.

return false;
}
}

centerX = 0;
centerY = 0;

if (runDNN and !lst::pt3dnn::runInference(acc,
rPhiChiSquared,
tripletRadius,
pixelRadius,
pixelRadiusError,
rzChiSquared,
pixelSeeds.eta()[pixelSegmentArrayIndex],
pixelSegmentPt)) {
// Module type of last anchor hit for the T3.
const int module_type_3 = modules.moduleType()[upperModuleIndex];

if (runDNN and !lst::pt3dnn::runInference<WP>(acc,
rPhiChiSquared,
tripletRadius,
pixelRadius,
pixelRadiusError,
rzChiSquared,
pixelSeeds.eta()[pixelSegmentArrayIndex],
pixelSegmentPt,
module_type_3)) {
return false;
}
