Skip to content

Commit 81e0079

Browse files
Moving RangeTokenizer tool to module Algorithm
Also adding a unit test
1 parent a1c2523 commit 81e0079

5 files changed

Lines changed: 107 additions & 12 deletions

File tree

Algorithm/CMakeLists.txt

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -44,6 +44,7 @@ set(TEST_SRCS
4444
test/tableview.cxx
4545
test/pageparser.cxx
4646
test/test_mpl_tools.cxx
47+
test/test_RangeTokenizer.cxx
4748
)
4849

4950
O2_GENERATE_TESTS(

Detectors/TPC/workflow/src/RangeTokenizer.h renamed to Algorithm/include/Algorithm/RangeTokenizer.h

Lines changed: 39 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -18,14 +18,47 @@
1818
#include <vector>
1919
#include <string>
2020
#include <sstream>
21-
#include <utility> // std::move
21+
#include <utility> // std::move
2222
#include <functional> // std::function
2323

2424
namespace o2
2525
{
2626

27+
/// @class RangeTokenizer
28+
/// @brief Tokenize a string according to delimiter ',' and extract values of type T
29+
///
30+
/// Extract a sequence of elements of specified type T from a string argument. Elements are
31+
/// separated by comma. If T is an integral type, also ranges are supported using '-'.
32+
///
33+
/// The default conversion from token to type is using std stringstream operator>> which
34+
/// supports a variety of built-in conversions.
35+
/// A custom handler function of type std::function<T(std::string const&)> can be provided
36+
/// to convert string tokens to the specified output type.
37+
///
38+
/// @return std::vector of type T
39+
///
40+
/// Usage:
41+
/// // the simple case using integral type
42+
/// std::vector<int> tokens = RangeTokenizer::tokenize<int>("0-5,10,13");
43+
///
44+
/// // simple case using string type
45+
/// std::vector<std::string> tokens = RangeTokenizer::tokenize<std::string>("apple,strawberry,tomato");
46+
///
47+
/// // process a custom type according to a map
48+
/// // use a custom mapper function, this may throw an exception if the token is not in the map
49+
/// enum struct Food { Apple,
50+
/// Strawberry,
51+
/// Tomato };
52+
/// const std::map<std::string, Food> FoodMap {
53+
/// { "apple", Food::Apple },
54+
/// { "strawberry", Food::Strawberry },
55+
/// { "tomato", Food::Tomato },
56+
/// };
57+
/// std::vector<Food> tokens = RangeTokenizer::tokenize<Food>("apple,tomato",
58+
/// [FoodMap](auto const& token) {
59+
/// return FoodMap.at(token);
60+
/// } );
2761
struct RangeTokenizer {
28-
/// tokenize a string according to delimiter ',' and extract values of type T
2962
template <typename T>
3063
static std::vector<T> tokenize(std::string input, std::function<T(std::string const&)> convert = [](std::string const& token) {T value; std::istringstream(token) >> value; return value; })
3164
{
@@ -36,7 +69,9 @@ struct RangeTokenizer {
3669
T value;
3770
if (std::is_integral<T>::value && token.find('-') != token.npos) {
3871
// extract range
39-
insertRange(res, token, convert);
72+
if constexpr (std::is_integral<T>::value) { // c++17 compile time
73+
insertRange(res, token, convert);
74+
}
4075
} else {
4176
res.emplace_back(convert(token));
4277
}
@@ -61,13 +96,7 @@ struct RangeTokenizer {
6196
}
6297
}
6398
}
64-
65-
// this is needed to make the compilation work, but never called
66-
template <typename T, typename std::enable_if_t<std::is_integral<T>::value == false, int> = 0>
67-
static void insertRange(std::vector<T>&, std::string, std::function<T(std::string const&)>)
68-
{
69-
}
70-
};
7199
};
100+
}; // namespace o2
72101

73102
#endif
Lines changed: 65 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,65 @@
// Copyright CERN and copyright holders of ALICE O2. This software is
// distributed under the terms of the GNU General Public License v3 (GPL
// Version 3), copied verbatim in the file "COPYING".
//
// See http://alice-o2.web.cern.ch/license for full licensing information.
//
// In applying this license CERN does not waive the privileges and immunities
// granted to it by virtue of its status as an Intergovernmental Organization
// or submit itself to any jurisdiction.

// @file   test_RangeTokenizer.cxx
// @author Matthias Richter
// @since  2018-12-11
// @brief  Test program for RangeTokenizer

#define BOOST_TEST_MODULE Algorithm RangeTokenizer test
#define BOOST_TEST_MAIN
#define BOOST_TEST_DYN_LINK
#include <boost/test/unit_test.hpp>
#include "../include/Algorithm/RangeTokenizer.h"
#include <vector>
#include <map>

using RangeTokenizer = o2::RangeTokenizer;

BOOST_AUTO_TEST_CASE(test_simple_integral)
{
  // the simple case using integral type; ranges 'a-b' are expanded to all
  // values in the closed interval [a, b]
  std::vector<int> tokens = RangeTokenizer::tokenize<int>("0-5,10,13-15");
  std::vector<int> expected{ 0, 1, 2, 3, 4, 5, 10, 13, 14, 15 };
  BOOST_CHECK(tokens == expected);
}

BOOST_AUTO_TEST_CASE(test_simple_string)
{
  // simple case using string type; tokens are returned verbatim in input order
  std::vector<std::string> tokens = RangeTokenizer::tokenize<std::string>("apple,strawberry,tomato");
  BOOST_CHECK(tokens[0] == "apple");
  BOOST_CHECK(tokens[1] == "strawberry");
  BOOST_CHECK(tokens[2] == "tomato");
}

BOOST_AUTO_TEST_CASE(test_mapped_custom)
{
  // process a custom type according to a map
  enum struct Food { Apple,
                     Strawberry,
                     Tomato };

  const std::map<std::string, Food> FoodMap{
    { "apple", Food::Apple },
    { "strawberry", Food::Strawberry },
    { "tomato", Food::Tomato },
  };
  auto tester = [FoodMap](const char* arg) {
    // use a custom mapper function, this may throw an exception if the token is not in the map
    // note: no std::move on the return — the call yields a prvalue, so copy
    // elision applies; wrapping it in std::move would pessimize (-Wpessimizing-move)
    return RangeTokenizer::tokenize<Food>(arg, [FoodMap](auto const& token) { return FoodMap.at(token); });
  };

  auto tokens = tester("apple,tomato");
  BOOST_CHECK(tokens[0] == Food::Apple);
  BOOST_CHECK(tokens[1] == Food::Tomato);

  // a token absent from the map makes std::map::at throw std::out_of_range
  BOOST_CHECK_THROW(tester("blueberry"), std::out_of_range);
}

Detectors/TPC/workflow/src/RecoWorkflow.cxx

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -21,7 +21,7 @@
2121
#include "ClusterConverterSpec.h"
2222
#include "ClusterDecoderRawSpec.h"
2323
#include "CATrackerSpec.h"
24-
#include "RangeTokenizer.h"
24+
#include "Algorithm/RangeTokenizer.h"
2525
#include "TPCBase/Digit.h"
2626
#include "DataFormatsTPC/Constants.h"
2727
#include "DataFormatsTPC/ClusterGroupAttribute.h"

Detectors/TPC/workflow/src/tpc-reco-workflow.cxx

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -16,7 +16,7 @@
1616
#include "Framework/WorkflowSpec.h"
1717
#include "Framework/ConfigParamSpec.h"
1818
#include "TPCWorkflow/RecoWorkflow.h"
19-
#include "RangeTokenizer.h"
19+
#include "Algorithm/RangeTokenizer.h"
2020

2121
#include <string>
2222
#include <stdexcept>

0 commit comments

Comments
 (0)