From 230695985c0ed6401562ef563d6548f28f5a1a11 Mon Sep 17 00:00:00 2001
From: Vertexwahn
Date: Wed, 14 Aug 2024 18:12:42 +0200
Subject: [PATCH] Add tokenize test

GitOrigin-RevId: 8d12af5b0a85d52abea2cad51a9cff87e8d4ee7b
---
 .../flatland/rendering/scene/load_scene_test.cpp   | 13 +++++++++++++
 1 file changed, 13 insertions(+)

diff --git a/devertexwahn/flatland/rendering/scene/load_scene_test.cpp b/devertexwahn/flatland/rendering/scene/load_scene_test.cpp
index ce5f6714..d8a5e7df 100644
--- a/devertexwahn/flatland/rendering/scene/load_scene_test.cpp
+++ b/devertexwahn/flatland/rendering/scene/load_scene_test.cpp
@@ -16,10 +16,23 @@ namespace de_vertexwahn {
     Point3f convert_csv_to_point3f(const std::string& csv);
     template <>
     Transform44f read_transform<3>(const pugi::xml_node &xmlTransform);
+    std::vector<std::string> tokenize(const std::string &string,
+                                      const std::string &delim = ", ",
+                                      bool include_empty = false);
 }
 
 using namespace de_vertexwahn;
 
+TEST(tokenize, TokenizeTrivialList) {
+    std::string input = "1, 2, 3";
+    auto tokens = tokenize(input);
+
+    EXPECT_THAT(tokens.size(), 3u);
+    EXPECT_THAT(tokens[0], "1");
+    EXPECT_THAT(tokens[1], "2");
+    EXPECT_THAT(tokens[2], "3");
+}
+
 TEST(load_scene, convert_csv_to_vector2f) {
     std::string input = "2.3, 3.4";
     Vector2f v = convert_csv_to_vector2f(input);