Part 1: JSON Test Harness: Part 1
Part 3: JSON Test Harness: Part 3
Part 4: JSON Test Harness: Part 4

Time to review some test harness code I have written.

https://github.com/Loki-Astari/JsonBenchmark

I wrote a C++ JSON parser/serializer and wanted to compare its performance against other JSON parsers. The benchmark was originally written by somebody else (credit where credit is due; see the README) but has been heavily modified to use better C++ idioms (I hope).

This review is for the test suite itself. I tried to design it so that others can easily add new tests to the existing suites and also add entirely new test suites (a sketch of what a new suite might look like follows the TestSuite.h listing below).

https://github.com/Loki-Astari/JsonBenchmark/tree/master/src/benchmark

TestSuite.h

#ifndef THORS_ANVIL_BENCHMARK_BENCHMARK_H
#define THORS_ANVIL_BENCHMARK_BENCHMARK_H

#include "filesystem.h"
#include "ThirdParty/test.h"
#include "ThirdParty/TestManager.h"
#include <string>
#include <vector>
#include <fstream>

namespace ThorsAnvil
{
    namespace Benchmark
    {

struct Options
{
    std::string     testFilter      = R"([^/]*/[^/]*)";
    std::string     parserFilter    = "";
    std::ofstream   conformance;
    std::ofstream   performance;
};

class Test
{
    public:
        FileSystem::Path    path;
        std::string         input;
        std::string         output;

        Test(FileSystem::Path const& path);
        void clear();
        friend std::ostream& operator<<(std::ostream& str, Test const& test);
};

enum State {NotImplemented, Pass, Fail};
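// RAII helper: calls parser.SetUp(name) on construction and parser.TearDown(name)
// on destruction, when callFuncs is true.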
class TestSetUp
{
    TestBase const& parser;
    std::string     name;
    bool            callFuncs;
    public:
        TestSetUp(TestBase const& parser, std::string const& name, bool callFuncs);
        ~TestSetUp();
};

class TestSuite
{
    using Cont = std::vector<Test>;
    protected:
        Options&    options;
        Cont        tests;
    public:
        TestSuite(Options& options);
        void executeTestOnAllParsers(ParsrList const& parsrList);
        virtual void executeTest(TestBase const& parser);
        virtual State executeTest(TestBase const& parser, Test const& test) = 0;

        /* Interface for the range based for() */
        using iterator = Cont::iterator;
        iterator begin()                                            {return tests.begin();}
        iterator end()                                              {return tests.end();}
        void     emplace_back(FileSystem::Path const& path)         {tests.emplace_back(path);}

        virtual std::string getDir() const = 0;
    private:
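        // RAII helper: preloads every test's data on construction and releases
        // it again once the suite has run on all parsers.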
        struct DataLoader
        {
            TestSuite& benchmark;
            DataLoader(TestSuite& benchmark):benchmark(benchmark)   {benchmark.preload();}
           ~DataLoader()                                            {benchmark.clear();}
        };

        void preload();
        void clear();

        /* Used by preload() */
        virtual void preloadData(Test&) = 0;

        /* used in executeTest() to init TestSetUp() */
        virtual std::string setupName(Test const&) = 0;
        virtual bool useSetUp() const                               {return true;}

        /* used in executeTest() */
        virtual void generateConPerData(TestBase const& parser, Test const& test, State state) = 0;
        virtual void printResults(TestBase const& parser, int (&count)[3], std::vector<Test const*>& failed);

};

    }
}

#endif
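
To show the intended extension point, here is a minimal sketch of what a new suite might look like. Everything here is hypothetical (the class, its directory name, and the exact CSV columns it writes); it relies only on the virtual interface above and the GetName() member that TestBase is shown to have:

// NullChecker.h: hypothetical example suite that treats every test as a Pass.
#include "TestSuite.h"
#include <iterator>

namespace ThorsAnvil
{
    namespace Benchmark
    {

class NullChecker: public TestSuite
{
    public:
        using TestSuite::TestSuite;
        virtual std::string getDir() const override             {return "nullchecker";}
    private:
        virtual void preloadData(Test& test) override
        {
            // Slurp the whole file into the Test object.
            std::ifstream   file(test.path.str());
            test.input.assign(std::istreambuf_iterator<char>(file),
                              std::istreambuf_iterator<char>());
        }
        virtual std::string setupName(Test const& test) override
        {
            return test.path.str();
        }
        virtual State executeTest(TestBase const& parser, Test const& test) override
        {
            // A real suite would exercise the parser here and map the outcome
            // onto NotImplemented/Pass/Fail.
            return Pass;
        }
        virtual void generateConPerData(TestBase const& parser, Test const& test, State state) override
        {
            options.conformance << getDir() << "," << parser.GetName() << ","
                                << test << "," << (state == Pass ? "pass" : "fail") << "\n";
        }
};

    }
}

A suite like this would still need a matching data directory under DATA_DIR and an entry in tSuiteList in benchmark.cpp (shown below).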

TestSuite.cpp

#include "TestSuite.h"

using namespace ThorsAnvil::Benchmark;

Test::Test(FileSystem::Path const& path)
    : path(path)
{}

void Test::clear()
{
    std::string().swap(output);  // Shrink to default size
    std::string().swap(input);   // Shrink to default size
}

namespace ThorsAnvil
{
    namespace Benchmark
    {

std::ostream& operator<<(std::ostream& str, Test const& test)
{
    return str << test.path.str();
}
    }
}

TestSetUp::TestSetUp(TestBase const& parser, std::string const& name, bool callFuncs)
    : parser(parser)
    , name(name)
    , callFuncs(callFuncs)
{
    if (callFuncs)
    {   parser.SetUp(name.c_str());
    }
}

TestSetUp::~TestSetUp()
{
    if (callFuncs)
    {   parser.TearDown(name.c_str());
    }
}

TestSuite::TestSuite(Options& options)
    : options(options)
{}

void TestSuite::executeTestOnAllParsers(ParsrList const& parsrList)
{
    std::cerr << "BenchMark: " << getDir() << "\n";
    if (!tests.empty())
    {
        DataLoader  loadData(*this);

        for (auto const& parser: parsrList)
        {
            executeTest(*parser);
        }
    }
}

void TestSuite::executeTest(TestBase const& parser)
{
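    // count is indexed by the State enum: [0]=NotImplemented, [1]=Pass, [2]=Fail.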
    int count[3] = {0, 0, 0};
    std::vector<Test const*>  failed;

    for (auto const& test: tests)
    {
        TestSetUp   testSetUp(parser, setupName(test), useSetUp());

        State state = executeTest(parser, test);
        generateConPerData(parser, test, state);
        ++count[static_cast<int>(state)];
        if (state == Fail)
        {
            failed.push_back(&test);
        }
    }
    printResults(parser, count, failed);
}

void TestSuite::printResults(TestBase const& parser, int (&count)[3], std::vector<Test const*>& failed)
{
    std::cerr << "\tParser: " << parser.GetName();
    if (count[0] == 0 && count[2] == 0)
    {
        std::cerr << "  Perfect\n";
    }
    else
    {
        std::cerr << "\n";
        if (count[0] != 0)
        {
            std::cerr << "\t\tNot Implemented: " << count[0] << "\n";
        }
        std::cerr << "\t\tPass:            " << count[1] << "\n";
        std::cerr << "\t\tFail:            " << count[2] << "\n";
        for (auto const& fail: failed)
        {
            std::cerr << "\t\t\tFailed: " << *fail << "\n";
        }
    }
}

void TestSuite::preload()
{
    for (auto& test: tests)
    {
        preloadData(test);
    }
}

void TestSuite::clear()
{
    for (auto& test: tests)
    {
        test.clear();
    }
}

benchmark.cpp

#include "benchmarkConfig.h"
#include "TestSuite.h"
#include "ValidateString.h"
#include "ValidateFloat.h"
#include "PassChecker.h"
#include "FailChecker.h"
#include "RoundTripChecker.h"
#include "PerformanceChecker.h"

#include <algorithm>
#include <cstdlib>
#include <cstring>
#include <iostream>
#include <list>
#include <regex>

namespace BM = ThorsAnvil::Benchmark;

using DirIter   = ThorsAnvil::FileSystem::DirectoryIterator;
using TestSuiteList = std::list<BM::TestSuite*>;

BM::Options getOptions(int argc, char* argv[]);
void        displayOptions();
void        getTestSuiteList(std::string const& testFilter, TestSuiteList& tSuiteList);
ParsrList   getParsrList(std::string const& parserFilter);

int main(int argc, char* argv[])
{
    BM::Options  options = getOptions(argc, argv);
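    // Write the CSV headers for the two report files.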
    options.conformance << "Type,Library,Test,Result\n";
    options.performance << "Type,Library,Filename,Time (ms),Memory (byte),MemoryPeak (byte),AllocCount,LeakedBytes,LeakCount,FileSize (byte)\n";

    ParsrList    parsrList = getParsrList(options.parserFilter);

    BM::PassChecker         jsoncheckerPass(options);
    BM::FailChecker         jsoncheckerFail(options);
    BM::PerformanceChecker  performance(options);
    BM::RoundTripChecker    roundtrip(options);
    BM::ValidateFloat       validate_float(options);
    BM::ValidateString      validate_string(options);

    TestSuiteList tSuiteList = {&jsoncheckerPass,
                                &jsoncheckerFail,
                                &performance,
                                &roundtrip,
                                &validate_float,
                                &validate_string
                               };

    getTestSuiteList(options.testFilter, tSuiteList);
    for (auto const& test: tSuiteList)
    {
        test->executeTestOnAllParsers(parsrList);
    }
}

BM::Options getOptions(int argc, char* argv[])
{
    BM::Options result;
    int loop = 1;
    for (; loop < argc; ++loop)
    {
        if (strncmp(argv[loop], "--filter=", 9) == 0)
        {
            result.testFilter = argv[loop] + 9;
        }
        else if (strncmp(argv[loop], "--parser=", 9) == 0)
        {
            result.parserFilter = argv[loop] + 9;
        }
        else if (strcmp(argv[loop], "--help") == 0)
        {
            displayOptions();
            exit(0);
        }
        else if (strcmp(argv[loop], "--") == 0)
        {
            break;
        }
        else
        {
            if (argv[loop][0] != '-')
            {
                break;
            }
            std::cerr << "Invalid option: " << argv[loop] << "\n";
            displayOptions();
            exit(1);
        }
    }
    if (loop + 2 != argc)
    {
        std::cerr << "Invalid Options: Need two output file names\n";
        displayOptions();
        exit(1);
    }
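    // The two remaining arguments name the conformance and performance output files.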
    result.conformance.open(argv[loop]);
    if (!result.conformance)
    {
        std::cerr << "Failed to open 'conformance file'\n";
        exit(1);
    }
    result.performance.open(argv[loop + 1]);
    if (!result.performance)
    {
        std::cerr << "Failed to open 'performance file'\n";
        exit(1);
    }
    return result;
}

void displayOptions()
{
#pragma vera-pushoff
    std::cout << R"(
benchmark [--filter=<filter>] [--parser=<parser>] [--help] <conformance file> <performance file>
    filter: Default value [^/]*/[^/]*
        This option is used as a regular expression to decide what tests to perform.
        The first part decides the family of tests (jsonchecker/performance/roundtrip).
        The second part specifies a test name.
    parser:
        If unused this will run all the parsers.
        If specified it will run only the specified parser. It chooses the parser
        by comparing the given value against the result of each parser's GetName() method.
    conformance file:
        File conformance data is written to.
    performance file:
        File performance data is written to.
)";
#pragma vera-pop
}

void getTestSuiteList(std::string const& testFilter, TestSuiteList& suiteList)
{
    std::regex  expression(testFilter + R"(\.json$)");
    for (auto const& dir: DirIter(QUOTE(DATA_DIR)))
    {
        if (!dir.isNormalDir())
        {   continue;
        }
        for (auto const& file: DirIter(dir.path()))
        {
            if (!file.isNormalFile())
            {   continue;
            }

            std::string testName = dir.name() + "/" + file.name();
            if (testName.find("EXCLUDE") != std::string::npos)
            {   continue;
            }
            if (!regex_search(testName, expression))
            {   continue;
            }

            auto find = std::find_if(std::begin(suiteList), std::end(suiteList),
                                     [&dir](BM::TestSuite const* test){return test->getDir() == dir.name();}
                                    );
            if (find != std::end(suiteList))
            {
                (*find)->emplace_back(file.path());
            }
        }
    }
}

ParsrList getParsrList(std::string const& parserFilter)
{
    ParsrList    result = TestManager::instance().getTests();
    if (!parserFilter.empty())
    {
        result.erase(std::remove_if(std::begin(result),
                                    std::end(result),
                                    [&parserFilter](TestBase const* parser){return parser->GetName() != parserFilter;}
                                   ),
                     std::end(result)
                    );
    }

    return result;
}
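
For reference, a hypothetical invocation that runs only the performance suite against a single parser (assuming a parser whose GetName() returns "rapidjson" is registered) would look like:

benchmark --filter=performance/.* --parser=rapidjson conformance.csv performance.csv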