diff --git a/test/.gitignore b/test/.gitignore
index d4c66e876..edddd2e40 100644
--- a/test/.gitignore
+++ b/test/.gitignore
@@ -1 +1,2 @@
-/*.h
+include/snippets/
+build/
diff --git a/test/CMakeLists.txt b/test/CMakeLists.txt
new file mode 100644
index 000000000..fe94c63e3
--- /dev/null
+++ b/test/CMakeLists.txt
@@ -0,0 +1,42 @@
+cmake_minimum_required(VERSION 3.30 FATAL_ERROR)
+
+project(cp-algorithms LANGUAGES CXX)
+
+# generating snippets
+set(SNIPPETS_DIR ${CMAKE_CURRENT_SOURCE_DIR}/include/snippets)
+find_package(Python3 3.10 REQUIRED COMPONENTS Interpreter)
+execute_process(
+    COMMAND
+        ${Python3_EXECUTABLE}
+        "${CMAKE_CURRENT_SOURCE_DIR}/scripts/extract_snippets.py"
+        --src-dir=${CMAKE_CURRENT_SOURCE_DIR}/../src
+        --target-dir=${SNIPPETS_DIR}
+        --remove-prev-target-dir
+    COMMAND_ERROR_IS_FATAL ANY
+)
+
+# loading googletest
+include(FetchContent)
+FetchContent_Declare(
+    googletest
+    GIT_REPOSITORY https://github.com/google/googletest.git
+    GIT_TAG v1.17.0
+)
+# For Windows: Prevent overriding the parent project's compiler/linker settings
+set(gtest_force_shared_crt ON CACHE BOOL "" FORCE)
+FetchContent_MakeAvailable(googletest)
+include(GoogleTest)
+
+set(CMAKE_CXX_STANDARD 20)
+set(CMAKE_CXX_STANDARD_REQUIRED ON)
+set(CMAKE_CXX_EXTENSIONS OFF)
+
+file(GLOB_RECURSE SNIPPETS_SOURCES CONFIGURE_DEPENDS ${SNIPPETS_DIR}/*.h)
+file(GLOB_RECURSE TEST_SOURCES CONFIGURE_DEPENDS src/test_*.cpp)
+
+add_executable(main ${TEST_SOURCES} ${SNIPPETS_SOURCES})
+target_link_libraries(main PRIVATE GTest::gtest_main GTest::gtest GTest::gmock)
+target_include_directories(main PRIVATE ${CMAKE_CURRENT_SOURCE_DIR}/include)
+
+enable_testing()
+gtest_discover_tests(main)
diff --git a/test/extract_snippets.py b/test/extract_snippets.py
deleted file mode 100755
index 69e796678..000000000
--- a/test/extract_snippets.py
+++ /dev/null
@@ -1,40 +0,0 @@
-#!/usr/bin/python3
-import re
-import os
-
-def write_snippet(name, lines):
-    file_name = '{}.h'.format(name)
-    with open(file_name, 'w') as f:
-        for line in lines:
-            f.write(line)
-
-def extract_tests(filepath):
-    filepath_short = os.path.basename(filepath)
-    article_name = filepath_short.split('.')[0]
-
-    snippet_start = re.compile(r"^```\{.cpp\s+file=(\S+)\}$")
-    snippet_end = re.compile(r"^```$")
-
-    with open(filepath) as f:
-        in_snippet = False;
-        for line in f:
-            m_start = snippet_start.match(line)
-            m_end = snippet_end.match(line)
-
-            if in_snippet and m_end:
-                in_snippet = False
-                write_snippet(snippet_name, lines)
-
-            if in_snippet:
-                lines.append(line)
-            elif m_start:
-                snippet_name = m_start.group(1)
-                lines = []
-                in_snippet = True
-
-
-if __name__ == '__main__':
-    for subdir, dirs, files in os.walk('../src/'):
-        for filename in files:
-            if filename.endswith(".md"):
-                extract_tests(os.path.join(subdir, filename))
diff --git a/test/scripts/extract_snippets.py b/test/scripts/extract_snippets.py
new file mode 100644
index 000000000..5a701ea65
--- /dev/null
+++ b/test/scripts/extract_snippets.py
@@ -0,0 +1,128 @@
+import argparse
+import re
+import os
+import sys
+import shutil
+import logging
+from typing import List, Optional
+from dataclasses import dataclass
+
+
+@dataclass
+class Snippet:
+    name: str
+    lines: List[str]
+
+
+def write_snippet(target_dir: os.PathLike, snippet: Snippet):
+    assert os.path.exists(target_dir) and os.path.isdir(target_dir)
+
+    file_name = f'{snippet.name}.h'
+    with open(os.path.join(target_dir, file_name), 'w', encoding='utf-8') as f:
+        f.writelines(snippet.lines)
+
+
+def extract_snippets(filepath: os.PathLike) -> List[Snippet]:
+    with open(filepath, 'r', encoding='utf-8') as f:
+        lines = f.readlines()
+
+    snippets = []
+
+    snippet_start = re.compile(r"^```\{.cpp\s+file=(\S+)\}$")
+    snippet_end = re.compile(r"^```$")
+
+    snippet_start_line: Optional[int] = None
+    snippet_name: Optional[str] = None
+
+    for line_idx, line in enumerate(lines):
+        match_snippet_start = snippet_start.match(line)
+        match_snippet_end = snippet_end.match(line)
+        assert not (match_snippet_start and match_snippet_end)
+
+        if match_snippet_start:
+            assert snippet_start_line is None
+            assert snippet_name is None
+
+            snippet_start_line = line_idx
+            snippet_name = match_snippet_start.group(1)
+        elif match_snippet_end:
+            if snippet_start_line is not None:
+                assert snippet_start_line is not None
+                assert snippet_name is not None
+
+                snippet = lines[snippet_start_line + 1: line_idx]
+
+                snippets.append(Snippet(name=snippet_name, lines=snippet))
+
+                snippet_start_line = None
+                snippet_name = None
+
+    return snippets
+
+
+def main(args: argparse.Namespace) -> None:
+    src_dir = args.src_dir
+    target_dir = args.target_dir
+
+    logging.info(f'--src-dir="{src_dir}"')
+    logging.info(f'--target-dir="{target_dir}"')
+
+    assert os.path.isdir(src_dir)
+
+    if args.remove_prev_target_dir and os.path.exists(target_dir):
+        logging.info(f'Script launched with --remove-prev-target-dir flag')
+        logging.info(f'Removing --target-dir="{target_dir}"')
+        shutil.rmtree(target_dir)
+
+    if not os.path.exists(target_dir):
+        logging.info(
+            f'--target-dir="{target_dir}" does not exist, creating')
+        os.makedirs(target_dir, exist_ok=False)
+    assert os.path.isdir(
+        target_dir), f'Failed to create --target-dir: "{target_dir}"'
+
+    snippets = []
+
+    for subdir, _, files in os.walk(src_dir):
+        for filename in files:
+            if filename.lower().endswith('.md'):
+                filepath = os.path.join(subdir, filename)
+                logging.debug(f'Extracting snippets from "{filepath}"')
+                snippets.extend(extract_snippets(filepath))
+
+    n_snippets = len(snippets)
+    for snippet_idx, snippet in enumerate(snippets, start=1):
+        logging.debug(
+            f'({snippet_idx}/{n_snippets}) writing snippet {snippet.name} to "{target_dir}"')
+        write_snippet(target_dir, snippet)
+
+    logging.info(
+        f'All done, {n_snippets} snippets have been written to "{target_dir}"')
+
+
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser(
+        description='Recursively extracts specially annotated cpp code snippets from src dir with .md files')
+
+    parser.add_argument('--src-dir', type=str, required=True,
+                        help='path to the directory with .md source to recursively look for cpp snippets with {.cpp file=...} annotation')
+    parser.add_argument('--target-dir', type=str, required=True,
+                        help='path to the resulting directory with .h snippets extracted from src-dir')
+    parser.add_argument('--remove-prev-target-dir', action='store_true',
+                        help='remove --target-dir prior to generating snippets')
+
+    logging_level_names = list(logging.getLevelNamesMapping().keys())
+    assert 'INFO' in logging_level_names
+    parser.add_argument('--logging-level', type=str, choices=logging_level_names,
+                        default='INFO', help='script logging level')
+
+    args = parser.parse_args()
+
+    logging.basicConfig(
+        stream=sys.stdout,
+        format='%(asctime)s %(module)-15s - [%(levelname)-6s] - %(message)s',
+        datefmt='%H:%M:%S',
+        level=args.logging_level
+    )
+
+    main(args)
diff --git a/test/src/test_2sat_new.cpp b/test/src/test_2sat_new.cpp
new file mode 100644
index 000000000..bcf6af421
--- /dev/null
+++ b/test/src/test_2sat_new.cpp
@@ -0,0 +1,53 @@
+#include <gmock/gmock.h>
+#include <gtest/gtest.h>
+
+#include <vector>
+using namespace std;
+
+#include "snippets/2sat.h"
+
+namespace
+{
+TEST(TwoSAT, ExampleUsage)
+{
+    TwoSatSolver::example_usage();
+}
+
+TEST(TwoSAT, ArticleExample)
+{
+    TwoSatSolver solver(3); // a, b, c
+    solver.add_disjunction(0, false, 1, true); // a v not b
+    solver.add_disjunction(0, true, 1, false); // not a v b
+    solver.add_disjunction(0, true, 1, true); // not a v not b
+    solver.add_disjunction(0, false, 2, true); // a v not c
+    EXPECT_TRUE(solver.solve_2SAT());
+    auto expected = vector<bool>{{false, false, false}};
+    EXPECT_EQ(solver.assignment, expected);
+}
+
+TEST(TwoSAT, Unsatisfiable)
+{
+    TwoSatSolver solver(2); // a, b
+    solver.add_disjunction(0, false, 1, false); // a v b
+    solver.add_disjunction(0, false, 1, true); // a v not b
+    solver.add_disjunction(0, true, 1, false); // not a v b
+    solver.add_disjunction(0, true, 1, true); // not a v not b
+    EXPECT_FALSE(solver.solve_2SAT());
+}
+
+TEST(TwoSAT, OtherSatisfiableExample)
+{
+    TwoSatSolver solver(4); // a, b, c, d
+    solver.add_disjunction(0, false, 1, true); // a v not b
+    solver.add_disjunction(0, true, 2, true); // not a v not c
+    solver.add_disjunction(0, false, 1, false); // a v b
+    solver.add_disjunction(3, false, 2, true); // d v not c
+    solver.add_disjunction(3, false, 0, true); // d v not a
+    EXPECT_TRUE(solver.solve_2SAT());
+    // two solutions
+    auto expected_1 = vector<bool>{{true, true, false, true}};
+    auto expected_2 = vector<bool>{{true, false, false, true}};
+    EXPECT_THAT(solver.assignment, ::testing::AnyOf(expected_1, expected_2));
+}
+
+} // namespace
diff --git a/test/src/test_aho_korasick_new.cpp b/test/src/test_aho_korasick_new.cpp
new file mode 100644
index 000000000..53eab9708
--- /dev/null
+++ b/test/src/test_aho_korasick_new.cpp
@@ -0,0 +1,61 @@
+#include <gtest/gtest.h>
+
+#include <algorithm>
+#include <string>
+#include <vector>
+using namespace std;
+
+namespace {
+
+namespace Trie {
+#include "snippets/aho_corasick_trie_definition.h"
+#include "snippets/aho_corasick_trie_add.h"
+} // namespace Trie
+
+namespace Automaton {
+#include "snippets/aho_corasick_automaton.h"
+} // namespace Automaton
+
+TEST(AhoKorasick, TrieAddString)
+{
+    using namespace Trie;
+
+    vector<string> set = {"a", "to", "tea", "ted", "ten", "i", "in", "inn"};
+    for (string s : set) {
+        add_string(s);
+    }
+
+    EXPECT_EQ(trie.size(), 11);
+}
+
+TEST(AhoKorasick, TrieAutomation)
+{
+    using namespace Automaton;
+
+    vector<string> set = {"a", "ab", "bab", "bc", "bca", "c", "caa"};
+    for (string s : set) {
+        add_string(s);
+    }
+    EXPECT_EQ(t.size(), 11);
+
+    int v = 0;
+    v = go(v, 'a');
+    EXPECT_TRUE(t[v].output);
+    v = go(v, 'b');
+    EXPECT_TRUE(t[v].output);
+    v = go(v, 'c');
+    EXPECT_TRUE(t[v].output);
+    v = go(v, 'd');
+    EXPECT_FALSE(t[v].output);
+    EXPECT_EQ(v, 0);
+    v = go(v, 'b');
+    EXPECT_FALSE(t[v].output);
+    v = go(v, 'a');
+    EXPECT_FALSE(t[v].output);
+    v = go(v, 'a');
+    EXPECT_TRUE(t[v].output);
+    v = go(v, 'b');
+    EXPECT_TRUE(t[v].output);
+}
+
+} // namespace
pFad - Phonifier reborn

Pfad - The Proxy pFad of © 2024 Garber Painting. All rights reserved.

Note: This service is not intended for secure transactions such as banking, social media, email, or purchasing. Use at your own risk. We assume no liability whatsoever for broken pages.


Alternative Proxies:

Alternative Proxy

pFad Proxy

pFad v3 Proxy

pFad v4 Proxy