Files
aoc/sol/24/day03.cpp
2024-12-08 22:35:49 +01:00

446 lines
13 KiB
C++

#include <cctype>
#include <cstddef>
#include <cstdint>
#include <expected>
#include <fstream>
#include <numeric>
#include <optional>
#include <ranges>
#include <string>
#include <string_view>
#include <vector>
#include "aoc.hpp"
#include "aoc/utils.hpp"
#include "fmt/format.h"
#include "ctre.hpp"
namespace npr {
// X-macro tables. Each list below is expanded by a locally (re)defined
// ENUMERATOR_AOC_* macro to generate the enums and the string/category
// mapping functions that follow, keeping all of them in sync with a
// single source of truth.
//
// ENUMERATOR_AOC_TOKEN(type, category): one lexer token kind and the
// token_category it belongs to.
#define ENUMERATOR_AOC_TOKENS \
ENUMERATOR_AOC_TOKEN(mul , operator_ ) \
ENUMERATOR_AOC_TOKEN(invalid , invalid ) \
ENUMERATOR_AOC_TOKEN(numeric_literal, number ) \
ENUMERATOR_AOC_TOKEN(newline , punctuation) \
ENUMERATOR_AOC_TOKEN(paren_open , punctuation) \
ENUMERATOR_AOC_TOKEN(paren_close , punctuation) \
ENUMERATOR_AOC_TOKEN(comma , punctuation) \
ENUMERATOR_AOC_TOKEN(identifier , identifier )
// ENUMERATOR_AOC_CATEGORY(type): coarse token classes referenced by the
// table above.
#define ENUMERATOR_AOC_CATEGORIES \
ENUMERATOR_AOC_CATEGORY(operator_) \
ENUMERATOR_AOC_CATEGORY(invalid) \
ENUMERATOR_AOC_CATEGORY(number) \
ENUMERATOR_AOC_CATEGORY(punctuation) \
ENUMERATOR_AOC_CATEGORY(identifier)
// ENUMERATOR_AOC_NODE_TYPE(type): parse-tree node kinds produced by the
// parser below (binary_expression is declared but not emitted by the
// visible code).
#define ENUMERATOR_AOC_NODE_TYPES \
ENUMERATOR_AOC_NODE_TYPE(numeric_literal) \
ENUMERATOR_AOC_NODE_TYPE(call_expression) \
ENUMERATOR_AOC_NODE_TYPE(binary_expression)
// Lexical token kinds, generated from ENUMERATOR_AOC_TOKENS.
// `_count` is a sentinel equal to the number of real enumerators.
enum class token_type : std::uint32_t {
#define ENUMERATOR_AOC_TOKEN(type, category) type,
ENUMERATOR_AOC_TOKENS
#undef ENUMERATOR_AOC_TOKEN
_count
};
// Coarse token classification, generated from ENUMERATOR_AOC_CATEGORIES.
enum class token_category : std::uint32_t {
#define ENUMERATOR_AOC_CATEGORY(type) type,
ENUMERATOR_AOC_CATEGORIES
#undef ENUMERATOR_AOC_CATEGORY
_count
};
// Parse-tree node kinds, generated from ENUMERATOR_AOC_NODE_TYPES.
enum class node_type : std::uint32_t {
#define ENUMERATOR_AOC_NODE_TYPE(type) type,
ENUMERATOR_AOC_NODE_TYPES
#undef ENUMERATOR_AOC_NODE_TYPE
_count
};
// Maps a token_type to its enumerator name for logging/debug output;
// out-of-range values (including `_count`) yield "invalid".
auto token_type_str(token_type type) -> char const* {
switch (type) {
using enum token_type;
#define ENUMERATOR_AOC_TOKEN(type, category) case type: return #type;
ENUMERATOR_AOC_TOKENS
#undef ENUMERATOR_AOC_TOKEN
default: return "invalid";
}
}
// Maps a token_type to its token_category per the ENUMERATOR_AOC_TOKENS
// table; out-of-range values fall back to token_category::invalid.
auto token_type_category(token_type type) -> token_category {
switch (type) {
using enum token_category;
#define ENUMERATOR_AOC_TOKEN(type, category) case token_type::type: return category;
ENUMERATOR_AOC_TOKENS
#undef ENUMERATOR_AOC_TOKEN
default: return token_category::invalid;
}
}
// Maps a node_type to its enumerator name for logging/debug output;
// out-of-range values (including `_count`) yield "invalid".
auto node_type_str(node_type type) -> char const* {
switch (type) {
using enum node_type;
#define ENUMERATOR_AOC_NODE_TYPE(type) case type: return #type;
ENUMERATOR_AOC_NODE_TYPES
#undef ENUMERATOR_AOC_NODE_TYPE
default: return "invalid";
}
}
// One scanned lexeme: its text, classification, and 1-based source
// position (row/column) as tracked by the lexer.
class token {
public:
    token(std::string const& str, token_type type, token_category category, std::size_t row, std::size_t col)
        : m_type(type)
        , m_category(category)
        , m_value(str)
        , m_row(row)
        , m_column(col) { }

    auto type() const -> token_type { return m_type; }
    auto category() const -> token_category { return m_category; }
    auto value() const -> std::string const& { return m_value; }
    auto row() const -> std::size_t { return m_row; }
    auto col() const -> std::size_t { return m_column; }

    // Debug rendering, e.g. `token { value: "mul", type: identifier, row: 1, col: 5 }`.
    auto str() const -> std::string {
        std::string out{"token {"};
        out.append(" value: \"").append(m_value).append("\",");
        out.append(" type: ").append(token_type_str(m_type)).append(",");
        out.append(" row: ").append(std::to_string(m_row)).append(",");
        out.append(" col: ").append(std::to_string(m_column));
        out.append(" }");
        return out;
    }

public:
    // True when `str` is one or more lowercase letters/apostrophes
    // (covers mul, do, don't).
    inline static auto is_identifier(std::string_view const& str) -> bool {
        return ctre::match<"^[a-z']+$">(str);
    }
    // True when `str` is one or more decimal digits.
    inline static auto is_numeric_literal(std::string_view const& str) -> bool {
        return ctre::match<"^[0-9]+$">(str);
    }

private:
    token_type m_type;
    token_category m_category;
    std::string m_value;
    std::size_t m_row;
    std::size_t m_column;
};
// Error conditions for the lexer.
// NOTE(review): unused by the visible code — next_token() signals end of
// input with an empty optional instead; confirm before removing.
enum class lexer_error {
eof,
unknown
};
class lexer {
public:
lexer(std::string const& source)
: m_strm(source, std::ios::in | std::ios::binary)
, m_line(1), m_col(1) {
}
auto tokenize() -> std::vector<token> {
std::vector<token> tokens{};
auto tk = next_token();
while (tk) {
tokens.emplace_back(std::move(tk.value()));
tk = next_token();
}
return tokens;
}
private:
auto next_token() -> std::optional<token> {
if (!has_next()) return {};
std::string str{};
auto const col = m_col;
if (peek() == '\n') {
peek_consume();
m_line = m_line + 1;
str += "\\n";
m_col = 1;
auto const& type = token_type::invalid;
return token(str, type, token_type_category(type), m_line, col);
}
// mul, do, don't identifier
if (peek() == 'm' || peek() == 'd') {
auto const is_valid_identifier_char = [](auto const c) {
return (c >= 'a' && c <= 'z') || c == '\'';
};
while (is_valid_identifier_char(peek())) str += peek_consume();
auto const check_type = [](auto const str) {
if (!token::is_identifier(str)) return token_type::invalid;
return token_type::identifier;
};
auto const& type = check_type(str);
return token(str, type, token_type_category(type), m_line, col);
}
if (peek() == '(') {
str += peek_consume();
return token{
str,
token_type::paren_open,
token_type_category(token_type::paren_open),
m_line,
col
};
}
if (peek() == ')') {
str += peek_consume();
return token{
str,
token_type::paren_close,
token_type_category(token_type::paren_close),
m_line,
col
};
}
if (peek() == ',') {
str += peek_consume();
return token{
str,
token_type::comma,
token_type_category(token_type::comma),
m_line,
col
};
}
if (std::isdigit(peek())) {
while(std::isdigit(peek())) str += peek_consume();
auto const& type = token::is_numeric_literal(str) ? token_type::numeric_literal : token_type::invalid;
return token(str, type, token_type_category(type), m_line, col);
}
if (!has_next()) return {};
str += peek_consume();
return token{
str,
token_type::invalid,
token_type_category(token_type::invalid),
m_line,
col
};
}
auto peek() -> char {
return static_cast<char>(m_strm.peek());
}
auto peek_consume() -> char {
++m_col;
return static_cast<char>(m_strm.get());
}
auto has_next() const -> bool {
return !m_strm.eof() && m_strm.good();
}
private:
std::fstream m_strm;
std::size_t m_line;
std::size_t m_col;
};
// One vertex of the parse tree: a node kind, the token it was built from,
// and any child nodes (the arguments of a call expression).
class node {
public:
    node(node_type type,
         token const& token,
         std::vector<node> const& nodes = {})
        : m_type(type)
        , m_token(token)
        , m_nodes(nodes) { }

    auto type() const -> node_type { return m_type; }
    auto token() const -> npr::token const& { return m_token; }
    auto nodes() const -> std::vector<node> const& { return m_nodes; }
    auto value() const -> std::string const& { return m_token.value(); }

    // Appends a child node (used by the parser while collecting call arguments).
    auto add_node(npr::node const& node) -> void {
        m_nodes.push_back(node);
    }

    // Debug rendering; call expressions additionally list their children.
    auto str() const -> std::string {
        std::string out{node_type_str(m_type)};
        out += " { value: ";
        out += m_token.value();
        if (m_type == node_type::call_expression) {
            out += call_expression_str();
        }
        out += " }";
        return out;
    }

private:
    // Renders the child list as `, [ 0: ..., 1: ... ]`; empty string when
    // there are no children.
    auto call_expression_str() const -> std::string {
        if (m_nodes.empty()) return "";
        std::string out{", ["};
        auto const last = m_nodes.size() - 1;
        for (std::size_t idx = 0; idx < m_nodes.size(); ++idx) {
            out += " ";
            out += std::to_string(idx);
            out += ": ";
            out += m_nodes[idx].str();
            if (idx != last) out += ",";
        }
        out += " ]";
        return out;
    }

private:
    node_type m_type;
    npr::token m_token;
    std::vector<node> m_nodes;
};
// Recursive-descent parser: turns the token stream into a flat list of
// nodes. Only `identifier (` call expressions produce nodes; every other
// token is skipped.
class parser {
public:
    parser() : m_cursor(0), m_tokens() { }
    // Parses `tokens` into zero or more call-expression nodes.
    auto parse(std::vector<token> const& tokens) -> std::vector<node> {
        m_cursor = 0;
        m_tokens = tokens;
        std::vector<node> nodes{};
        // BUGFIX: this was a do/while loop, which called parse_statement()
        // (and thus peek() -> m_tokens[0]) even when the token list was
        // empty — out-of-bounds access.
        while (has_next()) {
            auto n = parse_statement();
            if (n.has_value()) nodes.push_back(std::move(n.value()));
        }
        return nodes;
    }
private:
    // Parses one statement; always consumes at least one token so the
    // caller's loop makes progress.
    auto parse_statement() -> std::optional<node> {
        // BUGFIX: the lookahead guard was has_next() (offset 0), which is
        // always true once peek() succeeded; a trailing identifier made
        // peek_next() read one past the end. has_next(1) checks the index
        // peek_next() actually uses.
        if (peek().type() == token_type::identifier
            && has_next(1)
            && peek_next().type() == token_type::paren_open) {
            return parse_call_expression();
        }
        consume();
        return {};
    }
    // Precondition: cursor is on an identifier followed by '('.
    auto parse_call_expression() -> std::optional<node> {
        auto const& token_callee = peek();
        node callee{node_type::call_expression, token_callee};
        consume(); // identifier
        return parse_args(callee);
    }
    // Consumes '(' (number | ',')* ')' and adds each numeric literal as a
    // child of `callee`. Any other token aborts the call (empty optional).
    // NOTE(review): input that ends before ')' still yields a node —
    // deliberately lenient for this puzzle's corrupted memory dump.
    auto parse_args(npr::node callee) -> std::optional<node> {
        if (peek().type() != token_type::paren_open) return {};
        consume();
        while (has_next()) {
            auto const& arg_token = peek();
            if (arg_token.type() == token_type::numeric_literal) {
                callee.add_node({node_type::numeric_literal, arg_token});
                consume();
                continue;
            }
            if (arg_token.type() == token_type::comma) {
                consume();
                continue;
            }
            if (arg_token.type() == token_type::paren_close) {
                consume();
                break;
            }
            return {};
        }
        return callee;
    }
private:
    // Current token. Precondition: has_next().
    auto peek() const -> token const& {
        return m_tokens[m_cursor];
    }
    // Token after the current one. Precondition: has_next(1).
    auto peek_next() const -> token const& {
        return m_tokens[m_cursor + 1];
    }
    // Advances the cursor, saturating at the end of the token list.
    auto consume() -> void {
        if (m_cursor >= m_tokens.size()) return;
        ++m_cursor;
    }
    // True when the token at offset `i` from the cursor exists.
    auto has_next(std::size_t i = 0) const -> bool {
        return m_cursor + i < m_tokens.size();
    }
private:
    std::size_t m_cursor;
    std::vector<token> m_tokens;
};
}
// AoC 2024 day 3: scan corrupted memory for mul(X,Y) instructions.
// Part A sums the products of every valid mul; part B additionally honours
// do()/don't() toggles that enable/disable subsequent mul instructions.
auto aoc24::day03([[maybe_unused]]std::span<char const*> const& args) -> std::expected<void, aoc::error> {
    npr::lexer lexer{"./dat/24/re/03.txt"};
    npr::parser parser{};
    auto const tokens = lexer.tokenize();
    auto const nodes = parser.parse(tokens);
    // A mul only counts with exactly two arguments; `mul()` or `mul(1,2,3)`
    // is corrupted noise per the puzzle's strict mul(X,Y) grammar.
    // BUGFIX: the argument count was previously unchecked, so a parsed
    // `mul()` indexed nodes()[0]/nodes()[1] out of bounds.
    auto const is_mul = [](npr::node const& node) {
        return node.value() == "mul" && node.nodes().size() == 2;
    };
    auto const product = [](npr::node const& node) {
        auto const a = std::stoi(node.nodes()[0].value());
        auto const b = std::stoi(node.nodes()[1].value());
        return a * b;
    };
    auto part_a = nodes
        | std::views::filter(is_mul)
        | std::views::transform(product);
    auto const sum_a = std::accumulate(std::begin(part_a), std::end(part_a), 0);
    fmt::print("Part A: {}\n", sum_a);
    // Part B: keep only the nodes inside enabled regions. Instructions are
    // enabled at program start; don't() disables, do() re-enables.
    std::vector<npr::node> program{};
    bool enabled = true;
    for (auto const& node : nodes) {
        if (node.value() == "do") {
            enabled = true;
            continue;
        }
        if (node.value() == "don't") {
            enabled = false;
            continue;
        }
        if (enabled) program.push_back(node);
    }
    auto part_b = program
        | std::views::filter(is_mul)
        | std::views::transform(product);
    auto const sum_b = std::accumulate(std::begin(part_b), std::end(part_b), 0);
    fmt::print("Part B: {}\n", sum_b);
    return {};
}