Compare commits

...

7 Commits

Author SHA1 Message Date
6df1e1c3ec Changed computers! (No, day 12 isn't finished) 2023-12-13 08:08:13 +01:00
a9d7c6a101 day 12 part 1 completed 2023-12-12 18:05:42 +01:00
f234fc62bf Added template + day12 in progress 2023-12-12 08:06:58 +01:00
ed55a50ec7 Day 11 completed 2023-12-11 11:15:05 +01:00
64a270ee39 Day 10 completed
completed with the help of shapely python module

other minor changes in day5
2023-12-10 17:34:02 +01:00
cdaa539b79 Refinement in test output
I use logging.debug now
2023-12-09 10:21:55 +01:00
f641fbac07 Day 9 Completed
put example_input.txt back in repos, for unittest purposes
2023-12-09 09:35:56 +01:00
41 changed files with 980 additions and 16 deletions

4
.gitignore vendored

@@ -160,5 +160,7 @@ cython_debug/
 #.idea/
 **/*Zone.Identifier
-**/*input*
+**/input*
 cookies.txt
+.vscode/

173
day10/common.py Normal file

@@ -0,0 +1,173 @@
from __future__ import annotations
from dataclasses import dataclass
from collections import defaultdict
import logging
from typing import NamedTuple, TypeGuard
from enum import Enum
import sys
class Point(NamedTuple):
x: int
y: int
c: str
def add_delta(self, delta: PointDelta, labyrinth: Labyrinth) -> Point | None:
if (0 <= self.x + delta.x < len(labyrinth[0])) and (
0 <= self.y + delta.y < len(labyrinth)
):
return labyrinth[self.y + delta.y][self.x + delta.x]
return None
class PointDelta(NamedTuple):
x: int
y: int
class Deltas(Enum):
UP = PointDelta(0, -1)
DOWN = PointDelta(0, 1)
LEFT = PointDelta(-1, 0)
RIGHT = PointDelta(1, 0)
NONE = PointDelta(0, 0)
NO_MOVEMENT = (Deltas.NONE, Deltas.NONE)
PIPES = defaultdict(
lambda: NO_MOVEMENT,
[
("|", (Deltas.UP, Deltas.DOWN)),
("-", (Deltas.LEFT, Deltas.RIGHT)),
("L", (Deltas.UP, Deltas.RIGHT)),
("J", (Deltas.UP, Deltas.LEFT)),
("7", (Deltas.DOWN, Deltas.LEFT)),
("F", (Deltas.DOWN, Deltas.RIGHT)),
],
)
@dataclass(init=False)
class Labyrinth:
_data: tuple[tuple[Point, ...], ...] # stored as [Y][X]
start: Point
first_points: tuple[Point, ...]
def __init__(
self,
data: tuple[tuple[Point, ...], ...],
start: Point,
first_points: tuple[Point, ...] | None = None,
):
self._data = data
self.start = start
self.first_points = (
self._find_first_points() if first_points is None else first_points
)
logging.debug(("start", start))
logging.debug(("first_points", self.first_points))
def __getitem__(self, key: int) -> tuple[Point, ...]:
return self._data[key]
def __len__(self):
return len(self._data)
def __iter__(self):
return iter(self._data)
def _find_first_points(self) -> tuple[Point, ...]:
up = self.start.add_delta(Deltas.UP.value, self)
up = up if up is not None and Deltas.DOWN in PIPES[up.c] else None
down = self.start.add_delta(Deltas.DOWN.value, self)
down = down if down is not None and Deltas.UP in PIPES[down.c] else None
left = self.start.add_delta(Deltas.LEFT.value, self)
left = left if left is not None and Deltas.RIGHT in PIPES[left.c] else None
right = self.start.add_delta(Deltas.RIGHT.value, self)
right = right if right is not None and Deltas.LEFT in PIPES[right.c] else None
def point_is_not_none(n: Point | None) -> TypeGuard[Point]: # makes mypy happy
return n is not None
return tuple(filter(point_is_not_none, [up, down, left, right]))
def get_labyrinth(self):
return self._data
def get_path(self) -> list[Point]:
path = [self.start]
previous_point = self.start
current_point = self.first_points[0]
while current_point != self.start:
path.append(current_point)
new_point = movement(current_point, previous_point, self)
previous_point = current_point
current_point = new_point
logging.debug(current_point)
path.append(current_point)
return path
def get_empty_points(self) -> list[Point]:
empty_points = []
for line in self._data:
for point in line:
if point.c == ".":
empty_points.append(point)
return empty_points
def clean(self) -> Labyrinth:
path = self.get_path()
new_data = []
for y, line in enumerate(self._data):
new_line = []
for x, point in enumerate(line):
if point not in path:
new_line.append(Point(x, y, "."))
else:
new_line.append(point)
new_data.append(tuple(new_line))
return Labyrinth(new_data, self.start, self.first_points)
def show(self, descriptor=sys.stdout) -> None:
for line in self._data:
linestr = "".join(map(lambda p: p.c, line)) + "\n"
descriptor.write(linestr)
def movement(
current_point: Point, previous_point: Point, labyrinth: Labyrinth
) -> Point:
movement_type = PIPES[current_point.c]
if movement_type == NO_MOVEMENT:
return current_point
movement_value = tuple(x.value for x in movement_type)
diff = PointDelta(
previous_point.x - current_point.x, previous_point.y - current_point.y
)
if diff not in movement_value:
return current_point
idx = 0 if movement_value.index(diff) == 1 else 1
return labyrinth[current_point.y + movement_value[idx].y][
current_point.x + movement_value[idx].x
]
def parse(input_file: str) -> Labyrinth:
logging.debug(f"parsing {input_file}")
array = []
start_point = Point(-1, -1, "")
line_no = 0
with open(input_file, "r", encoding="UTF-8") as input_handle:
while line := input_handle.readline().rstrip("\n").rstrip():
ar_line = []
for col_no, char in enumerate(line):
new_point = Point(col_no, line_no, char)
logging.debug(f"new point: {new_point}")
ar_line.append(new_point)
if char == "S":
start_point = new_point
array.append(tuple(ar_line))
line_no += 1
return Labyrinth(data=tuple(array), start=start_point)

5
day10/example_input_part1.txt Normal file

@@ -0,0 +1,5 @@
7-F7-
.FJ|7
SJLL7
|F--J
LJ.LJ

9
day10/example_input_part2.txt Normal file

@@ -0,0 +1,9 @@
...........
.S-------7.
.|F-----7|.
.||.....||.
.||.....||.
.|L-7.F-J|.
.|..|.|..|.
.L--J.L--J.
...........

34
day10/part1.py Executable file

@@ -0,0 +1,34 @@
#!/usr/bin/env python3.11
from collections.abc import Iterable
from common import *
def all_equals(points: list[Point]) -> bool:
first = points[0]
logging.debug(f"testing {points} with value {first}")
return all(map(lambda x: x == first, points))
def farthest_length(labyrinth: Labyrinth) -> int:
length = 1
current_vectors = list(
((labyrinth.start, point) for point in labyrinth.first_points)
)
logging.debug(f"first vectors: {current_vectors}")
current_points = list(vector[1] for vector in current_vectors)
logging.debug(current_points)
while not all_equals(current_points):
new_vectors = []
new_points = []
for vector in current_vectors:
new_vector = (vector[1], movement(vector[1], vector[0], labyrinth))
new_vectors.append(new_vector)
new_points.append(new_vector[1])
current_vectors = new_vectors
current_points = new_points
length += 1
return length
if __name__ == "__main__":
print(farthest_length(parse("input.txt")))

27
day10/part2.py Executable file

@@ -0,0 +1,27 @@
#!/usr/bin/env python3.11
from common import *
import shapely
def inside_points(labyrinth: Labyrinth) -> list[Point]:
labyrinth = labyrinth.clean()
path = labyrinth.get_path()
empty_points = labyrinth.get_empty_points()
logging.debug("let's shapely that")
shp_path = []
for point in path:
shp_path.append(shapely.Point(point.x, point.y))
polygon = shapely.Polygon(shp_path)
inside_points = []
for point in empty_points:
current = shapely.Point(point.x, point.y)
if shapely.contains(polygon, current):
inside_points.append(point)
return inside_points
if __name__ == "__main__":
labyrinth = parse("input.txt")
print(len(inside_points(labyrinth)))

32
day10/test.py Executable file

@@ -0,0 +1,32 @@
#!/usr/bin/env python3.11
import logging
from unittest import TestCase, main
from part1 import farthest_length
from part2 import inside_points
from common import parse, Point
class Day10Tests(TestCase):
def test_parsing(self):
labyrinth = parse("example_input_part1.txt")
self.assertEqual(labyrinth.start, Point(0, 2, "S"))
self.assertEqual(labyrinth.first_points, (Point(0, 3, "|"), Point(1, 2, "J")))
def test_part1(self):
self.assertEqual(8, farthest_length(parse("example_input_part1.txt")))
def test_part2(self):
self.assertEqual(
[
Point(x=2, y=6, c="."),
Point(x=3, y=6, c="."),
Point(x=7, y=6, c="."),
Point(x=8, y=6, c="."),
],
inside_points(parse("example_input_part2.txt")),
)
if __name__ == "__main__":
logging.basicConfig(level=logging.DEBUG)
main(verbosity=2)

139
day11/common.py Normal file

@@ -0,0 +1,139 @@
from __future__ import annotations
from dataclasses import dataclass
import logging
from itertools import combinations
# from pprint import pformat
from typing import NamedTuple
class Coordinate(NamedTuple):
x: int
y: int
c: str
def is_galaxy(self):
return self.c == "#"
def distance(
self, other: Coordinate, universe: Universe | None = None, factor: int = 1
) -> int:
current = self
movements = 0
while current != other:
if current.x < other.x:
movements += 1
current = Coordinate(current.x + 1, current.y, current.c)
elif current.x > other.x:
movements += 1
current = Coordinate(current.x - 1, current.y, current.c)
if universe is not None:
if current.x in universe.empty_cols:
movements += -1 + factor
if current.y < other.y:
movements += 1
current = Coordinate(current.x, current.y + 1, current.c)
elif current.y > other.y:
movements += 1
current = Coordinate(current.x, current.y - 1, current.c)
if universe is not None:
if current.y in universe.empty_lines:
movements += -1 + factor
logging.debug(f"distance between {self} and {other} = {movements}")
return movements
def pairs_of_stars(universe: Universe) -> list[tuple[Coordinate, Coordinate]]:
return list(combinations(universe.stars, 2))
@dataclass(init=False)
class Universe:
_data: list[list[Coordinate]]
stars: list[Coordinate]
empty_lines: list[int] | None
empty_cols: list[int] | None
def __init__(self, data: list[list[Coordinate]], part: int = 1):
self._data = data
if part == 1:
self._expand_universe()
self.stars = []
stars = [filter(lambda c: c.is_galaxy(), line) for line in self._data]
for line in stars:
self.stars.extend(line)
if part == 2:
self.empty_cols = []
self.empty_lines = []
for x in range(len(self._data[0])):
if all(
map(lambda c: not c.is_galaxy(), [line[x] for line in self._data])
):
self.empty_cols.append(x)
for line in self._data:
if all(map(lambda c: not c.is_galaxy(), line)):
self.empty_lines.append(line[0].y)
def __getitem__(self, key: int) -> list[Coordinate]:
return self._data[key]
def __len__(self):
return len(self._data)
def __iter__(self):
return iter(self._data)
def _expand_universe(self) -> None:
# universe expansion on x
# first find columns with no galaxy
empty_cols: list[int] = []
for x in range(len(self._data[0])):
if all(map(lambda c: not c.is_galaxy(), [line[x] for line in self._data])):
empty_cols.append(x)
for y, line in enumerate(self._data):
x_offset = 0
new_line = []
for col in line:
if x_offset > 0:
new_col = Coordinate(col.x + x_offset, col.y, col.c)
new_line.append(new_col)
else:
new_line.append(col)
if col.x in empty_cols:
x_offset += 1
new_col = Coordinate(col.x + x_offset, col.y, col.c)
new_line.append(new_col)
self._data[y] = new_line
# universe expansion on y
new_universe = []
y_offset = 0
for y, line in enumerate(self._data):
if y_offset > 0:
new_line = [Coordinate(o.x, o.y + y_offset, o.c) for o in line]
new_universe.append(new_line)
else:
new_universe.append(line)
if all(map(lambda c: not c.is_galaxy(), line)):
y_offset += 1
new_line = [Coordinate(o.x, o.y + y_offset, o.c) for o in line]
new_universe.append(new_line)
self._data = new_universe
# logging.debug(pformat(self._data))
def get_map(self):
return self._data
def parse(input_file: str, part: int = 1) -> Universe:
data = []
y = 0
with open(input_file, "r", encoding="utf-8") as input_fd:
while line := input_fd.readline():
data.append(
[Coordinate(x, y, c) for x, c in enumerate(line.rstrip("\n").rstrip())]
)
y += 1
# logging.debug(pformat(data))
return Universe(data, part)

10
day11/example_input.txt Normal file

@@ -0,0 +1,10 @@
...#......
.......#..
#.........
..........
......#...
.#........
.........#
..........
.......#..
#...#.....

14
day11/part1.py Executable file

@@ -0,0 +1,14 @@
#!/usr/bin/env python3.11
from common import *
def part1(lpairs_of_stars: list[tuple[Coordinate, Coordinate]]) -> int:
logging.debug(f"pairs={lpairs_of_stars}")
distances = list(map(lambda gs: gs[0].distance(gs[1]), lpairs_of_stars))
logging.debug(f"distances={distances}")
return sum(distances)
if __name__ == "__main__":
# logging.basicConfig(level=logging.DEBUG)
print(part1(pairs_of_stars(parse("input.txt"))))

15
day11/part2.py Executable file

@@ -0,0 +1,15 @@
#!/usr/bin/env python3.11
from common import *
def part2(universe: Universe, factor: int) -> int:
pairs = pairs_of_stars(universe)
logging.debug(f"pairs={pairs}")
distances = list(map(lambda gs: gs[0].distance(gs[1], universe, factor), pairs))
logging.debug(f"distances={distances}")
return sum(distances)
if __name__ == "__main__":
print(part2(parse("input.txt", part=2), 1000000))

64
day11/test.py Executable file

@@ -0,0 +1,64 @@
#!/usr/bin/env python3.11
import logging
from unittest import TestCase, main
from common import Coordinate, parse, pairs_of_stars
from part1 import part1
from part2 import part2
class Day11Test(TestCase):
def test_parse(self):
universe_map = [
[Coordinate(x, 0, ".") for x in range(4)]
+ [Coordinate(4, 0, "#")]
+ [Coordinate(x, 0, ".") for x in range(5, 13)],
[Coordinate(x, 1, ".") for x in range(9)]
+ [Coordinate(9, 1, "#")]
+ [Coordinate(x, 1, ".") for x in range(10, 13)],
[Coordinate(0, 2, "#")] + [Coordinate(x, 2, ".") for x in range(1, 13)],
[Coordinate(x, 3, ".") for x in range(13)],
[Coordinate(x, 4, ".") for x in range(13)],
[Coordinate(x, 5, ".") for x in range(8)]
+ [Coordinate(8, 5, "#")]
+ [Coordinate(x, 5, ".") for x in range(9, 13)],
[Coordinate(0, 6, "."), Coordinate(1, 6, "#")]
+ [Coordinate(x, 6, ".") for x in range(2, 13)],
[Coordinate(x, 7, ".") for x in range(12)] + [Coordinate(12, 7, "#")],
[Coordinate(x, 8, ".") for x in range(13)],
[Coordinate(x, 9, ".") for x in range(13)],
[Coordinate(x, 10, ".") for x in range(9)]
+ [Coordinate(9, 10, "#")]
+ [Coordinate(x, 10, ".") for x in range(10, 13)],
[Coordinate(0, 11, "#")]
+ [Coordinate(x, 11, ".") for x in range(1, 5)]
+ [Coordinate(5, 11, "#")]
+ [Coordinate(x, 11, ".") for x in range(6, 13)],
]
stars = [
Coordinate(4, 0, "#"),
Coordinate(9, 1, "#"),
Coordinate(0, 2, "#"),
Coordinate(8, 5, "#"),
Coordinate(1, 6, "#"),
Coordinate(12, 7, "#"),
Coordinate(9, 10, "#"),
Coordinate(0, 11, "#"),
Coordinate(5, 11, "#"),
]
universe = parse("example_input.txt")
self.assertEqual(universe_map, universe.get_map())
self.assertEqual(stars, universe.stars)
def test_part1(self):
self.assertEqual(374, part1(pairs_of_stars(parse("example_input.txt"))))
def test_part2(self):
self.assertEqual(1030, part2(parse("example_input.txt", part=2), factor=10))
self.assertEqual(8410, part2(parse("example_input.txt", part=2), factor=100))
if __name__ == "__main__":
logging.basicConfig(level=logging.DEBUG)
main(verbosity=2)

72
day12/common.py Normal file

@@ -0,0 +1,72 @@
from __future__ import annotations
from dataclasses import dataclass
from typing import NamedTuple, NewType, Self
import re
import logging
def im_replace(line: str, mask: str) -> str:
idx = 0
new_line = ""
for c in line:
if c != "?":
new_line += c
else:
new_line += mask[idx]
idx += 1
return new_line
class SpringLine(NamedTuple):
line: str
groups: tuple[int, ...]
def arrangments(self) -> list[str]:
count_im = self.line.count("?")
logging.debug(count_im)
possibles = []
for i in range(pow(2, count_im)):
debug_str = ""
rep_str = f"{i:b}".rjust(count_im, "0").replace("0", ".").replace("1", "#")
debug_str += rep_str + " / "
tentative = im_replace(self.line, rep_str)
possible = SpringLine(tentative, self.groups)
debug_str += str(possible) + ": "
if possible.is_valid():
possibles.append(im_replace(self.line, rep_str))
debug_str += "valid"
else:
debug_str += "invalid"
logging.debug(debug_str)
return possibles
def is_valid(self) -> bool:
pattern = "^\.*" + "\.+".join(["#" * i for i in self.groups]) + "\.*$"
# logging.debug(pattern)
return re.match(pattern, self.line)
def unfold(self) -> SpringLine:
new_line = []
new_groups = []
for i in range(5):
new_line.append(self.line)
new_groups += self.groups
return SpringLine("?".join(new_line), new_groups)
Spring = NewType("Spring", list[SpringLine])
def parse(input_file: str) -> Spring:
spring_lines = []
with open(input_file, "r", encoding="utf-8") as inputfd:
while line := inputfd.readline():
if match := re.match("([?#.]+)\s([0-9,]+)", line):
# logging.debug(match.group(0))
spring_lines.append(
SpringLine(
match.group(1),
tuple(map(lambda c: int(c), match.group(2).split(","))),
)
)
return Spring(spring_lines)
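
Note: arrangments() above enumerates all 2**n fills of the "?" cells (pow(2, count_im)), which is workable for part 1 but far too slow once the lines are unfolded five times for part 2, and the latest commit message says day 12 is not finished. Below is a minimal sketch of the usual memoized counting alternative; it is not part of this commit, and the function name is my own.

# sketch only: a memoised count, not the approach committed in day12/common.py
from functools import cache

@cache
def count_arrangements(line: str, groups: tuple[int, ...]) -> int:
    # Count fillings of '?' whose '#' runs match `groups`, without 2**n masks.
    if not groups:
        return 0 if "#" in line else 1
    if len(line) < sum(groups) + len(groups) - 1:
        return 0  # not enough room left for the remaining runs
    total = 0
    if line[0] in ".?":
        total += count_arrangements(line[1:], groups)  # treat first cell as operational
    run = groups[0]
    if "." not in line[:run] and (len(line) == run or line[run] != "#"):
        total += count_arrangements(line[run + 1:], groups[1:])  # place a run of '#' here
    return total

# e.g. the last example line, unfolded five times as in part 2:
print(count_arrangements("?".join(["?###????????"] * 5), (3, 2, 1) * 5))  # 506250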

6
day12/example_input.txt Normal file

@@ -0,0 +1,6 @@
???.### 1,1,3
.??..??...?##. 1,1,3
?#?#?#?#?#?#?#? 1,3,1,6
????.#...#... 4,1,1
????.######..#####. 1,6,5
?###???????? 3,2,1

13
day12/part1.py Executable file

@@ -0,0 +1,13 @@
#!/usr/bin/env python3.11
from common import *
def part1(input_file: str) -> int:
spring = parse(input_file)
#interm = list(map(lambda s: s.arrangments(), spring))
#interm = list(map(len, interm))
return sum(list(map(len, map(lambda s: s.arrangments(), spring))))
if __name__=="__main__":
print(part1("input.txt"))

8
day12/part2.py Executable file

@@ -0,0 +1,8 @@
#!/usr/bin/env python3.11
from common import *
def part2(input_file: str) -> int:
spring = parse(input_file)
return sum(list(map(len, map(lambda s: s.unfold().arrangments(), spring))))

32
day12/test.py Executable file

@@ -0,0 +1,32 @@
#!/usr/bin/env python3.11
import logging
from unittest import TestCase, main
from common import *
from part1 import part1
from part2 import part2
class Day12Tests(TestCase):
def test_parsing(self):
parsed_spring = parse("example_input.txt")
spring: Spring = [
SpringLine("???.###", (1, 1, 3)),
SpringLine(".??..??...?##.", (1, 1, 3)),
SpringLine("?#?#?#?#?#?#?#?", (1, 3, 1, 6)),
SpringLine("????.#...#...", (4, 1, 1)),
SpringLine("????.######..#####.", (1, 6, 5)),
SpringLine("?###????????", (3, 2, 1)),
]
self.assertEqual(parsed_spring, spring)
def test_part1(self):
self.assertEqual(part1("example_input.txt"), 21)
def test_part2(self):
self.assertEqual(part2("example_input.txt"), 525152)
if __name__ == "__main__":
logging.basicConfig(level=logging.DEBUG)
main(verbosity=2)

35
day13/common.py Normal file

@@ -0,0 +1,35 @@
from dataclasses import dataclass
from collections.abc import Iterator
@dataclass(init=False)
class MirrorField:
_data: list[str]
def __getitem__(self, key: int) -> str:
return self._data[key]
def __len__(self) -> int:
return len(self._data)
def __iter__(self) -> Iterator[str]:
return iter(self._data)
def iter_horizontal(self) -> Iterator[str]:
return iter(self)
def iter_vertical(self) -> Iterator[str]:
x = 0
while x < len(self._data[0]):
the_str = ""
for line in self._data:
the_str += line[x]
yield the_str
x += 1
def find_symetry(self) -> tuple[str, int] | None:
"""Return a tuple indicating a symetry horizontal (h) or vertical (v) and the first row or column index"""
def parse(input_file: str) -> MirrorField:
return MirrorField()
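
Note: in this commit find_symetry() is only a stub with a docstring and parse() returns an empty MirrorField. A minimal standalone sketch of one way the reflection search could look, assuming the usual day 13 convention of returning the number of rows (or columns) before the mirror line; it is not part of the commit and the helper name is my own.

# sketch only: not part of the committed day13/common.py
def find_reflection(rows: list[str]) -> tuple[str, int] | None:
    def mirror_row(lines: list[str]) -> int | None:
        # A mirror sits between lines i-1 and i when the block above, read
        # outwards, equals the block below until one side runs out.
        for i in range(1, len(lines)):
            above, below = lines[:i][::-1], lines[i:]
            if all(a == b for a, b in zip(above, below)):
                return i
        return None

    cols = ["".join(row[x] for row in rows) for x in range(len(rows[0]))]
    if (i := mirror_row(rows)) is not None:
        return ("h", i)
    if (i := mirror_row(cols)) is not None:
        return ("v", i)
    return None

# second pattern of day13/example_input.txt: mirror between rows 4 and 5
pattern = ["#...##..#", "#....#..#", "..##..###", "#####.##.",
           "#####.##.", "..##..###", "#....#..#"]
assert find_reflection(pattern) == ("h", 4)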

15
day13/example_input.txt Normal file

@@ -0,0 +1,15 @@
#.##..##.
..#.##.#.
##......#
##......#
..#.##.#.
..##..##.
#.#.##.#.

#...##..#
#....#..#
..##..###
#####.##.
#####.##.
..##..###
#....#..#

7
day13/part1.py Executable file

@@ -0,0 +1,7 @@
#!/usr/bin/env python3.11
from common import *
def part1(input_file: str) -> int:
return 0

7
day13/part2.py Executable file

@@ -0,0 +1,7 @@
#!/usr/bin/env python3.11
from common import *
def part2(input_file: str) -> int:
return 0

23
day13/test.py Executable file

@@ -0,0 +1,23 @@
#!/usr/bin/env python3.11
from unittest import TestCase, main
import logging
from common import *
from part1 import part1
from part2 import part2
class DayXTests(TestCase):
def test_parsing(self):
pass
def test_part1(self):
self.assertEqual(405, part1("example_input.txt"))
def test_part2(self):
pass
if __name__ == "__main__":
logging.basicConfig(level=logging.DEBUG)
main(verbosity=2)


@@ -1,6 +1,7 @@
 import re
 from dataclasses import dataclass
-from typing import Optional, Dict, List, Tuple
+from typing import Optional, Dict, List, Tuple, OrderedDict as OrderedDictType
+from collections import OrderedDict

 @dataclass
@@ -18,17 +19,17 @@ class AlmanacMap:
         return self.destination + (source - self.source)

-def extract(input_file: str) -> Tuple[List[int], Dict[str, List[AlmanacMap]]]:
+def extract(input_file: str) -> Tuple[List[int], OrderedDictType[str, List[AlmanacMap]]]:
     seeds = []
-    maps = {
-        "seed-to-soil": [],
-        "soil-to-fertilizer": [],
-        "fertilizer-to-water": [],
-        "water-to-light": [],
-        "light-to-temperature": [],
-        "temperature-to-humidity": [],
-        "humidity-to-location": [],
-    }
+    maps = OrderedDict(
+        ("seed-to-soil", []),
+        ("soil-to-fertilizer", []),
+        ("fertilizer-to-water", []),
+        ("water-to-light", []),
+        ("light-to-temperature", []),
+        ("temperature-to-humidity", []),
+        ("humidity-to-location", []),
+    )

     with open(input_file) as input:
         current_map = {}
@@ -44,3 +45,21 @@ def extract(input_file: str) -> Tuple[List[int], Dict[str, List[AlmanacMap]]]:
                 destination, source, length = match.group(1).split(" ")
                 current_map.append(AlmanacMap(destination=int(destination), source=int(source), length=int(length)))
     return seeds, maps
+
+def next_maps(a_map: AlmanacMap, map_type: str, maps: OrderedDictType[str, AlmanacMap]) -> List[AlmanacMap]:
+    mini = a_map.destination
+    maxi = a_map.destination + a_map.length
+    maps_next_level = list(
+        filter(
+            lambda m: m.destination <= maxi and (m.destination + m.length) >= mini,
+            maps[maps.keys().index(map_type) + 1],
+        )
+    )
+    return maps_next_level
+
+def seed_to_location_map(maps):
+    seed_to_location = []
+    for seed_group in maps["seed-to-soil"]:
+    return seed_to_location
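
Note: the new next_maps() calls maps.keys().index(...), but dict and OrderedDict key views have no .index() method, so that line would raise AttributeError as committed; OrderedDict also expects a single iterable of pairs rather than the pairs as separate arguments. A small, hypothetical helper showing one way to reach "the map after map_type" in insertion order; it is not part of the commit.

# sketch only: dict key views have no .index(); materialise the key order first
from collections import OrderedDict

def next_map_key(maps: OrderedDict, map_type: str) -> str:
    order = list(maps)                      # insertion order of the map names
    return order[order.index(map_type) + 1]

stages = OrderedDict((name, []) for name in [
    "seed-to-soil", "soil-to-fertilizer", "fertilizer-to-water",
    "water-to-light", "light-to-temperature", "temperature-to-humidity",
    "humidity-to-location",
])
assert next_map_key(stages, "seed-to-soil") == "soil-to-fertilizer"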

33
day5/example_input.txt Normal file

@@ -0,0 +1,33 @@
seeds: 79 14 55 13

seed-to-soil map:
50 98 2
52 50 48

soil-to-fertilizer map:
0 15 37
37 52 2
39 0 15

fertilizer-to-water map:
49 53 8
0 11 42
42 0 7
57 7 4

water-to-light map:
88 18 7
18 25 70

light-to-temperature map:
45 77 23
81 45 19
68 64 13

temperature-to-humidity map:
0 69 1
1 0 69

humidity-to-location map:
60 56 37
56 93 4


@@ -1,6 +1,6 @@
 #!/usr/bin/env python3.11
 # Is killed on WSL2 after vmmem increase to 16GB
-from data import extract
+from day5.common import extract

 seeds, maps = extract("input.txt")
 lowest_location = None


@@ -1,6 +1,6 @@
 #!/usr/bin/env python3.11
 # Is killed on WSL2 after vmmem increase to 16GB
-from data import extract
+from day5.common import extract
 import concurrent.futures

 def seed_to_location(seed, maps):

2
day6/example_input.txt Normal file
View File

@@ -0,0 +1,2 @@
Time: 7 15 30
Distance: 9 40 200

5
day7/example_input.txt Normal file

@@ -0,0 +1,5 @@
32T3K 765
T55J5 684
KK677 28
KTJJT 220
QQQJA 483

9
day8/example_input.txt Normal file

@@ -0,0 +1,9 @@
RL

AAA = (BBB, CCC)
BBB = (DDD, EEE)
CCC = (ZZZ, GGG)
DDD = (DDD, DDD)
EEE = (EEE, EEE)
GGG = (GGG, GGG)
ZZZ = (ZZZ, ZZZ)

82
day9/common.py Normal file

@@ -0,0 +1,82 @@
from __future__ import annotations
from dataclasses import dataclass
import logging
from typing import List, Self
class color:
PURPLE = "\033[95m"
CYAN = "\033[96m"
DARKCYAN = "\033[36m"
BLUE = "\033[94m"
GREEN = "\033[92m"
YELLOW = "\033[93m"
RED = "\033[91m"
BOLD = "\033[1m"
UNDERLINE = "\033[4m"
END = "\033[0m"
@dataclass(init=False, eq=False)
class OASISLine:
sequence: List[int]
reduce_map: List[List[int]]
def __init__(self, input_line: str):
self.sequence = list(map(lambda i: int(i), input_line.split(" ")))
self.reduce_map = [
self.sequence,
]
def reduce(self) -> Self:
logging.debug(color.RED + f"reducing {self.sequence}" + color.END)
logging.debug(color.BLUE + "=".join(["" for i in range(40)]) + color.END)
line = self.sequence
logging.debug(str(line))
while not all(map(lambda i: i == 0, line)):
new_line = []
for idx, number in enumerate(line):
if idx > 0:
new_line.append(number - line[idx - 1])
line = new_line
self.reduce_map.append(line)
logging.debug(str(line))
logging.debug(color.BLUE + "=".join(["" for i in range(40)]) + color.END)
return self
def extrapolate(self):
logging.debug(color.RED + f"extrapolating {self.sequence}" + color.END)
logging.debug(color.BLUE + "=".join(["" for i in range(40)]) + color.END)
next_increment = 0
for line in reversed(self.reduce_map):
if all(map(lambda i: i == line[0], line)):
next_increment = line[0]
else:
next_increment = line[len(line) - 1] + next_increment
logging.debug(str(line) + " " + color.BOLD + color.GREEN + f" {next_increment}" + color.END + "\n")
logging.debug(color.BLUE + "=".join(["" for i in range(40)]) + color.END)
return next_increment
def extrapolate_back(self):
logging.debug(color.RED + f"extrapolating back {self.sequence}" + color.END)
logging.debug(color.BLUE + "=".join(["" for i in range(40)]) + color.END)
next_increment = 0
for line in reversed(self.reduce_map):
if all(map(lambda i: i == line[0], line)):
next_increment = line[0]
else:
next_increment = line[0] - next_increment
logging.debug(color.BOLD + color.GREEN + f"{next_increment} " + color.END + str(line))
logging.debug(color.BLUE + "=".join(["" for i in range(40)]) + color.END)
return next_increment
@classmethod
def parse(cls, input_file: str) -> List[OASISLine]:
the_list = []
with open(input_file) as input:
while line := input.readline():
line = line.rstrip("\n").rstrip()
logging.debug(color.RED + f'parsing "{line}"' + color.END)
the_list.append(OASISLine(line))
logging.debug(color.GREEN + f"parsed {the_list[len(the_list) - 1].sequence}" + color.END)
return the_list

3
day9/example_input.txt Normal file

@@ -0,0 +1,3 @@
0 3 6 9 12 15
1 3 6 10 15 21
10 13 16 21 30 45

7
day9/part1.py Executable file

@@ -0,0 +1,7 @@
#!/usr/bin/env python3.11
from common import OASISLine
if __name__ == "__main__":
analysis = OASISLine.parse("input.txt")
extrapolation = map(lambda o: o.reduce().extrapolate(), analysis)
print(sum(extrapolation))

7
day9/part2.py Executable file

@@ -0,0 +1,7 @@
#!/usr/bin/env python3.11
from common import OASISLine
if __name__ == "__main__":
analysis = OASISLine.parse("input.txt")
extrapolation = map(lambda o: o.reduce().extrapolate_back(), analysis)
print(sum(extrapolation))

21
day9/test.py Executable file

@@ -0,0 +1,21 @@
#!/usr/bin/env python3.11
import unittest
from common import OASISLine
import logging
class Day9Tests(unittest.TestCase):
def test_part1(self):
analysis = OASISLine.parse("example_input.txt")
extrapolation = map(lambda o: o.reduce().extrapolate(), analysis)
self.assertEqual(sum(extrapolation), 114)
def test_part2(self):
analysis = OASISLine.parse("example_input.txt")
extrapolation = map(lambda o: o.reduce().extrapolate_back(), analysis)
self.assertEqual(sum(extrapolation), 2)
if __name__ == "__main__":
logging.basicConfig(level=logging.DEBUG)
unittest.main(verbosity=2)


@@ -1,5 +1,9 @@
 #!/bin/bash
 DAY=`date "+%-e"`
+if [ ! -d day${DAY0} ]; then
 mkdir day${DAY}
+cp template/* day${DAY}
+fi
 # You have to get your cookie header in text format (Cookie: session=xxxx)
 wget --verbose --header "`cat cookies.txt`" "https://adventofcode.com/2023/day/${DAY}/input" -O day${DAY}/input.txt

1
requirements.txt Normal file

@@ -0,0 +1 @@
shapely>=2.0.2

2
template/common.py Normal file

@@ -0,0 +1,2 @@
def parse(input_file: str) -> T:
pass


3
template/part1.py Executable file

@@ -0,0 +1,3 @@
#!/usr/bin/env python3.11
from common import *

3
template/part2.py Executable file

@@ -0,0 +1,3 @@
#!/usr/bin/env python3.11
from common import *

21
template/test.py Executable file

@@ -0,0 +1,21 @@
#!/usr/bin/env python3.11
from unittest import TestCase, main
import logging
from common import *
class DayXTests(TestCase):
def test_parsing(self):
pass
def test_part1(self):
pass
def test_part2(self):
pass
if __name__ == "__main__":
logging.basicConfig(level=logging.DEBUG)
main(verbosity=2)