feat: added day 9 part 1 and 2
All checks were successful
Build and run challenges / Challenge for day (1) (push) Successful in 3s
Build and run challenges / Challenge for day (2) (push) Successful in 3s
Build and run challenges / Challenge for day (4) (push) Successful in 5s
Build and run challenges / Challenge for day (5) (push) Successful in 5s
Build and run challenges / Challenge for day (6) (push) Successful in 5s
Build and run challenges / Challenge for day (3) (push) Successful in 11s
Build and run challenges / Challenge for day (7) (push) Successful in 4s
Build and run challenges / Challenge for day (8) (push) Successful in 3s
Build and run challenges / Challenge for day (9) (push) Successful in 3s
Signed-off-by: Louis Vallat <contact@louis-vallat.dev>
parent 013414d7a3
commit b9a480c97a
@@ -11,7 +11,7 @@ jobs:
     name: Challenge for day
     strategy:
       matrix:
-        day_number: [1, 2, 3, 4, 5, 6, 7, 8]
+        day_number: [1, 2, 3, 4, 5, 6, 7, 8, 9]
     runs-on: rust-bookworm
     steps:
       - name: Check out repository code
day9/Cargo.toml (new file, 6 lines)
@@ -0,0 +1,6 @@
[package]
name = "day9"
version = "0.1.0"
edition = "2021"

[dependencies]
day9/src/main.rs (new file, 118 lines)
@@ -0,0 +1,118 @@
use std::collections::{HashSet, LinkedList};
use std::{env, fs};

fn read_input(path: &str) -> String {
    fs::read_to_string(path).expect("Cannot read file.")
}

fn parse_input(input: &str) -> LinkedList<(Option<usize>, usize)> {
    let mut nodes = LinkedList::new();
    let mut is_file = true;
    let mut id = 0;

    for x in input.chars().filter_map(|c| c.to_digit(10)) {
        if x != 0 {
            if is_file {
                nodes.push_back((Some(id), x as usize));
                id += 1;
            } else {
                nodes.push_back((None, x as usize));
            }
        }
        is_file = !is_file;
    }

    nodes
}
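parse_input walks the dense disk-map digits, alternating between file and free-space runs: file runs get incrementing ids, free runs get None, and zero-length runs are dropped. A minimal sanity-check sketch (not part of the commit) against the small "12345" layout from the puzzle description, which encodes a 1-block file, 2 free blocks, a 3-block file, 4 free blocks and a 5-block file:

    #[cfg(test)]
    mod parse_input_sketch {
        use super::*;

        // Illustrative test only: "12345" => 0..111....22222 as (id, length) runs.
        #[test]
        fn parses_the_small_example_layout() {
            let nodes: Vec<_> = parse_input("12345").into_iter().collect();
            assert_eq!(
                nodes,
                vec![(Some(0), 1), (None, 2), (Some(1), 3), (None, 4), (Some(2), 5)]
            );
        }
    }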
fn checksum<'a, I>(nodes: I) -> usize
where
    I: IntoIterator<Item = &'a (Option<usize>, usize)>,
{
    let mut index = 0;
    let mut sum = 0;
    for node in nodes {
        if let Some(id) = node.0 {
            sum += (node.1 * ((id * index) + (id * (index + node.1 - 1)))) / 2;
        }
        index += node.1;
    }
    sum
}
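checksum keeps a running block index across all runs and, for each file run, adds id * (index + index+1 + ... + index+len-1). The expression in the loop is that arithmetic series in closed form, (len * (id*index + id*(index+len-1))) / 2, which is safe here because every stored run has len >= 1. A standalone sketch (names are illustrative, not from the commit) comparing the closed form against a naive per-block sum:

    // Sketch: closed-form block checksum vs. a naive per-block sum.
    fn naive(id: usize, index: usize, len: usize) -> usize {
        (index..index + len).map(|pos| id * pos).sum()
    }

    fn closed_form(id: usize, index: usize, len: usize) -> usize {
        (len * ((id * index) + (id * (index + len - 1)))) / 2
    }

    fn main() {
        // File id 7 occupying blocks 10..=13: 7*10 + 7*11 + 7*12 + 7*13 = 322.
        assert_eq!(naive(7, 10, 4), 322);
        assert_eq!(closed_form(7, 10, 4), 322);
    }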
fn part_1(disk_map: &LinkedList<(Option<usize>, usize)>) -> usize {
    let mut disk_map = disk_map.clone();
    let mut defragmented = LinkedList::new();
    let mut free_len = 0;
    while let Some(node) = disk_map.pop_front() {
        if node.0.is_some() {
            defragmented.push_back(node);
        } else {
            let to_place = node.1;
            free_len += to_place;
            while let Some(node) = disk_map.pop_back() {
                if let Some(id) = node.0 {
                    if node.1 > to_place {
                        defragmented.push_back((Some(id), to_place));
                        disk_map.push_back((Some(id), node.1 - to_place));
                    } else {
                        if node.1 < to_place {
                            disk_map.push_front((None, to_place - node.1));
                            free_len -= to_place - node.1;
                        }
                        defragmented.push_back(node);
                    }
                    break;
                } else {
                    free_len += node.1;
                }
            }
        }
    }

    defragmented.push_back((None, free_len));
    checksum(&defragmented)
}
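part_1 consumes the run list from both ends: file runs popped from the front stay where they are, and each free run is filled by popping file runs from the back, splitting a file that is too large or pushing the leftover gap back onto the front, while free_len accumulates the space that ends up trailing so the rebuilt list still spans the whole disk before the checksum. A hand-checked sketch (not part of the commit): compacting "12345" yields the block layout 022111222, whose checksum is 2*(1+2) + 1*(3+4+5) + 2*(6+7+8) = 60.

    #[cfg(test)]
    mod part_1_sketch {
        use super::*;

        // Illustrative test only, hand-checked against the "12345" layout.
        #[test]
        fn compacts_blocks_from_the_back() {
            assert_eq!(part_1(&parse_input("12345")), 60);
        }
    }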
fn part_2(disk_map: &LinkedList<(Option<usize>, usize)>) -> usize {
    let mut disk_map = Vec::from_iter(disk_map.clone());
    let mut defragmented_ids = HashSet::new();
    let mut i = disk_map.len() - 1;

    while i > 0 {
        let id = disk_map[i].0.unwrap_or_default();
        if disk_map[i].0.is_some() && !defragmented_ids.contains(&id) {
            defragmented_ids.insert(id);
            let needed_space = disk_map[i].1;
            for j in 0..i {
                if disk_map[j].0.is_none() {
                    let available_space = disk_map[j].1;
                    if available_space >= needed_space {
                        disk_map.swap(i, j);
                        if available_space > needed_space {
                            disk_map.insert(j + 1, (None, available_space - needed_space));
                            disk_map[j].1 = needed_space;
                            disk_map[i + 1].1 = needed_space;
                            i += 1;
                        }
                        break;
                    }
                }
            }
        }
        i -= 1;
    }

    checksum(&disk_map)
}
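part_2 switches to a Vec so free runs can be split in place: files are visited right to left, each at most once thanks to defragmented_ids, and a file is swapped into the leftmost free run large enough to hold it. When the gap is bigger than the file, the leftover free space is re-inserted just after the gap's old position and the displaced free run (now at i + 1) is trimmed to the file's length; the i += 1 compensates for that insertion so the scan index stays aligned. A hand-checked sketch (not part of the commit): in "143", file 1 (3 blocks) moves whole into the 4-block gap after file 0, giving the layout 0111.... and checksum 1*(1+2+3) = 6.

    #[cfg(test)]
    mod part_2_sketch {
        use super::*;

        // Illustrative test only, hand-checked against the "143" layout.
        #[test]
        fn moves_whole_files_left() {
            assert_eq!(part_2(&parse_input("143")), 6);
        }
    }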
fn main() {
    let args: Vec<String> = env::args().collect();
    for arg in args.iter().skip(1) {
        let input = read_input(&arg);
        let disk_map = parse_input(&input);
        println!("[{}]", &arg);
        println!("\t[Part 1] => Answer is '{}'.", part_1(&disk_map));
        println!("\t[Part 2] => Answer is '{}'.", part_2(&disk_map));
    }
}