A Rust Implementation of a Simple Car DES

This post gives an implementation of this example from the SimPy documentation:

import simpy

def car(env):
    while True:
        print('Start parking at %d' % env.now)
        parking_duration = 5
        yield env.timeout(parking_duration)

        print('Start driving at %d' % env.now)
        trip_duration = 2
        yield env.timeout(trip_duration)

if __name__ == '__main__':
    env = simpy.Environment()
    env.process(car(env))
    env.run(until=15)
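
Running this example until time 15 produces the following alternating schedule:

Start parking at 0
Start driving at 5
Start parking at 7
Start driving at 12
Start parking at 14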

Based on my post A Rust Implementation of a Simple Clock DES, I implemented the above car example in Rust; it prints the same schedule of events. The major difference from the clock example is the introduction of an EventType enum with Parking and Driving variants, rather than a single kind of event.

// car.rs

use std::cmp::{Ordering, Reverse};
use std::collections::BinaryHeap;

#[derive(Debug, PartialEq, PartialOrd)]
enum EventType {
    Parking,
    Driving,
}

#[derive(Debug, PartialEq, PartialOrd)]
struct Event {
    time: f64,
    event_type: EventType,
}

// f64 only implements PartialOrd (NaN has no total order), so Eq and Ord are
// implemented by hand on top of the derived PartialOrd, treating incomparable
// events as equal.
impl Eq for Event {}

impl Ord for Event {
    fn cmp(&self, other: &Self) -> Ordering {
        self.partial_cmp(other).unwrap_or(Ordering::Equal)
    }
}

struct Environment {
    // BinaryHeap is a max-heap; wrapping events in Reverse turns it into a
    // min-heap so the event with the smallest time is popped first.
    event_queue: BinaryHeap<Reverse<Event>>,
    clock: f64,
}

impl Environment {
    fn new() -> Self {
        Self {
            event_queue: BinaryHeap::new(),
            clock: 0.0,
        }
    }

    fn schedule_event(&mut self, event: Event) {
        self.event_queue.push(Reverse(event));
    }

    fn run_until(&mut self, end_time: f64) {
        // Pop events in time order until the queue is empty or the next
        // event would occur at or after end_time.
        while let Some(Reverse(current_event)) = self.event_queue.pop() {
            if current_event.time < end_time {
                self.clock = current_event.time;
                current_event.execute(self);
            } else {
                self.clock = end_time;
                break;
            }
        }
    }

    // Mirrors SimPy's env.now (not used in this example).
    fn now(&self) -> f64 {
        self.clock
    }
}

impl Event {
    fn new(time: f64, event_type: EventType) -> Self {
        Self { time, event_type }
    }

    // Executing an event prints its message and schedules the follow-up
    // event, mirroring the two yield points in the SimPy generator.
    fn execute(&self, env: &mut Environment) {
        match self.event_type {
            EventType::Parking => {
                println!("Start parking at {}", self.time);
                let parking_duration = 5.0;
                env.schedule_event(Event::new(self.time + parking_duration, EventType::Driving));
            }
            EventType::Driving => {
                println!("Start driving at {}", self.time);
                let trip_duration = 2.0;
                env.schedule_event(Event::new(self.time + trip_duration, EventType::Parking));
            }
        }
    }
}

fn main() {
    let mut env = Environment::new();

    // Schedule the initial parking event
    env.schedule_event(Event::new(0.0, EventType::Parking));

    // Run the simulation until the maximum time
    env.run_until(15.0);
}
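
As a sanity check on the ordering logic, here is a small test sketch that is not part of the original program but could be appended to car.rs (it relies on the Environment, Event, and EventType definitions above); it confirms that wrapping events in Reverse makes the earliest event pop first:

#[cfg(test)]
mod tests {
    use super::*;
    use std::cmp::Reverse;

    #[test]
    fn earliest_event_pops_first() {
        let mut env = Environment::new();
        // Schedule events out of order on purpose.
        env.schedule_event(Event::new(5.0, EventType::Driving));
        env.schedule_event(Event::new(0.0, EventType::Parking));

        // Reverse turns the max-heap into a min-heap, so time 0.0 comes out first.
        let Reverse(first) = env.event_queue.pop().unwrap();
        assert_eq!(first.time, 0.0);
        assert_eq!(first.event_type, EventType::Parking);
    }
}

Compiling with rustc --test car.rs and executing the resulting binary would run the test.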

I was curious to get a sense of the run time performance of my Rust implementation versus the SimPy implementation. I ran the following script, which uses /usr/bin/time -v to collect the run times of the two implementations, running the SimPy version under both CPython and PyPy3.

import subprocess

import pandas as pd

REPLICATES = 1000

rust_cmd = ["./car"]
simpy_cmd = ["python3", "simpy_car.py"]
pypy_simpy_cmd = ["pypy3", "simpy_car.py"]

COMMANDS = [rust_cmd, simpy_cmd, pypy_simpy_cmd]


def capture_execution_time(command):
    try:
        # Run the command and capture both stdout and stderr
        result = subprocess.run(
            ["/usr/bin/time", "-v"] + command,
            stderr=subprocess.PIPE,
            stdout=subprocess.PIPE,
            universal_newlines=True,
        )

        # Check for errors
        result.check_returncode()

        # Extract time information from stderr
        time_info = result.stderr

        return time_info

    except subprocess.CalledProcessError as e:
        # Handle errors, if any
        print(f"Error: {e}")
        return None


results = []
for command in COMMANDS:
    for _ in range(REPLICATES):
        result = capture_execution_time(command)
        if result is not None:
            # Split the `time -v` report into "Field: value" pairs and build a dict
            result = [
                line.replace("\t", "").replace("\n", "").split(": ")
                for line in result.split("\n\t")
            ]
            result = {i[0]: i[1] for i in result}
            print(_, result["Command being timed"], result["User time (seconds)"])
            results.append(result)


df = pd.DataFrame(results)

# Convert numeric columns to float; non-numeric columns (e.g. the command string) stay as strings
for col in df.columns:
    try:
        df[col] = df[col].astype(float)
    except Exception as e:
        print(e)

df.to_csv("results.csv", index=False)

With the results saved to results.csv, I used pandas and Matplotlib to plot histograms of the run times and to generate summary tables in markdown.

import matplotlib.pyplot as plt
import pandas as pd

df = pd.read_csv("results.csv")


# Histogram
for group, groupdf in df.groupby("Command being timed"):
    plt.hist(groupdf["User time (seconds)"], label=group, bins=30)

plt.ylabel("Frequency")
plt.xlabel("Run Time (seconds)")
plt.xscale("log")
plt.legend()
plt.savefig("run_time_hist.png", dpi=300, transparent=True)
plt.close()


# Markdown table
result = ""

for group, group_df in df.groupby("Command being timed"):
    result += group.replace('"', "`") + "\n\n"
    result += group_df.describe().to_markdown() + "\n\n"

with open("results.md", "w") as f:
    f.write(result)

Here are the results:

Run Time of Rust vs SimPy Implementation (histogram saved as run_time_hist.png)

./car

| | count | mean | std | min | 25% | 50% | 75% | max |
|---|---|---|---|---|---|---|---|---|
| User time (seconds) | 1000 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
| System time (seconds) | 1000 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
| Average shared text size (kbytes) | 1000 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
| Average unshared data size (kbytes) | 1000 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
| Average stack size (kbytes) | 1000 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
| Average total size (kbytes) | 1000 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
| Maximum resident set size (kbytes) | 1000 | 2071.52 | 53.0929 | 1932 | 2028 | 2084 | 2104 | 2176 |
| Average resident set size (kbytes) | 1000 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
| Major (requiring I/O) page faults | 1000 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
| Minor (reclaiming a frame) page faults | 1000 | 86.906 | 2.20364 | 81 | 86 | 87 | 89 | 92 |
| Voluntary context switches | 1000 | 1 | 0 | 1 | 1 | 1 | 1 | 1 |
| Involuntary context switches | 1000 | 0.024 | 0.153126 | 0 | 0 | 0 | 0 | 1 |
| Swaps | 1000 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
| File system inputs | 1000 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
| File system outputs | 1000 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
| Socket messages sent | 1000 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
| Socket messages received | 1000 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
| Signals delivered | 1000 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
| Page size (bytes) | 1000 | 4096 | 0 | 4096 | 4096 | 4096 | 4096 | 4096 |
| Exit status | 1000 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |

pypy3 simpy_car.py

| | count | mean | std | min | 25% | 50% | 75% | max |
|---|---|---|---|---|---|---|---|---|
| User time (seconds) | 1000 | 0.18273 | 0.0200761 | 0.14 | 0.17 | 0.18 | 0.2 | 0.3 |
| System time (seconds) | 1000 | 0.02527 | 0.00883205 | 0 | 0.02 | 0.02 | 0.03 | 0.05 |
| Average shared text size (kbytes) | 1000 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
| Average unshared data size (kbytes) | 1000 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
| Average stack size (kbytes) | 1000 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
| Average total size (kbytes) | 1000 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
| Maximum resident set size (kbytes) | 1000 | 79497 | 81.561 | 78644 | 79452 | 79508 | 79520 | 80020 |
| Average resident set size (kbytes) | 1000 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
| Major (requiring I/O) page faults | 1000 | 0.671 | 21.2189 | 0 | 0 | 0 | 0 | 671 |
| Minor (reclaiming a frame) page faults | 1000 | 9192.85 | 26.4655 | 8679 | 9186 | 9188 | 9190 | 9343 |
| Voluntary context switches | 1000 | 2.183 | 19.4996 | 1 | 1 | 2 | 2 | 618 |
| Involuntary context switches | 1000 | 4.631 | 12.8299 | 0 | 1 | 2 | 3 | 258 |
| Swaps | 1000 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
| File system inputs | 1000 | 136.16 | 4305.76 | 0 | 0 | 0 | 0 | 136160 |
| File system outputs | 1000 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
| Socket messages sent | 1000 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
| Socket messages received | 1000 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
| Signals delivered | 1000 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
| Page size (bytes) | 1000 | 4096 | 0 | 4096 | 4096 | 4096 | 4096 | 4096 |
| Exit status | 1000 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |

python3 simpy_car.py

| | count | mean | std | min | 25% | 50% | 75% | max |
|---|---|---|---|---|---|---|---|---|
| User time (seconds) | 1000 | 0.13354 | 0.0171803 | 0.08 | 0.12 | 0.13 | 0.14 | 0.2 |
| System time (seconds) | 1000 | 0.02021 | 0.00986678 | 0 | 0.01 | 0.02 | 0.03 | 0.05 |
| Average shared text size (kbytes) | 1000 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
| Average unshared data size (kbytes) | 1000 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
| Average stack size (kbytes) | 1000 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
| Average total size (kbytes) | 1000 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
| Maximum resident set size (kbytes) | 1000 | 19422 | 121.677 | 19068 | 19344 | 19444 | 19509 | 19704 |
| Average resident set size (kbytes) | 1000 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
| Major (requiring I/O) page faults | 1000 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
| Minor (reclaiming a frame) page faults | 1000 | 3406.91 | 2.43512 | 3400 | 3405 | 3407 | 3409 | 3414 |
| Voluntary context switches | 1000 | 1 | 0 | 1 | 1 | 1 | 1 | 1 |
| Involuntary context switches | 1000 | 10.01 | 10.0043 | 2 | 6 | 7 | 9 | 111 |
| Swaps | 1000 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
| File system inputs | 1000 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
| File system outputs | 1000 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
| Socket messages sent | 1000 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
| Socket messages received | 1000 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
| Signals delivered | 1000 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
| Page size (bytes) | 1000 | 4096 | 0 | 4096 | 4096 | 4096 | 4096 | 4096 |
| Exit status | 1000 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |

Clearly the Rust implementation is faster: its user time is reported as 0.00 seconds on every run, while the CPython runs average about 0.13 seconds, so the 10 ms resolution of /usr/bin/time makes it impossible to pin down exactly how much faster it is. I would naïvely attribute the gap to Rust being a compiled (and statically typed) language that also doesn't have a garbage collector. Interestingly, the PyPy3 interpreter was actually slower than the CPython interpreter in this case, most likely because interpreter start-up and JIT warm-up dominate such a short run.
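
One rough way around that resolution limit (a sketch only; not what produced the numbers above) would be to let the simulation time itself with std::time::Instant, which offers nanosecond resolution:

// Hypothetical variant of main() from car.rs; Environment, Event, and
// EventType are the types defined earlier in this post.
use std::time::Instant;

fn main() {
    let start = Instant::now();

    let mut env = Environment::new();
    env.schedule_event(Event::new(0.0, EventType::Parking));
    env.run_until(15.0);

    // elapsed() returns a Duration, which prints with sub-microsecond detail.
    println!("simulation took {:?}", start.elapsed());
}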

This post is licensed under CC BY 4.0 by the author.
