diff --git a/src/running/benchmark.py b/src/running/benchmark.py index 642dd96..7a434fc 100644 --- a/src/running/benchmark.py +++ b/src/running/benchmark.py @@ -318,3 +318,40 @@ def get_full_args(self, runtime: Runtime) -> List[Union[str, Path]]: cmd.extend(["-n", "1"]) # one run cmd.extend(self.program_args) return cmd + +class ChromeBenchmark(Benchmark): + def __init__(self, chrome_args: List[str], js_args: List[str], + timing_iteration: int, benchmark_url: str, **kwargs): + super().__init__(**kwargs) + self.chrome_args = chrome_args + self.js_args = js_args + self.benchmark_url = benchmark_url + self.timing_iteration = timing_iteration + + def __str__(self) -> str: + return self.to_string(DummyRuntime("chrome")) + + def attach_modifiers(self, modifiers: Sequence[Modifier]) -> 'ChromeBenchmark': + jb = super().attach_modifiers(modifiers) + for m in modifiers: + if type(m) == JSArg: + jb.js_args.extend(m.val) + elif type(m) == ChromeArg: + jb.chrome_args.extend(m.val) + elif type(m) == ProgramArg: + jb.program_args.extend(m.val) + return jb + + def get_full_args(self, runtime: Runtime) -> List[Union[str, Path]]: + cmd = super().get_full_args(runtime) + cmd.append(runtime.get_executable()) + cmd.extend(self.chrome_args) + # Chrome workaround to remove the cache directory between iterations. 
+ cmd.append("--user-data-dir=/tmp/chrome-expr-cache") + s = '' + for j in self.js_args: + s += j + " " + cmd.append("--js-flags={}".format(s)) + full_url = "{}?startAutomatically&iterationCount={}&suite={}".format(self.benchmark_url, self.timing_iteration, self.name) + cmd.append(full_url) + return cmd diff --git a/src/running/command/minheap.py b/src/running/command/minheap.py index e9a2153..baf882f 100644 --- a/src/running/command/minheap.py +++ b/src/running/command/minheap.py @@ -54,9 +54,16 @@ def log(s): log("o ") return ContinueSearch.HeapTooBig elif subprocess_exit is SubprocessrExit.Timeout: - # A timeout is likely due to heap being too small and many GCs scheduled back to back - log("t ") - return ContinueSearch.HeapTooSmall + if suite.is_passed(output): + # A hack for Chrome benchmarks so that a run is considered successful + # when Chrome remains active after all iterations complete then killed due to + # a timeout error. + log("o ") + return ContinueSearch.HeapTooBig + else: + # A timeout is likely due to heap being too small and many GCs scheduled back to back + log("t ") + return ContinueSearch.HeapTooSmall # If not the above scenario, we treat this invocation as a crash or some kind of erroneous state log(".") continue diff --git a/src/running/command/runbms.py b/src/running/command/runbms.py index 4ee664a..5b5bb41 100644 --- a/src/running/command/runbms.py +++ b/src/running/command/runbms.py @@ -196,6 +196,8 @@ def hz_to_ghz(hzstr: str) -> str: def get_log_prologue(runtime: Runtime, bm: Benchmark) -> str: output = "\n-----\n" output += "mkdir -p PLOTTY_WORKAROUND; timedrun; " + # Chrome workaround to remove the cache directory between iterations. 
+ output += "rm -rf /tmp/chrome-expr-cache; " output += bm.to_string(runtime) output += "\n" output += "running-ng v{}\n".format(__VERSION__) @@ -310,8 +312,15 @@ def run_one_benchmark( if runtime.is_oom(output): oomed_count[c] += 1 if exit_status is SubprocessrExit.Timeout: - timeout_count[c] += 1 - print(".", end="", flush=True) + if suite.is_passed(output): + # A hack for Chrome benchmarks so that a run is considered successful + # when Chrome remains active after all iterations complete then killed due to + # a timeout error. + config_passed = True + print(config_index_to_chr(j), end="", flush=True) + else: + timeout_count[c] += 1 + print(".", end="", flush=True) elif exit_status is SubprocessrExit.Error: print(".", end="", flush=True) elif exit_status is SubprocessrExit.Normal: diff --git a/src/running/modifier.py b/src/running/modifier.py index 594620c..8d2356e 100644 --- a/src/running/modifier.py +++ b/src/running/modifier.py @@ -180,3 +180,12 @@ def __init__(self, value_opts=None, **kwargs): def __str__(self) -> str: return "{} JuliaArg {}".format(super().__str__(), self.val) + +@register(Modifier) +class ChromeArg(Modifier): + def __init__(self, value_opts=None, **kwargs): + super().__init__(value_opts, **kwargs) + self.val = split_quoted(self._kwargs["val"]) + + def __str__(self) -> str: + return "{} ChromeArg {}".format(super().__str__(), self.val) diff --git a/src/running/runtime.py b/src/running/runtime.py index b7e2115..c4ddbfe 100644 --- a/src/running/runtime.py +++ b/src/running/runtime.py @@ -164,6 +164,25 @@ def is_oom(self, output: bytes) -> bool: return True return False +@register(Runtime) +class Chrome(JavaScriptRuntime): + def __str__(self): + return "{} chrome {}".format(super().__str__(), self.executable) + + def get_heapsize_modifiers(self, size: int) -> List[Modifier]: + size_str = "{}".format(size) + heapsize = JSArg( + name="heap{}".format(size_str), + val="--initial-heap-size={} --max-heap-size={}".format( + size_str, size_str) + ) + 
return [heapsize] + + def is_oom(self, output: bytes) -> bool: + for pattern in [b"Fatal javascript OOM in", b"Fatal JavaScript out of memory", b"V8 javascript OOM", b"<--- Last few GCs --->"]: + if pattern in output: + return True + return False @register(Runtime) class SpiderMonkey(JavaScriptRuntime): diff --git a/src/running/suite.py b/src/running/suite.py index cdf6781..ad3efc2 100644 --- a/src/running/suite.py +++ b/src/running/suite.py @@ -6,6 +6,7 @@ Benchmark, JavaScriptBenchmark, JuliaBenchmark, + ChromeBenchmark, ) import logging from running.util import register, split_quoted @@ -517,3 +518,73 @@ def get_benchmark(self, bm_spec: Union[str, Dict[str, Any]]) -> "JuliaBenchmark" def is_passed(self, output: bytes) -> bool: # FIXME return True + +@register(BenchmarkSuite) +class Speedometer(BenchmarkSuite): + def __init__(self, **kwargs): + super().__init__(**kwargs) + self.timing_iteration = parse_timing_iteration( + kwargs.get("timing_iteration"), "Speedometer") + self.benchmark_url = kwargs.get("benchmark_url") + self.minheap: Optional[str] + self.minheap = kwargs.get("minheap") + self.minheap_values: Dict[str, Dict[str, int]] + self.minheap_values = kwargs.get("minheap_values", {}) + if not isinstance(self.minheap_values, dict): + raise TypeError( + "The minheap_values of {} should be a dictionary".format(self.name) + ) + if self.minheap: + if not isinstance(self.minheap, str): + raise TypeError( + "The minheap of {} should be a string that selects from a minheap_values".format( + self.name + ) + ) + if self.minheap not in self.minheap_values: + raise KeyError( + "{} is not a valid entry of {}.minheap_values".format( + self.name, self.name + ) + ) + self.timeout: Optional[int] + self.timeout = kwargs.get("timeout") + + def __str__(self) -> str: + return "{} Speedometer {}".format(super().__str__(), self.benchmark_url) + + def get_benchmark(self, bm_spec: Union[str, Dict[str, Any]]) -> 'ChromeBenchmark': + assert type(bm_spec) is str + + return 
ChromeBenchmark( + suite_name=self.name, + name=bm_spec, + chrome_args=[], + js_args=[], + timing_iteration=self.timing_iteration, + benchmark_url=self.benchmark_url, + timeout=self.timeout, + ) + + def get_minheap(self, bm: Benchmark) -> int: + assert isinstance(bm, ChromeBenchmark) + name = bm.name + if not self.minheap: + logging.warning( + "No minheap_value of {} is selected".format(self)) + return __DEFAULT_MINHEAP + minheap = self.minheap_values[self.minheap] + if name not in minheap: + logging.warning( + "Minheap for {} of {} not set".format(name, self)) + return __DEFAULT_MINHEAP + return minheap[name] + + def is_passed(self, output: bytes) -> bool: + # A hack for Chrome benchmarks so that a run is considered successful + # when Chrome remains active after all iterations complete then killed due to + # a timeout error. + iter_completed = output.decode("utf-8").count("End MMTk Statistics") + if iter_completed == self.timing_iteration: + return True + return False \ No newline at end of file