interactive_host.py
1"""Utility for testing InteractiveModelRunner.
2
3Use it from pass-specific tests by providing a main .py which calls this library's
4`run_interactive` with an appropriate callback to provide advice.
5
6From .ll tests, just call the above-mentioned main as a prefix to the opt/llc
7invocation (with the appropriate flags enabling the interactive mode)
8
9Examples:
10test/Transforms/Inline/ML/interactive-mode.ll
11test/CodeGen/MLRegAlloc/interactive-mode.ll
12"""

import ctypes
import log_reader
import io
import math
import os
import subprocess
from typing import Callable, List, Union


def send(f: io.BufferedWriter, value: Union[int, float], spec: log_reader.TensorSpec):
    """Send the `value` - currently just a scalar - formatted as per `spec`."""

    # just int64 for now
    assert spec.element_type == ctypes.c_int64
    to_send = ctypes.c_int64(int(value))
    assert f.write(bytes(to_send)) == ctypes.sizeof(spec.element_type) * math.prod(
        spec.shape
    )
    f.flush()

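# Example of what `send` produces (assuming a little-endian host, since
# ctypes.c_int64 uses native byte order): for an int64 scalar spec,
# send(tc, 5, spec) writes the 8 bytes b"\x05\x00\x00\x00\x00\x00\x00\x00"
# and flushes the pipe.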

def run_interactive(
    temp_rootname: str,
    make_response: Callable[[List[log_reader.TensorValue]], Union[int, float]],
    process_and_args: List[str],
):
40 """Host the compiler.
41 Args:
42 temp_rootname: the base file name from which to construct the 2 pipes for
43 communicating with the compiler.
44 make_response: a function that, given the current tensor values, provides a
45 response.
46 process_and_args: the full commandline for the compiler. It it assumed it
47 contains a flag poiting to `temp_rootname` so that the InteractiveModeRunner
48 would attempt communication on the same pair as this function opens.
49
50 This function sets up the communication with the compiler - via 2 files named
51 `temp_rootname`.in and `temp_rootname`.out - prints out the received features,
52 and sends back to the compiler an advice (which it gets from `make_response`).
53 It's used for testing, and also to showcase how to set up communication in an
54 interactive ML ("gym") environment.
55 """
    to_compiler = temp_rootname + ".in"
    from_compiler = temp_rootname + ".out"
    try:
        os.mkfifo(to_compiler, 0o666)
        os.mkfifo(from_compiler, 0o666)
        compiler_proc = subprocess.Popen(
            process_and_args, stderr=subprocess.PIPE, stdout=subprocess.DEVNULL
        )
        with io.BufferedWriter(io.FileIO(to_compiler, "wb")) as tc:
            with io.BufferedReader(io.FileIO(from_compiler, "rb")) as fc:
                tensor_specs, _, advice_spec = log_reader.read_header(fc)
                context = None
                while compiler_proc.poll() is None:
                    next_event = fc.readline()
                    if not next_event:
                        break
                    (
                        last_context,
                        observation_id,
                        features,
                        _,
                    ) = log_reader.read_one_observation(
                        context, next_event, fc, tensor_specs, None
                    )
                    if last_context != context:
                        print(f"context: {last_context}")
                        context = last_context
                    print(f"observation: {observation_id}")
                    tensor_values = []
                    for fv in features:
                        log_reader.pretty_print_tensor_value(fv)
                        tensor_values.append(fv)
                    send(tc, make_response(tensor_values), advice_spec)
        _, err = compiler_proc.communicate()
        print(err.decode("utf-8"))
        compiler_proc.wait()

    finally:
        os.unlink(to_compiler)
        os.unlink(from_compiler)
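
# Sketch of the .ll-test pattern from the module docstring: run the test's main .py
# as a prefix of the opt/llc invocation, pointing both at the same channel base
# name. The script name and the interactive-mode flag below are placeholders, not
# real options - each pass documents its own flags.
#
#   ; RUN: %python %S/Inputs/interactive_main.py %t.channel-basename \
#   ;   opt <flags enabling interactive mode on %t.channel-basename> %s -o /dev/null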