1 """Generate a mock model for LLVM tests.
3 The generated model is not a neural net - it is just a tf.function with the
4 correct input and output parameters. By construction, the mock model will always
import os
import sys

import tensorflow as tf
# Name under which the model's inlining decision is logged/surfaced.
POLICY_DECISION_LABEL = 'inlining_decision'

# JSON spec describing the model's output tensor. It is written next to the
# saved model so the consumer knows how to read the decision.
# NOTE(review): only 'logging_name' and the tensor 'name' were visible in the
# fragment; the remaining fields were reconstructed from the LLVM inlining
# model conventions - verify against the upstream file.
POLICY_OUTPUT_SPEC = """
[
    {
        "logging_name": "inlining_decision",
        "tensor_spec": {
            "name": "StatefulPartitionedCall",
            "port": 0,
            "type": "int64_t",
            "shape": [
                1
            ]
        }
    }
]
"""
34 """Returns the list of features for LLVM inlining."""
37 tf.TensorSpec(dtype=tf.int64, shape=(), name=key)
for key
in [
38 'caller_basic_block_count',
39 'caller_conditionally_executed_blocks',
41 'callee_basic_block_count',
42 'callee_conditionally_executed_blocks',
54 'call_argument_setup',
55 'load_relative_intrinsic',
56 'lowered_call_arg_setup',
57 'indirect_call_penalty',
59 'case_cluster_penalty',
61 'unsimplified_common_instructions',
64 'simplified_instructions',
66 'constant_offset_ptr_args',
69 'last_call_to_static_bonus',
72 'nested_inline_cost_estimate',
79 tf.TensorSpec(dtype=tf.float32, shape=(), name=key)
80 for key
in [
'discount',
'reward']
85 tf.TensorSpec(dtype=tf.int32, shape=(), name=key)
86 for key
in [
'step_type']
92 return POLICY_DECISION_LABEL
96 return POLICY_OUTPUT_SPEC
def get_output_spec_path(path):
  """Returns the location of the output spec json file under `path`."""
  return os.path.join(path, 'output_spec.json')
103 """Build and save the mock model with the given signature"""
108 module.var = tf.Variable(0.)
111 s = tf.reduce_sum([tf.cast(x, tf.float32)
for x
in tf.nest.flatten(inputs)])
112 return {signature[
'output']: float(
'inf') + s + module.var}
114 module.action = tf.function()(action)
115 action = {
'action': module.action.get_concrete_function(signature[
'inputs'])}
116 tf.saved_model.save(module, path, signatures=action)
119 with open(output_spec_path,
'w')
as f:
120 print(f
'Writing output spec to {output_spec_path}.')
121 f.write(signature[
'output_spec'])
def main(argv):
  """Entry point: argv[1] is the directory to write the mock model into."""
  assert len(argv) == 2
  model_path = argv[1]

  print(f'Output model to: [{argv[1]}]')
  # NOTE(review): the lines assembling the signature were not visible in the
  # fragment; reconstructed from the getters defined above - verify.
  signature = {
      'inputs': get_input_signature(),
      'output': get_output_signature(),
      'output_spec': get_output_spec(),
  }
  build_mock_model(model_path, signature)
if __name__ == '__main__':
  # NOTE(review): the guarded call was not visible in the fragment;
  # reconstructed as the conventional script entry point - verify.
  main(sys.argv)