botbottingbot committed on
Commit dea14e3 · verified · 1 Parent(s): 57e0473

Create App.py

Files changed (1)
  1. App.py +111 -0
App.py ADDED
@@ -0,0 +1,111 @@
+ import json
+ from functools import partial
+ import gradio as gr
+ from transformers import pipeline
+
+ with open("modules.json", "r", encoding="utf-8") as f:
+     MODULES = json.load(f)["modules"]
+
+ GENERATORS = [m for m in MODULES if m["type"] == "generator"]
+ CHECKERS = {m["id"]: m for m in MODULES if m["type"] == "checker"}
+ GEN_BY_ID = {m["id"]: m for m in GENERATORS}
+
+ llm = pipeline("text-generation", model="gpt2", max_new_tokens=512)
+
+ def call_llm(prompt):
+     out = llm(prompt, max_new_tokens=512, do_sample=False)[0]["generated_text"]
+     return out[len(prompt):].strip() if out.startswith(prompt) else out
+
+ def generator_prompt(module_id, *inputs):
+     m = GEN_BY_ID[module_id]
+     keys = list(m["input_placeholders"].keys())
+     inputs = {k: inputs[i] if i < len(inputs) else "" for i, k in enumerate(keys)}
+     sections = m["output_sections"]
+
+     p = []
+     p.append("You are a structured reasoning module.")
+     p.append(f"MODULE: {m['label']}")
+     p.append("INPUTS:")
+     for k, v in inputs.items():
+         p.append(f"{k.upper()}: {v}")
+     p.append("")
+     p.append("Produce the following sections:")
+     for s in sections:
+         p.append(f"- {s}")
+     p.append("")
+     for s in sections:
+         p.append(f"{s}:")
+         p.append("[content]")
+         p.append("")
+     return "\n".join(p)
+
+ def checker_prompt(checker_id, *vals):
+     c = CHECKERS[checker_id]
+     secs = c["output_sections"]
+     if len(vals) == 0:
+         orig, draft = "", ""
+     elif len(vals) == 1:
+         orig, draft = "", vals[0]
+     else:
+         *orig_parts, draft = vals
+         orig = "\n\n".join(orig_parts)
+
+     p = []
+     p.append("You are a strict evaluator.")
+     p.append(f"CHECKER: {c['label']}")
+     p.append("ORIGINAL TASK:")
+     p.append(orig)
+     p.append("")
+     p.append("DRAFT OUTPUT:")
+     p.append(draft)
+     p.append("")
+     p.append("Respond with sections:")
+     for s in secs:
+         p.append(f"- {s}")
+     p.append("")
+     for s in secs:
+         p.append(f"{s}:")
+         p.append("[content]")
+         p.append("")
+     return "\n".join(p)
+
+ def run_generator(mid, *inputs):
+     return call_llm(generator_prompt(mid, *inputs))
+
+ def run_checker(cid, *inputs):
+     return call_llm(checker_prompt(cid, *inputs))
+
+ def build_ui():
+     with gr.Blocks(title="Modular Intelligence") as demo:
+         gr.Markdown("# Modular Intelligence\nSelect a module and generate output.")
+
+         for m in GENERATORS:
+             with gr.Tab(m["label"]):
+                 gr.Markdown(m["description"])
+                 input_boxes = []
+                 for key, ph in m["input_placeholders"].items():
+                     tb = gr.Textbox(label=key, placeholder=ph, lines=4)
+                     input_boxes.append(tb)
+
+                 output_box = gr.Textbox(label="Module output", lines=18)
+                 gr.Button("Run module").click(
+                     fn=partial(run_generator, m["id"]),
+                     inputs=input_boxes,
+                     outputs=output_box
+                 )
+
+                 if m.get("has_checker"):
+                     cid = m.get("checker_id")
+                     if cid in CHECKERS:
+                         gr.Markdown("### Checker")
+                         checker_out = gr.Textbox(label="Checker output", lines=14)
+                         gr.Button("Run checker").click(
+                             fn=partial(run_checker, cid),
+                             inputs=input_boxes + [output_box],
+                             outputs=checker_out
+                         )
+     return demo
+
+ if __name__ == "__main__":
+     app = build_ui()
+     app.launch()
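
Note: App.py expects a modules.json file at the repository root, which is not part of this commit. Judging only from the keys the code reads ("type", "id", "label", "description", "input_placeholders", "output_sections", and the optional "has_checker"/"checker_id" on generators), a minimal config could look like the hypothetical sketch below; the module ids, labels, placeholders, and section names are invented for illustration and are not taken from this repository.

# Hypothetical example only: writes a minimal modules.json matching the keys App.py reads.
import json

example_config = {
    "modules": [
        {
            "type": "generator",
            "id": "planner",                       # looked up via GEN_BY_ID[module_id]
            "label": "Planner",                    # used as the Gradio tab title
            "description": "Turns a goal into a step-by-step plan.",
            "input_placeholders": {                # one Textbox per key; value is the placeholder text
                "goal": "Describe the goal to plan for..."
            },
            "output_sections": ["PLAN", "RISKS"],  # section headers requested from the LLM
            "has_checker": True,
            "checker_id": "plan_checker"           # must match a checker module's "id"
        },
        {
            "type": "checker",
            "id": "plan_checker",
            "label": "Plan checker",
            "output_sections": ["VERDICT", "ISSUES"]
        }
    ]
}

with open("modules.json", "w", encoding="utf-8") as f:
    json.dump(example_config, f, indent=2)

With such a file in place, running python App.py launches the Gradio interface; gradio, transformers, and a backend such as torch must be installed, and the first run downloads the gpt2 weights. Since gpt2 has a 1024-token context window, long module inputs combined with max_new_tokens=512 can run up against that limit.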