neuralworm committed
Commit 711c0ff
Parent(s): 90c9489

initial commit

Files changed (3):
  1. .gitignore +1 -0
  2. app.py +15 -66
  3. psychohistory.py +6 -3
.gitignore ADDED
@@ -0,0 +1 @@
+ __pycache__
app.py CHANGED
@@ -1,80 +1,29 @@
  import gradio as gr
- from huggingface_hub import InferenceClient
- import gen
+ import gen
  import psychohistory

- """
- For more information on `huggingface_hub` Inference API support, please check the docs: https://huggingface.co/docs/huggingface_hub/v0.22.2/en/guides/inference
- """
- client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")
-
-
- def respond(
-     message,
-     history: list[tuple[str, str]],
-     system_message,
-     max_tokens,
-     temperature,
-     top_p,
- ):
-     messages = [{"role": "system", "content": system_message}]
-
-     for val in history:
-         if val[0]:
-             messages.append({"role": "user", "content": val[0]})
-         if val[1]:
-             messages.append({"role": "assistant", "content": val[1]})
-
-     messages.append({"role": "user", "content": message})
-
-     response = ""
-
-     for message in client.chat_completion(
-         messages,
-         max_tokens=max_tokens,
-         stream=True,
-         temperature=temperature,
-         top_p=top_p,
-     ):
-         token = message.choices[0].delta.content
-
-         response += token
-         yield response
-
- """
- For information on how to customize the ChatInterface, peruse the gradio docs: https://www.gradio.app/docs/chatinterface
- """
-
-
  with gr.Blocks(title="PSYCHOHISTORY") as app:
-
-
      with gr.Tab("Search"):
          with gr.Row():
-             txt_search = gr.Textbox(value="Iran and Israel war",label="Search Term",scale=5)
-             btn_search = gr.Button("Look",scale=1)
+             txt_search = gr.Textbox(value="Iran and Israel war", label="Search Term", scale=5)
+             btn_search = gr.Button("Look", scale=1)
          with gr.Row():
-             #search_results = gr.Dataframe(type="pandas")
              mem_results = gr.JSON(label="Results")
          btn_search.click(
-             gen.generate,
+             gen.generate,
              inputs=[txt_search],
              outputs=mem_results
-         )
-
-     #with gr.Row():
-     #    big_block = gr.HTML("""
-     #    <iframe style="scroll-padding-left: 50%; relative;background-color: #fff; height: 75vh; width: 100%; overflow-y: hidden; overflow-x: hidden;" src="https://holmesbot.com/api/shared?id=16657e456d9514"></iframe>
-     #    """)
-
-     with gr.Tab("Graph"):
-         gr.load("models/stabilityai/stable-diffusion-xl-base-1.0")
-     with gr.Tab("Chat"):
-
-         gr.ChatInterface(
-             respond,
-         )
+         )
+
+         with gr.Row():
+             img_output = gr.Image(label="Graph Visualization", type="filepath")  # Add an Image component

+         # Trigger graph generation after JSON is generated
+         mem_results.change(
+             psychohistory.main,
+             inputs=[mem_results],
+             outputs=img_output
+         )

  if __name__ == "__main__":
-     app.launch()
+     app.launch()
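
Applied together, these hunks strip the zephyr-7b chat scaffolding and leave app.py as a single search tab: the button sends the query to gen.generate, and whenever the JSON results change, psychohistory.main is invoked to render a graph image. For reference, the post-commit file reads roughly as follows; the indentation is inferred from the surrounding with-blocks, and gen and psychohistory are the repository's own modules:

import gradio as gr
import gen
import psychohistory

with gr.Blocks(title="PSYCHOHISTORY") as app:

    with gr.Tab("Search"):
        with gr.Row():
            txt_search = gr.Textbox(value="Iran and Israel war", label="Search Term", scale=5)
            btn_search = gr.Button("Look", scale=1)
        with gr.Row():
            mem_results = gr.JSON(label="Results")
        btn_search.click(
            gen.generate,            # search/generation step defined in the repo's gen module
            inputs=[txt_search],
            outputs=mem_results
        )

        with gr.Row():
            img_output = gr.Image(label="Graph Visualization", type="filepath")  # rendered graph image

        # Trigger graph generation after JSON is generated
        mem_results.change(
            psychohistory.main,      # builds the graph from the JSON and returns an image path
            inputs=[mem_results],
            outputs=img_output
        )

if __name__ == "__main__":
    app.launch()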
psychohistory.py CHANGED
@@ -156,8 +156,9 @@ def draw_global_tree_3d(G, filename='global_tree.png'):
      plt.savefig(filename, bbox_inches='tight')
      plt.close()

- def main(mode, input_file=None):
+ def main(json_data):
      G = nx.DiGraph()
+     build_graph_from_json(json_data, G)

      if mode == 'random':
          generate_tree(0, 0, 0, 5, 3, 10, G)
@@ -169,7 +170,7 @@ def main(mode, input_file=None):
          print("Invalid mode or input file not provided.")
          return

-     draw_global_tree_3d(G)
+     draw_global_tree_3d(G, filename='global_tree.png')

      best_path, best_mean_prob, worst_path, worst_mean_prob, longest_path, shortest_path = find_paths(G)

@@ -186,7 +187,7 @@ def main(mode, input_file=None):
      print(f"\nPath with the shortest duration: {' -> '.join(map(str, shortest_path))}")
      print(f"Duration: {max(G.nodes[node]['pos'][0] for node in shortest_path) - min(G.nodes[node]['pos'][0] for node in shortest_path):.2f}")

-     draw_global_tree_3d(G)
+     draw_global_tree_3d(G, filename='global_tree.png')

      if best_path:
          draw_path_3d(G, best_path, 'best_path.png', 'blue')
@@ -197,6 +198,8 @@ def main(mode, input_file=None):
      if shortest_path:
          draw_path_3d(G, shortest_path, 'shortest_duration_path.png', 'purple')

+     return 'global_tree.png'
+

  if __name__ == "__main__":
      if len(sys.argv) < 2:
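
After this change main no longer takes a mode or input file: it accepts the JSON emitted by gen.generate and returns the path of the rendered overview image, which matches the gr.Image(type="filepath") output wired up in app.py. The unchanged context lines still branch on mode, which is no longer a parameter; a trimmed sketch of the function with those leftover random/file branches dropped (this trimming is an assumption, not something the commit does) would be:

def main(json_data):
    G = nx.DiGraph()
    build_graph_from_json(json_data, G)   # populate the graph from the search-result JSON

    draw_global_tree_3d(G, filename='global_tree.png')   # render the full event tree

    best_path, best_mean_prob, worst_path, worst_mean_prob, longest_path, shortest_path = find_paths(G)

    # per-path reporting and drawing as in the committed file, e.g.:
    if best_path:
        draw_path_3d(G, best_path, 'best_path.png', 'blue')
    if shortest_path:
        draw_path_3d(G, shortest_path, 'shortest_duration_path.png', 'purple')

    return 'global_tree.png'   # file path handed back to gr.Image(type="filepath") in app.py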