Zwea Htet committed
Commit 215cfd3
Parent: b17ddeb

revert to previous version

Files changed (1):
  models/llamaCustom.py (+4, -6)
models/llamaCustom.py CHANGED
@@ -5,8 +5,8 @@ from typing import Any, List, Mapping, Optional
 
 import numpy as np
 import openai
-import streamlit as st
 import pandas as pd
+import streamlit as st
 from dotenv import load_dotenv
 from huggingface_hub import HfFileSystem
 from langchain.llms.base import LLM
@@ -71,15 +71,13 @@ class CustomLLM(LLM):
     def _llm_type(self) -> str:
         return "custom"
 
-@st.cache_resource
 class LlamaCustom:
-
     def __init__(self, model_name: str) -> None:
         self.vector_index = self.initialize_index(model_name=model_name)
 
     def initialize_index(self, model_name: str):
         index_name = model_name.split("/")[-1]
-
+
         file_path = f"./vectorStores/{index_name}"
         if os.path.exists(path=file_path):
             # rebuild storage context
@@ -103,7 +101,7 @@ class LlamaCustom:
         service_context = ServiceContext.from_defaults(
             llm_predictor=llm_predictor, prompt_helper=prompt_helper
         )
-
+
         # documents = prepare_data(r"./assets/regItems.json")
         documents = SimpleDirectoryReader(input_dir="./assets/pdf").load_data()
 
@@ -123,4 +121,4 @@ class LlamaCustom:
         print("query_str: ", query_str)
         query_engine = self.vector_index.as_query_engine()
         response = query_engine.query(query_str)
-        return str(response)
+        return str(response)
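For context on the @st.cache_resource decorator this commit removes from the class: Streamlit's st.cache_resource is normally applied to a function that builds and returns an expensive object, not to a class definition. Below is a minimal sketch of that usual pattern, reusing the LlamaCustom class from this file; the load_llama helper, the model id, and the query string are hypothetical, and the import path assumes the repo root is on sys.path.

import streamlit as st

from models.llamaCustom import LlamaCustom  # class defined in the file above

# Hypothetical helper: cache the expensive index construction once per
# Streamlit server process, instead of decorating the class itself.
@st.cache_resource
def load_llama(model_name: str) -> LlamaCustom:
    return LlamaCustom(model_name=model_name)

llama = load_llama("your-org/your-model")  # placeholder model id
# vector_index is the attribute set in LlamaCustom.__init__ in the diff above
query_engine = llama.vector_index.as_query_engine()
print(query_engine.query("What does the document say about registration?"))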
 
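The initialize_index method touched by the second and third hunks follows the common llama_index load-or-build pattern: reload a persisted index from ./vectorStores/<index_name> when it exists, otherwise build one from the PDFs in ./assets/pdf. Below is a simplified sketch of that flow, assuming a pre-0.10 llama_index import layout and default service settings (it omits the custom LLMPredictor/ServiceContext wiring the real file uses); load_or_build_index is a hypothetical name.

import os

from llama_index import (
    SimpleDirectoryReader,
    StorageContext,
    VectorStoreIndex,
    load_index_from_storage,
)

def load_or_build_index(index_name: str):
    # Same on-disk layout as the diff: one persist directory per index name.
    persist_dir = f"./vectorStores/{index_name}"
    if os.path.exists(persist_dir):
        # Rebuild the storage context from the persisted files and reload the index.
        storage_context = StorageContext.from_defaults(persist_dir=persist_dir)
        return load_index_from_storage(storage_context)
    # Otherwise read the PDFs, build a fresh index, and persist it for next time.
    documents = SimpleDirectoryReader(input_dir="./assets/pdf").load_data()
    index = VectorStoreIndex.from_documents(documents)
    index.storage_context.persist(persist_dir=persist_dir)
    return index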