oceansweep committed
Commit 42e5a58
1 Parent(s): ee6aa85

Upload Article_Summarization_Lib.py

App_Function_Libraries/Web_Scraping/Article_Summarization_Lib.py CHANGED
@@ -26,10 +26,12 @@ import requests
 # 3rd-Party Imports
 #
 # Local Imports
-from App_Function_Libraries.Utils.Utils import sanitize_filename
+from App_Function_Libraries.Utils.Utils import sanitize_filename, load_comprehensive_config
 from App_Function_Libraries.Web_Scraping.Article_Extractor_Lib import scrape_article
-from App_Function_Libraries.Summarization.Local_Summarization_Lib import summarize_with_llama, summarize_with_oobabooga, summarize_with_tabbyapi, \
-    summarize_with_vllm, summarize_with_kobold, save_summary_to_file, summarize_with_local_llm
+from App_Function_Libraries.Summarization.Local_Summarization_Lib import summarize_with_llama, summarize_with_oobabooga, \
+    summarize_with_tabbyapi, \
+    summarize_with_vllm, summarize_with_kobold, save_summary_to_file, summarize_with_local_llm, summarize_with_ollama, \
+    summarize_with_custom_openai
 from App_Function_Libraries.Summarization.Summarization_General_Lib import summarize_with_openai, summarize_with_anthropic, summarize_with_cohere, \
     summarize_with_groq, summarize_with_openrouter, summarize_with_deepseek, summarize_with_huggingface, \
     summarize_with_mistral
@@ -104,7 +106,7 @@ def scrape_and_summarize(url, custom_prompt_arg, api_name, api_key, keywords, cu
 
     with open(json_file_path, 'w') as json_file:
         json.dump([{'text': content}], json_file, indent=2)
-
+    config = load_comprehensive_config()
     try:
         if api_name.lower() == 'openai':
             # def summarize_with_openai(api_key, input_data, custom_prompt_arg)
@@ -138,8 +140,7 @@ def scrape_and_summarize(url, custom_prompt_arg, api_name, api_key, keywords, cu
         elif api_name.lower() == "llama.cpp":
             logging.debug(f"MAIN: Trying to summarize with Llama.cpp")
             # def summarize_with_llama(api_url, file_path, token, custom_prompt)
-            summary = summarize_with_llama(json_file_path, article_custom_prompt, system_message)
-
+            summary = summarize_with_llama(json_file_path, article_custom_prompt, config['Local-API']['llama_api_key'], None, system_message)
         elif api_name.lower() == "kobold":
             logging.debug(f"MAIN: Trying to summarize with Kobold.cpp")
             # def summarize_with_kobold(input_data, kobold_api_token, custom_prompt_input, api_url):
@@ -156,12 +157,21 @@ def scrape_and_summarize(url, custom_prompt_arg, api_name, api_key, keywords, cu
         elif api_name.lower() == "vllm":
             logging.debug(f"MAIN: Trying to summarize with VLLM")
             # def summarize_with_vllm(api_key, input_data, custom_prompt_input):
-            summary = summarize_with_vllm(json_file_path, article_custom_prompt, system_message)
-
+            summary = summarize_with_vllm(json_file_path, article_custom_prompt, None, None, system_message)
         elif api_name.lower() == "local-llm":
             logging.debug(f"MAIN: Trying to summarize with Local LLM")
             summary = summarize_with_local_llm(json_file_path, article_custom_prompt, system_message)
 
+        elif api_name.lower() == "ollama":
+            logging.debug(f"MAIN: Trying to summarize with OLLAMA")
+            # def summarize_with_ollama(input_data, api_key, custom_prompt, api_url):
+            summary = summarize_with_ollama(json_file_path, article_custom_prompt, api_key, None, system_message, None)
+
+        elif api_name == "custom_openai_api":
+            logging.debug(f"MAIN: Trying to summarize with Custom_OpenAI API")
+            summary = summarize_with_custom_openai(json_file_path, article_custom_prompt, api_key, temp=None, system_message=None)
+
+
         elif api_name.lower() == "huggingface":
             logging.debug(f"MAIN: Trying to summarize with huggingface")
             # def summarize_with_huggingface(api_key, input_data, custom_prompt_arg):
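For orientation, the sketch below illustrates the dispatch pattern this commit extends: the api_name string selects a backend-specific summarizer, and the llama.cpp branch now reads its key from the INI-style config returned by load_comprehensive_config() (config['Local-API']['llama_api_key']) instead of relying on the caller. Everything in the sketch is a hypothetical stand-in for illustration; the stub summarizers, the load_config helper, and the config path are not the project's real implementations.

# Hypothetical sketch of the api_name dispatch; only the config lookup and the
# branch names mirror the diff above, the backends are stand-in stubs.
import configparser
import json

def load_config(path="config.txt"):
    # Stand-in for load_comprehensive_config(): parse an INI-style config file.
    config = configparser.ConfigParser()
    config.read(path)
    return config

def stub_summarizer(backend):
    # Placeholder for summarize_with_llama / summarize_with_ollama / etc.
    def _summarize(input_path, prompt, api_key, temp, system_message):
        # The input file follows the format written above: [{'text': content}].
        with open(input_path, "r") as f:
            text = json.load(f)[0]["text"]
        return f"[{backend}] summary of {len(text)} chars (prompt: {prompt!r})"
    return _summarize

def dispatch_summary(api_name, json_file_path, prompt, api_key, system_message, config):
    name = api_name.lower()
    if name == "llama.cpp":
        # As in the commit: the key comes from the loaded config, not the caller.
        key = config["Local-API"]["llama_api_key"]
        return stub_summarizer("llama.cpp")(json_file_path, prompt, key, None, system_message)
    elif name == "ollama":
        return stub_summarizer("ollama")(json_file_path, prompt, api_key, None, system_message)
    elif name == "custom_openai_api":
        return stub_summarizer("custom_openai")(json_file_path, prompt, api_key, None, system_message)
    raise ValueError(f"Unsupported api_name: {api_name}")

Loading the config once before the try block and reading per-backend credentials from it is presumably why load_comprehensive_config was added to the imports, rather than threading each local API key through the function's arguments.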