File size: 2,066 Bytes
8b54370
 
 
 
 
 
 
 
 
 
 
 
 
 
16d11d9
 
 
 
8b54370
16d11d9
 
8b54370
16d11d9
8b54370
 
16d11d9
8b54370
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
import pandas as pd
from openai import OpenAI
from io import StringIO
import json
import os

class OpenAIConnector:
    """Matches a user request against a prefiltered list of actions by
    delegating the ranking to the OpenAI chat-completions API (gpt-4o)."""

    # Read once at class-definition time; None when the variable is unset,
    # in which case the OpenAI client fails on first use.
    OPENAI_ACCESS_TOKEN = os.getenv("OPENAI_ACCESS_TOKEN")

    def generate_llm_system_message(self, prefiltered_names, prefiltered_descriptions):
        """Build the system prompt listing the candidate actions.

        Args:
            prefiltered_names: iterable of action names.
            prefiltered_descriptions: iterable of action descriptions,
                parallel to prefiltered_names (extra items in the longer
                iterable are silently dropped by zip()).

        Returns:
            The complete system prompt string: the action list followed by
            the matching/formatting instructions for the model.
        """
        # One "action: <name>, description: <desc>" line per candidate.
        actions_list = "\n".join(
            f"action: {action}, description: {desc}"
            for action, desc in zip(prefiltered_names, prefiltered_descriptions)
        )

        system_message = "following is a csv list of actions and their descriptions: \n"
        system_message += actions_list
        system_message += "\n\n"
        system_message += "find me all best fitting actions for the user request and order them by match. please just consider these actions and nothing else, but there might be multiple fitting actions.\n"
        system_message += 'return the actions just in form of a json with action name and short reasoning, no additional text around, no formatting, etc.: [{ "action": "Icon Library", "reason": "Shows you a list of icons you can use in Pimcore configurations." }].\n'
        system_message += 'also state when there is no fitting action for the request with a json like [{"action": null, "reason": "no fitting action found"}].'

        return system_message

    def query_open_ai(self, query, prefiltered_names, prefiltered_descriptions):
        """Send the user query plus the candidate actions to gpt-4o and
        return the model's parsed JSON reply.

        Args:
            query: the user's free-text request.
            prefiltered_names: iterable of action names.
            prefiltered_descriptions: iterable of descriptions, parallel to
                prefiltered_names.

        Returns:
            The parsed JSON payload (expected: a list of
            {"action": ..., "reason": ...} dicts per the system prompt).

        Raises:
            json.JSONDecodeError: if the reply is not valid JSON even after
                stripping a surrounding markdown code fence.
        """
        client = OpenAI(api_key=self.OPENAI_ACCESS_TOKEN)
        system_message = self.generate_llm_system_message(prefiltered_names, prefiltered_descriptions)
        messages = [
            {"role": "system", "content": system_message},
            {"role": "user", "content": query},
        ]

        response = client.chat.completions.create(
            model="gpt-4o",
            messages=messages,
        )
        content = response.choices[0].message.content

        # Despite the "no formatting" instruction, models sometimes wrap the
        # JSON in a ```...``` markdown fence; strip it so json.loads succeeds.
        stripped = content.strip()
        if stripped.startswith("```"):
            # Drop the opening fence line (which may carry a language tag).
            stripped = stripped.split("\n", 1)[1] if "\n" in stripped else ""
            if stripped.rstrip().endswith("```"):
                stripped = stripped.rstrip()[:-3]

        return json.loads(stripped)