aka7774 committed on
Commit
b19aa43
1 Parent(s): 58190b4

Update fn.py

Browse files
Files changed (1) hide show
  1. fn.py +1 -24
fn.py CHANGED
@@ -203,29 +203,6 @@ def chat(message, history = [], instruction = None, args = {}):
203
 
204
  return model_output
205
 
206
- def infer(args: dict):
207
- global cfg
208
-
209
- if 'model_name' in args:
210
- load_model(args['model_name'], args['qtype'], args['dtype'])
211
-
212
- for k in [
213
- 'instruction',
214
- 'inst_template',
215
- 'chat_template',
216
- 'max_new_tokens',
217
- 'temperature',
218
- 'top_p',
219
- 'top_k',
220
- 'repetition_penalty'
221
- ]:
222
- cfg[k] = args[k]
223
-
224
- if 'messages' in args:
225
- return chat(args['input'], args['messages'])
226
- if 'instruction' in args:
227
- return instruct(args['instruction'], args['input'])
228
-
229
  def apply_template(messages):
230
  global tokenizer, cfg
231
 
@@ -235,6 +212,6 @@ def apply_template(messages):
235
  if type(messages) is str:
236
  if cfg['inst_template']:
237
  return cfg['inst_template'].format(instruction=cfg['instruction'], input=messages)
238
- return cfg['instruction']
239
  if type(messages) is list:
240
  return tokenizer.apply_chat_template(conversation=messages, add_generation_prompt=True, tokenize=False)
 
203
 
204
  return model_output
205
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
206
  def apply_template(messages):
207
  global tokenizer, cfg
208
 
 
212
  if type(messages) is str:
213
  if cfg['inst_template']:
214
  return cfg['inst_template'].format(instruction=cfg['instruction'], input=messages)
215
+ return cfg['instruction'].format(input=messages)
216
  if type(messages) is list:
217
  return tokenizer.apply_chat_template(conversation=messages, add_generation_prompt=True, tokenize=False)