arthrod committed
Commit: 5c24e5a
1 Parent(s): 8d1aa2c

Update README.md

Files changed (1)
  1. README.md +2 -2
README.md CHANGED
@@ -49,7 +49,7 @@ To use the model:
 import json
 from transformers import AutoModelForCausalLM, AutoTokenizer
 
-def predict_NuExtract(model, tokenizer, texts, template, batch_size=1, max_length=10_000, max_new_tokens=4_000):
+def predict_NuExtract(model, tokenizer, texts, template, batch_size=1, max_length=10000, max_new_tokens=4000):
     template = json.dumps(json.loads(template), indent=4)
     prompts = [f"""<|input|>\n### Template:\n{template}\n### Text:\n{text}\n\n<|output|>""" for text in texts]
 
@@ -104,7 +104,7 @@ Sliding window prompting:
 ```python
 import json
 
-MAX_INPUT_SIZE = 20_000
+MAX_INPUT_SIZE = 20000
 MAX_NEW_TOKENS = 6000
 
 def clean_json_text(text):
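
For context: `10_000` and `10000` are the same integer literal in Python (underscore digit separators are valid since Python 3.6), so this change only affects how the defaults read in the README, not the function's behavior. Below is a minimal usage sketch of the updated `predict_NuExtract` signature; it assumes the function as defined earlier in the README, and the checkpoint path, template, and input text are illustrative placeholders, not part of this commit.

```python
# Minimal usage sketch. Assumptions: "numind/NuExtract" is a placeholder checkpoint
# path, the template and input text are made up, and predict_NuExtract is the
# function defined in this README (returning one output string per input text).
import json

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

model_name = "numind/NuExtract"  # placeholder; use the actual model repository id
model = AutoModelForCausalLM.from_pretrained(
    model_name, torch_dtype=torch.bfloat16, trust_remote_code=True
)
tokenizer = AutoTokenizer.from_pretrained(model_name, trust_remote_code=True)
model.eval()

# Hypothetical extraction template and input text, for illustration only.
template = json.dumps({"name": "", "affiliation": ""})
texts = ["Ada Lovelace worked with Charles Babbage on the Analytical Engine."]

# The updated defaults (max_length=10000, max_new_tokens=4000) apply when not overridden.
predictions = predict_NuExtract(model, tokenizer, texts, template, batch_size=1)
print(predictions[0])
```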