Update README.md
README.md CHANGED
@@ -58,6 +58,9 @@ The easiest way is to load the inference api from huggingface and second method
 # Use a pipeline as a high-level helper
 from transformers import pipeline
 pipe = pipeline("token-classification", model="Clinical-AI-Apollo/Medical-NER", aggregation_strategy='simple')
+result = pipe('45 year old woman diagnosed with CAD')
+
+
 
 # Load model directly
 from transformers import AutoTokenizer, AutoModelForTokenClassification
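
For context, the line added in this commit calls the pipeline on a sample clinical sentence. Below is a minimal sketch of running that snippet end to end; the printed field names (`entity_group`, `word`, `score`) come from the standard transformers token-classification pipeline output with `aggregation_strategy="simple"`, not from anything stated in this diff.

```python
# Minimal sketch of the snippet added in this commit, assuming the standard
# transformers token-classification pipeline API.
from transformers import pipeline

pipe = pipeline(
    "token-classification",
    model="Clinical-AI-Apollo/Medical-NER",
    aggregation_strategy="simple",
)

result = pipe("45 year old woman diagnosed with CAD")

# With aggregation_strategy="simple", each item is a dict with keys such as
# "entity_group", "score", "word", "start", and "end".
for entity in result:
    print(entity["entity_group"], entity["word"], round(float(entity["score"]), 3))
```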