Languages: Vietnamese
holylovenia committed on
Commit 618286f
1 Parent(s): 5b7ebd8

Upload utils.py with huggingface_hub

Files changed (1)
  1. utils.py +61 -0
utils.py ADDED
@@ -0,0 +1,61 @@
+ import typing as T
+
+ from conllu.exceptions import ParseException
+ from conllu.models import Metadata, TokenList
+ from conllu.parser import (DEFAULT_FIELD_PARSERS, DEFAULT_FIELDS,
+                            _FieldParserType, _MetadataParserType,
+                            parse_comment_line, parse_line)
+
+ imputed_sent_id: int = 1
+
+
+ def parse_token_and_impute_metadata(data: str, fields: T.Optional[T.Sequence[str]] = None, field_parsers: T.Optional[T.Dict[str, _FieldParserType]] = None, metadata_parsers: T.Optional[T.Dict[str, _MetadataParserType]] = None) -> TokenList:
+     """
+     Overrides conllu.parse_token_and_metadata via monkey patching.
+     This function imputes the following metadata if they are not found in the .conllu file:
+     - sent_id: an integer sentence counter, stored as a string.
+     - text (str): a concatenated string of token forms. This assumes that all token forms
+       are separated by a single space ' ', and does not consider the `SpaceAfter` field.
+     """
+
+     if not data:
+         raise ParseException("Can't create TokenList, no data sent to constructor.")
+
+     fields = fields or DEFAULT_FIELDS
+     global imputed_sent_id
+
+     if not field_parsers:
+         field_parsers = DEFAULT_FIELD_PARSERS.copy()
+     elif sorted(field_parsers.keys()) != sorted(fields):
+         new_field_parsers = DEFAULT_FIELD_PARSERS.copy()
+         new_field_parsers.update(field_parsers)
+         field_parsers = new_field_parsers
+
+     tokens = []
+     metadata = Metadata()
+
+     for line in data.split('\n'):
+         line = line.strip()
+
+         if not line:
+             continue
+
+         if line.startswith('#'):
+             pairs = parse_comment_line(line, metadata_parsers=metadata_parsers)
+             for key, value in pairs:
+                 metadata[key] = value
+
+         else:
+             tokens.append(parse_line(line, fields, field_parsers))
+
+     if 'sent_id' not in metadata:
+         metadata['sent_id'] = str(imputed_sent_id)
+         imputed_sent_id += 1
+
+     if 'text' not in metadata:
+         imputed_text = ""
+         for token in tokens:
+             imputed_text += str(token['form']) + " "
+         metadata['text'] = imputed_text
+
+     return TokenList(tokens, metadata, default_fields=fields)
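
A minimal usage sketch (not part of this commit): the dataset loader is assumed to import this helper and monkey-patch conllu before parsing, as the docstring above describes. The import path `utils` and the sample sentence are illustrative assumptions, not taken from the repository.

import conllu

from utils import parse_token_and_impute_metadata  # assumed import path for the file added above

# conllu.parse() resolves parse_token_and_metadata from the conllu package
# namespace at call time, so rebinding that name routes every sentence
# through the imputing variant defined in utils.py.
conllu.parse_token_and_metadata = parse_token_and_impute_metadata

# Two CoNLL-U token lines with no "# sent_id" or "# text" comment lines.
raw = (
    "1\tXin\t_\t_\t_\t_\t0\troot\t_\t_\n"
    "2\tchào\t_\t_\t_\t_\t1\tdiscourse\t_\t_\n"
)

sentences = conllu.parse(raw)
print(sentences[0].metadata["sent_id"])  # "1", taken from the module-level counter
print(sentences[0].metadata["text"])     # "Xin chào " (forms joined by spaces, trailing space included)

Because imputed_sent_id is a module-level counter, imputed sentence IDs keep incrementing across parse() calls within the same process.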