zhuqi committed on
Commit 13f36ca
1 Parent(s): 7e693ef

Upload preprocess.py

Files changed (1)
  1. preprocess.py +232 -0
preprocess.py ADDED
@@ -0,0 +1,232 @@
import copy
import json
import os
from zipfile import ZipFile, ZIP_DEFLATED
from shutil import rmtree
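
# Ontology of the single-domain WOZ restaurant data: slot definitions,
# user and system intents, an empty dialogue-state template, and containers
# for the dialogue acts collected during preprocessing.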
ontology = {
    'domains': {
        'restaurant': {
            'description': 'search for a restaurant to dine',
            'slots': {
                'food': {
                    'description': 'food type of the restaurant',
                    'is_categorical': False,
                    'possible_values': []
                },
                'area': {
                    'description': 'area of the restaurant',
                    'is_categorical': True,
                    'possible_values': ["east", "west", "centre", "north", "south"]
                },
                'postcode': {
                    'description': 'postal code of the restaurant',
                    'is_categorical': False,
                    'possible_values': []
                },
                'phone': {
                    'description': 'phone number of the restaurant',
                    'is_categorical': False,
                    'possible_values': []
                },
                'address': {
                    'description': 'address of the restaurant',
                    'is_categorical': False,
                    'possible_values': []
                },
                'price range': {
                    'description': 'price range of the restaurant',
                    'is_categorical': True,
                    'possible_values': ["expensive", "moderate", "cheap"]
                },
                'name': {
                    'description': 'name of the restaurant',
                    'is_categorical': False,
                    'possible_values': []
                }
            }
        }
    },
    'intents': {
        'inform': {
            'description': 'system informs user the value of a slot'
        },
        'request': {
            'description': 'system asks the user to provide value of a slot'
        }
    },
    'state': {
        'restaurant': {
            'food': '',
            'area': '',
            'postcode': '',
            'phone': '',
            'address': '',
            'price range': '',
            'name': ''
        }
    },
    "dialogue_acts": {
        "categorical": {},
        "non-categorical": {},
        "binary": {}
    }
}


def convert_da(da, utt):
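    """Convert a WOZ turn_label list of (slot, value) pairs into binary,
    categorical and non-categorical dialogue acts, adding character spans
    for non-categorical values that can be located in the utterance."""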
    global ontology

    converted = {
        'binary': [],
        'categorical': [],
        'non-categorical': []
    }

    for s, v in da:
        if s == 'request':
            converted['binary'].append({
                'intent': 'request',
                'domain': 'restaurant',
                'slot': v,
            })

        else:
            slot_type = 'categorical' if ontology['domains']['restaurant']['slots'][s]['is_categorical'] else 'non-categorical'

            v = v.strip()
            if v != 'dontcare' and ontology['domains']['restaurant']['slots'][s]['is_categorical']:
                if v == 'center':
                    v = 'centre'
                elif v == 'east side':
                    v = 'east'
                assert v in ontology['domains']['restaurant']['slots'][s]['possible_values'], [s, v, utt]

            converted[slot_type].append({
                'intent': 'inform',
                'domain': 'restaurant',
                'slot': s,
                'value': v
            })
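
            # for non-categorical values, record the character span in the utterance when it can be found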
            if slot_type == 'non-categorical' and v != 'dontcare':

                start = utt.lower().find(v)

                if start != -1:
                    end = start + len(v)
                    converted[slot_type][-1]['start'] = start
                    converted[slot_type][-1]['end'] = end
                    converted[slot_type][-1]['value'] = utt[start:end]

    return converted


def preprocess():
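    """Convert the original WOZ json files in woz/ into the format defined above
    and write data/ontology.json, data/dialogues.json, dummy_data.json and data.zip."""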
    original_data_dir = 'woz'
    new_data_dir = 'data'
    os.makedirs(new_data_dir, exist_ok=True)

    dataset = 'woz'
    splits = ['train', 'validation', 'test']
    domain = 'restaurant'
    dialogues_by_split = {split: [] for split in splits}
    global ontology

    for split in splits:
        if split != 'validation':
            filename = os.path.join(original_data_dir, f'woz_{split}_en.json')
        else:
            filename = os.path.join(original_data_dir, 'woz_validate_en.json')
        if not os.path.exists(filename):
            raise FileNotFoundError(
                f'cannot find {filename}, please download it manually from https://github.com/nmrksic/neural-belief-tracker/tree/master/data/woz')

        data = json.load(open(filename))

        for item in data:
            dialogue = {
                'dataset': dataset,
                'data_split': split,
                'dialogue_id': f'{dataset}-{split}-{len(dialogues_by_split[split])}',
                'original_id': item['dialogue_idx'],
                'domains': [domain],
                'turns': []
            }

            turns = item['dialogue']
            n_turn = len(turns)

            for i in range(n_turn):
                sys_utt = turns[i]['system_transcript'].strip()
                usr_utt = turns[i]['transcript'].strip()
                usr_da = turns[i]['turn_label']

                for s, v in usr_da:
                    if s == 'request':
                        assert v in ontology['domains']['restaurant']['slots']
                    else:
                        assert s in ontology['domains']['restaurant']['slots']
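
                # no system turn is added before the first user turn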
                if i != 0:
                    dialogue['turns'].append({
                        'utt_idx': len(dialogue['turns']),
                        'speaker': 'system',
                        'utterance': sys_utt,
                    })
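
                # rebuild the full dialogue state for this turn from the annotated belief state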
                cur_state = copy.deepcopy(ontology['state'])
                for act_slots in turns[i]['belief_state']:
                    act, slots = act_slots['act'], act_slots['slots']
                    if act == 'inform':
                        for s, v in slots:
                            v = v.strip()
                            if v != 'dontcare' and ontology['domains']['restaurant']['slots'][s]['is_categorical']:
                                if v not in ontology['domains']['restaurant']['slots'][s]['possible_values']:
                                    if v == 'center':
                                        v = 'centre'
                                    elif v == 'east side':
                                        v = 'east'
                                    assert v in ontology['domains']['restaurant']['slots'][s]['possible_values']

                            cur_state[domain][s] = v

                cur_usr_da = convert_da(usr_da, usr_utt)

                # add to dialogue_acts dictionary in the ontology
                for da_type in cur_usr_da:
                    das = cur_usr_da[da_type]
                    for da in das:
                        ontology["dialogue_acts"][da_type].setdefault((da['intent'], da['domain'], da['slot']), {})
                        ontology["dialogue_acts"][da_type][(da['intent'], da['domain'], da['slot'])]['user'] = True

                dialogue['turns'].append({
                    'utt_idx': len(dialogue['turns']),
                    'speaker': 'user',
                    'utterance': usr_utt,
                    'state': cur_state,
                    'dialogue_acts': cur_usr_da,
                })

            dialogues_by_split[split].append(dialogue)
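
    # merge the splits, turn the collected dialogue acts into sorted lists, and write the data files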
    dialogues = []
    for split in splits:
        dialogues += dialogues_by_split[split]
    for da_type in ontology['dialogue_acts']:
        ontology["dialogue_acts"][da_type] = sorted([str(
            {'user': speakers.get('user', False), 'system': speakers.get('system', False), 'intent': da[0],
             'domain': da[1], 'slot': da[2]}) for da, speakers in ontology["dialogue_acts"][da_type].items()])
    json.dump(dialogues[:10], open('dummy_data.json', 'w', encoding='utf-8'), indent=2, ensure_ascii=False)
    json.dump(ontology, open(f'{new_data_dir}/ontology.json', 'w', encoding='utf-8'), indent=2, ensure_ascii=False)
    json.dump(dialogues, open(f'{new_data_dir}/dialogues.json', 'w', encoding='utf-8'), indent=2, ensure_ascii=False)
    with ZipFile('data.zip', 'w', ZIP_DEFLATED) as zf:
        for filename in os.listdir(new_data_dir):
            zf.write(f'{new_data_dir}/{filename}')
    rmtree(original_data_dir)
    rmtree(new_data_dir)
    return dialogues, ontology


if __name__ == '__main__':
    preprocess()