write to file for each url
src/retrieval/scraper_for_knowledge_store.py
CHANGED
@@ -146,8 +146,8 @@ if __name__ == "__main__":
        except Exception as e:
            total_failed += 1

-
-
+        json_file.write(json.dumps(json_data, ensure_ascii=False) + "\n")
+        json_file.flush()

    print(f"Output for {args.tsv_input_file} saved to {json_output_path}")
    elapsed_time = time.time() - st
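For context, the effect of this change is to append each scraped record to the output file as soon as it is produced (and flush it to disk), rather than holding everything in memory until the end. The following is a minimal sketch of that pattern, assuming the script loops over URLs from the TSV input; scrape_url and urls are hypothetical stand-ins for the script's actual code.

import json

def write_results_incrementally(urls, json_output_path, scrape_url):
    # Hypothetical sketch: one JSON line per successfully scraped URL.
    total_failed = 0
    with open(json_output_path, "w", encoding="utf-8") as json_file:
        for url in urls:
            try:
                json_data = scrape_url(url)  # hypothetical scraping helper
            except Exception:
                total_failed += 1
                continue
            # Write the record as soon as it is ready so partial progress
            # survives a crash, then flush to push it to disk immediately.
            json_file.write(json.dumps(json_data, ensure_ascii=False) + "\n")
            json_file.flush()
    return total_failed

Flushing after every record trades a little throughput for durability: if the scraper is interrupted partway through a long URL list, the lines written so far remain valid JSONL.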