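# Standalone runner for the matching_series metric: loads two numpy arrays
# (predictions and references) and prints the computed scores as JSON.
#
# Example invocation (script and file names are placeholders):
#   python this_script.py predictions.npy references.npy --output results.json --batch_size 1000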
import json
import logging
import time
from argparse import ArgumentParser
import evaluate
import numpy as np
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
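# CLI: two required array paths plus optional settings for output, batching,
# multiprocessing, and numeric precision.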
parser = ArgumentParser(
    description="Compute the matching series score between two time series stored in numpy arrays"
)
parser.add_argument("predictions", type=str, help="Path to the numpy array containing the predictions")
parser.add_argument("references", type=str, help="Path to the numpy array containing the references")
parser.add_argument("--output", type=str, help="Path to the output file")
parser.add_argument("--batch_size", type=int, help="Batch size to use for the computation")
parser.add_argument("--num_processes", type=int, help="Batch size to use for the computation", default=1)
parser.add_argument("--dtype", type=str, help="Data type to use for the computation", default="float32")
parser.add_argument("--debug", action="store_true", help="Debug mode")
args = parser.parse_args()
if not args.predictions or not args.references:
    raise ValueError("You must provide the path to the predictions and references numpy arrays")
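# Load both arrays from disk and cast them to the requested dtype.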
predictions = np.load(args.predictions).astype(args.dtype)
references = np.load(args.references).astype(args.dtype)
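# Debug mode evaluates only the first 1,000 series to keep the run short.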
if args.debug:
    predictions = predictions[:1000]
    references = references[:1000]
logger.info(f"predictions shape: {predictions.shape}")
logger.info(f"references shape: {references.shape}")
import matching_series
s = time.time()
metric = matching_series.matching_series()
# metric = evaluate.load("matching_series.py")
results = metric.compute(
    predictions=predictions,
    references=references,
    batch_size=args.batch_size,
    num_processes=args.num_processes,
    return_each_features=True,
    return_coverages=True,
    dtype=args.dtype,
)
logger.info(f"Time taken: {time.time() - s}")
print(json.dumps(results))
if args.output:
    with open(args.output, "w") as f:
        json.dump(results, f)