Make it compatible without extra config on Transformers.js
#16 · opened by radames
hi @OriLib, this file is needed by transformers.js. With it in the repo, loading the model is super simple, without passing extra params:
After:
import { AutoModel, AutoProcessor, RawImage } from '@xenova/transformers';
// Load model and processor
const model = await AutoModel.from_pretrained('briaai/RMBG-1.4', { quantized: false });
const processor = await AutoProcessor.from_pretrained('briaai/RMBG-1.4');
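For reference, inference then follows the usual RMBG-1.4 flow. This is only a sketch: the image URL is a placeholder, and the `input`/`output` tensor names are assumed to match the repo's ONNX export.

// Load an image (placeholder URL) and preprocess it to 1024x1024 pixel values
const image = await RawImage.fromURL('https://example.com/photo.png');
const { pixel_values } = await processor(image);

// Run the model; `input`/`output` are assumed tensor names from the ONNX export
const { output } = await model({ input: pixel_values });

// Scale the predicted alpha matte to 0-255 and resize back to the original image size
const mask = await RawImage.fromTensor(output[0].mul(255).to('uint8'))
    .resize(image.width, image.height);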
Before:
import { AutoModel, AutoProcessor } from "@xenova/transformers";

// `device` and `quantized` are app-level settings (e.g. WebGPU vs. WASM);
// `Devices` is an enum defined by the app, not by transformers.js.
const model = await AutoModel.from_pretrained("briaai/RMBG-1.4", {
  // Do not require config.json to be present in the repository
  config: { model_type: "custom" },
  quantized: device === Devices.webgpu ? false : quantized,
  device: device,
});
console.log("RUNNING WITH", device);

const processor = await AutoProcessor.from_pretrained("briaai/RMBG-1.4", {
  // Do not require preprocessor_config.json to be present in the repository
  config: {
    do_normalize: true,
    do_pad: false,
    do_rescale: true,
    do_resize: true,
    image_mean: [0.5, 0.5, 0.5],
    feature_extractor_type: "ImageFeatureExtractor",
    image_std: [1, 1, 1],
    resample: 2,
    rescale_factor: 0.00392156862745098,
    size: { width: 1024, height: 1024 },
  },
});
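With this PR, those inline values can live in the repo instead. A sketch of what the committed preprocessor_config.json would contain, mirroring the inline config above (a matching config.json with "model_type": "custom" would cover the model side):

{
  "do_normalize": true,
  "do_pad": false,
  "do_rescale": true,
  "do_resize": true,
  "feature_extractor_type": "ImageFeatureExtractor",
  "image_mean": [0.5, 0.5, 0.5],
  "image_std": [1, 1, 1],
  "resample": 2,
  "rescale_factor": 0.00392156862745098,
  "size": { "width": 1024, "height": 1024 }
}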
OriLib changed pull request status to merged