Spaces: Running on A10G
File size: 360 Bytes
bfd34e9 |
1 2 3 4 5 6 7 8 9 10 11 |
from . import attentionpatch
from . import transformerpatch
# Currently active forward implementations, swappable at runtime by other
# modules; initialized to the unpatched defaults exposed by the sibling
# `attentionpatch` / `transformerpatch` modules.
attention_forward = attentionpatch.default.forward
basic_transformer_forward = transformerpatch.default.forward
def reset():
    """Restore both module-level forward hooks to their default implementations.

    Rebinds ``attention_forward`` and ``basic_transformer_forward`` to the
    unpatched ``default.forward`` callables, undoing any patch installed by
    other code.
    """
    global attention_forward, basic_transformer_forward
    attention_forward, basic_transformer_forward = (
        attentionpatch.default.forward,
        transformerpatch.default.forward,
    )
|