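"""Tests for the `check_dummies` utility script.

`check_dummies` is the helper behind `make fix-copies` that generates the dummy
objects (which raise via `requires_backends`) used when an optional backend such
as torch, flax or onnx is not installed. These tests cover its parsing helpers
(`find_backend`, `read_init`) and its code-generation helpers
(`create_dummy_object`, `create_dummy_files`).
"""
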
import os
import sys
import unittest


# Make the repository's `utils/` directory importable so that the
# `check_dummies` script can be imported and tested directly.
git_repo_path = os.path.abspath(os.path.dirname(os.path.dirname(os.path.dirname(__file__))))
sys.path.append(os.path.join(git_repo_path, "utils"))

import check_dummies  # noqa: E402
from check_dummies import create_dummy_files, create_dummy_object, find_backend, read_init  # noqa: E402


# `read_init` and the dummy-file helpers operate on the package sources, so point
# the script at this repository's `src/diffusers` rather than an installed copy.
check_dummies.PATH_TO_DIFFUSERS = os.path.join(git_repo_path, "src", "diffusers")
|
|
class CheckDummiesTester(unittest.TestCase):
    def test_find_backend(self):
        # `find_backend` extracts the backend name(s) guarded by an `is_xxx_available()` check.
        simple_backend = find_backend("    if not is_torch_available():")
        self.assertEqual(simple_backend, "torch")

        # Multiple backend guards are joined with "_and_".
        double_backend = find_backend("    if not (is_torch_available() and is_transformers_available()):")
        self.assertEqual(double_backend, "torch_and_transformers")

        triple_backend = find_backend(
            "    if not (is_torch_available() and is_transformers_available() and is_onnx_available()):"
        )
        self.assertEqual(triple_backend, "torch_and_transformers_and_onnx")
|
    def test_read_init(self):
        objects = read_init()

        # Spot-check a few of the expected backend combinations.
        self.assertIn("torch", objects)
        self.assertIn("torch_and_transformers", objects)
        self.assertIn("flax_and_transformers", objects)
        self.assertIn("torch_and_transformers_and_onnx", objects)

        # Likewise, spot-check a few objects registered under each backend.
        self.assertIn("UNet2DModel", objects["torch"])
        self.assertIn("FlaxUNet2DConditionModel", objects["flax"])
        self.assertIn("StableDiffusionPipeline", objects["torch_and_transformers"])
        self.assertIn("FlaxStableDiffusionPipeline", objects["flax_and_transformers"])
        self.assertIn("LMSDiscreteScheduler", objects["torch_and_scipy"])
        self.assertIn("OnnxStableDiffusionPipeline", objects["torch_and_transformers_and_onnx"])
|
    def test_create_dummy_object(self):
        # A dummy constant is simply set to None.
        dummy_constant = create_dummy_object("CONSTANT", "'torch'")
        self.assertEqual(dummy_constant, "\nCONSTANT = None\n")

        # A dummy function's body is a single call to `requires_backends`.
        dummy_function = create_dummy_object("function", "'torch'")
        self.assertEqual(
            dummy_function, "\ndef function(*args, **kwargs):\n    requires_backends(function, 'torch')\n"
        )

        # A dummy class guards `__init__`, `from_config` and `from_pretrained`.
        expected_dummy_class = """
class FakeClass(metaclass=DummyObject):
    _backends = 'torch'

    def __init__(self, *args, **kwargs):
        requires_backends(self, 'torch')

    @classmethod
    def from_config(cls, *args, **kwargs):
        requires_backends(cls, 'torch')

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, 'torch')
"""
        dummy_class = create_dummy_object("FakeClass", "'torch'")
        self.assertEqual(dummy_class, expected_dummy_class)
|
    def test_create_dummy_files(self):
        # Expected content of the auto-generated dummy file for the "torch" backend.
        expected_dummy_pytorch_file = """# This file is autogenerated by the command `make fix-copies`, do not edit.
from ..utils import DummyObject, requires_backends


CONSTANT = None


def function(*args, **kwargs):
    requires_backends(function, ["torch"])


class FakeClass(metaclass=DummyObject):
    _backends = ["torch"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_config(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])
"""
        dummy_files = create_dummy_files({"torch": ["CONSTANT", "function", "FakeClass"]})
        self.assertEqual(dummy_files["torch"], expected_dummy_pytorch_file)
|
|
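# Allow invoking this module directly in addition to discovery via pytest/unittest.
if __name__ == "__main__":
    unittest.main()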