Skip to content
Merged
6 changes: 3 additions & 3 deletions colossalai/inference/core/llm_engine.py
Original file line number Diff line number Diff line change
Expand Up @@ -57,11 +57,11 @@ class LLMEngine(BaseEngine):

def __init__(
self,
model_or_path: nn.Module | str,
tokenizer: PreTrainedTokenizer | PreTrainedTokenizerFast = None,
model_or_path: Union[nn.Module, str],
tokenizer: Union[PreTrainedTokenizer, PreTrainedTokenizerFast] = None,
inference_config: InferenceConfig = None,
verbose: bool = False,
model_policy: Policy | type[Policy] = None,
model_policy: Union[Policy, type[Policy]] = None,
) -> None:
self.inference_config = inference_config
self.dtype = inference_config.dtype
Expand Down
2 changes: 0 additions & 2 deletions colossalai/inference/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -186,8 +186,6 @@ def get_model_type(model_or_path: Union[nn.Module, str, DiffusionPipeline]):
"""

try:
from diffusers import DiffusionPipeline

DiffusionPipeline.load_config(model_or_path)
return ModelType.DIFFUSION_MODEL
except:
Expand Down
2 changes: 2 additions & 0 deletions examples/inference/stable_diffusion/test_ci.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
#!/bin/bash
echo "Skip the test (this test is slow)"
1 change: 0 additions & 1 deletion requirements/requirements-test.txt
Original file line number Diff line number Diff line change
@@ -1,4 +1,3 @@
diffusers
pytest
coverage==7.2.3
git+https://github.com/hpcaitech/pytest-testmon
Expand Down