no evo
- app.py +3 -3
- configs/configs_base.py +1 -1
app.py
CHANGED
@@ -40,9 +40,9 @@ custom_css = """
 
 
 os.environ["LAYERNORM_TYPE"] = "fast_layernorm"
-os.environ["USE_DEEPSPEED_EVO_ATTTENTION"] = "
+os.environ["USE_DEEPSPEED_EVO_ATTTENTION"] = "False"
 # Set environment variable in the script
-os.environ['CUTLASS_PATH'] = './cutlass'
+#os.environ['CUTLASS_PATH'] = './cutlass'
 
 # reps = [
 # {
@@ -272,7 +272,7 @@ logging.basicConfig(
     filemode="w",
 )
 configs_base["use_deepspeed_evo_attention"] = (
-    os.environ.get("USE_DEEPSPEED_EVO_ATTTENTION", False) == "
+    os.environ.get("USE_DEEPSPEED_EVO_ATTTENTION", False) == "False"
 )
 arg_str = "--seeds 101 --dump_dir ./output --input_json_path ./examples/example.json --model.N_cycle 10 --sample_diffusion.N_sample 5 --sample_diffusion.N_step 200 "
 configs = {**configs_base, **{"data": data_configs}, **inference_configs}
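For readers tracing the toggle, here is a minimal, hedged sketch (not part of the commit) of how the new app.py lines evaluate on their own. Environment variables are always strings, so the flag ends up being a string comparison against the literal "False":

    import os

    # Value set near the top of app.py in this commit.
    os.environ["USE_DEEPSPEED_EVO_ATTTENTION"] = "False"

    # Later, app.py derives the kernel switch from that variable.
    # os.environ.get returns the string "False" here, so the comparison is True.
    use_deepspeed_evo_attention = (
        os.environ.get("USE_DEEPSPEED_EVO_ATTTENTION", False) == "False"
    )
    print(use_deepspeed_evo_attention)  # True exactly when the variable is the string "False"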
configs/configs_base.py
CHANGED
@@ -103,7 +103,7 @@ model_configs = {
     ), # NOTE: Number of blocks in each activation checkpoint, if None, no checkpointing is performed.
     # switch of kernels
     "use_memory_efficient_kernel": False,
-    "use_deepspeed_evo_attention":
+    "use_deepspeed_evo_attention": False,
     "use_flash": False,
     "use_lma": False,
     "use_xformer": False,
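For context, a hedged sketch of how the changed default in configs_base combines with the runtime override and the final merge shown at the end of the app.py hunk (data_configs and inference_configs are placeholder dicts here, not the repository's real objects):

    import os

    # New default from this commit: the DeepSpeed Evo attention kernel is off.
    configs_base = {
        "use_memory_efficient_kernel": False,
        "use_deepspeed_evo_attention": False,
        "use_flash": False,
        "use_lma": False,
        "use_xformer": False,
    }

    # app.py then overrides the switch from the environment variable ...
    configs_base["use_deepspeed_evo_attention"] = (
        os.environ.get("USE_DEEPSPEED_EVO_ATTTENTION", False) == "False"
    )

    # ... and merges everything into the final config dict.
    data_configs, inference_configs = {}, {}  # placeholders for illustration
    configs = {**configs_base, **{"data": data_configs}, **inference_configs}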