
Commit 093efff

Add wandb_run_name parameter to init_kwargs (kohya-ss#1032)
Disty0 and kohya-ss, authored and committed
1 parent 05a3b75 · commit 093efff

7 files changed: +14 −0 lines
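
The change is identical across all seven training scripts: a 'wandb' entry is added to the nested init_kwargs dict that is later handed to accelerate's Accelerator.init_trackers, and accelerate forwards that sub-dict to the wandb tracker, so {'name': ...} sets the run name shown in the W&B UI. A minimal sketch of that flow, written outside the repository's code and assuming wandb logging is enabled and configured:

    from accelerate import Accelerator

    accelerator = Accelerator(log_with="wandb")  # assumes wandb is installed and logged in

    # Mirror of the added lines: only set the run name when one was given.
    wandb_run_name = "my-finetune-run"  # stand-in for args.wandb_run_name
    init_kwargs = {}
    if wandb_run_name:
        init_kwargs["wandb"] = {"name": wandb_run_name}

    # init_kwargs["wandb"] is passed through to wandb.init as keyword arguments.
    accelerator.init_trackers("finetuning", init_kwargs=init_kwargs)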

fine_tune.py (+2)

@@ -291,6 +291,8 @@ def fn_recursive_set_mem_eff(module: torch.nn.Module):

     if accelerator.is_main_process:
         init_kwargs = {}
+        if args.wandb_run_name:
+            init_kwargs['wandb'] = {'name': args.wandb_run_name}
         if args.log_tracker_config is not None:
             init_kwargs = toml.load(args.log_tracker_config)
         accelerator.init_trackers("finetuning" if args.log_tracker_name is None else args.log_tracker_name, init_kwargs=init_kwargs)

sdxl_train.py (+2)

@@ -457,6 +457,8 @@ def fn_recursive_set_mem_eff(module: torch.nn.Module):

     if accelerator.is_main_process:
         init_kwargs = {}
+        if args.wandb_run_name:
+            init_kwargs['wandb'] = {'name': args.wandb_run_name}
         if args.log_tracker_config is not None:
             init_kwargs = toml.load(args.log_tracker_config)
         accelerator.init_trackers("finetuning" if args.log_tracker_name is None else args.log_tracker_name, init_kwargs=init_kwargs)

sdxl_train_control_net_lllite.py (+2)

@@ -342,6 +342,8 @@ def train(args):

     if accelerator.is_main_process:
         init_kwargs = {}
+        if args.wandb_run_name:
+            init_kwargs['wandb'] = {'name': args.wandb_run_name}
         if args.log_tracker_config is not None:
             init_kwargs = toml.load(args.log_tracker_config)
         accelerator.init_trackers(

train_controlnet.py (+2)

@@ -336,6 +336,8 @@ def train(args):
     )
     if accelerator.is_main_process:
         init_kwargs = {}
+        if args.wandb_run_name:
+            init_kwargs['wandb'] = {'name': args.wandb_run_name}
         if args.log_tracker_config is not None:
             init_kwargs = toml.load(args.log_tracker_config)
         accelerator.init_trackers(

train_db.py (+2)

@@ -268,6 +268,8 @@ def train(args):

     if accelerator.is_main_process:
         init_kwargs = {}
+        if args.wandb_run_name:
+            init_kwargs['wandb'] = {'name': args.wandb_run_name}
         if args.log_tracker_config is not None:
             init_kwargs = toml.load(args.log_tracker_config)
         accelerator.init_trackers("dreambooth" if args.log_tracker_name is None else args.log_tracker_name, init_kwargs=init_kwargs)

train_textual_inversion.py (+2)

@@ -504,6 +504,8 @@ def train(self, args):

         if accelerator.is_main_process:
             init_kwargs = {}
+            if args.wandb_run_name:
+                init_kwargs['wandb'] = {'name': args.wandb_run_name}
             if args.log_tracker_config is not None:
                 init_kwargs = toml.load(args.log_tracker_config)
             accelerator.init_trackers(

train_textual_inversion_XTI.py (+2)

@@ -394,6 +394,8 @@ def train(args):

     if accelerator.is_main_process:
         init_kwargs = {}
+        if args.wandb_run_name:
+            init_kwargs['wandb'] = {'name': args.wandb_run_name}
         if args.log_tracker_config is not None:
             init_kwargs = toml.load(args.log_tracker_config)
         accelerator.init_trackers("textual_inversion" if args.log_tracker_name is None else args.log_tracker_name, init_kwargs=init_kwargs)
