Environment:
	Python: 3.10.11
	PyTorch: 2.0.1
	Torchvision: 0.15.2
	CUDA: 11.7
	CUDNN: 8500
	NumPy: 1.24.3
	PIL: 9.4.0
	Testing environment: [2]
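
For reference, a banner like the one above can be reproduced from the standard version attributes. This is a minimal sketch, not this repo's own logging helper:

    import sys

    import numpy as np
    import PIL
    import torch
    import torchvision

    # Prints the same environment summary as the header above.
    print("Environment:")
    print(f"\tPython: {sys.version.split()[0]}")         # e.g. 3.10.11
    print(f"\tPyTorch: {torch.__version__}")             # e.g. 2.0.1
    print(f"\tTorchvision: {torchvision.__version__}")   # e.g. 0.15.2
    print(f"\tCUDA: {torch.version.cuda}")               # e.g. 11.7
    print(f"\tCUDNN: {torch.backends.cudnn.version()}")  # 8500 encodes cuDNN 8.5.0
    print(f"\tNumPy: {np.__version__}")
    print(f"\tPIL: {PIL.__version__}")
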
Args:
	algorithm: Selective_KD
	checkpoint_freq: 300
	data_dir: ./domainbed/data
	dataset: PACS
	holdout_fraction: 0.2
	hparams: {
    "resnet18": false,
    "resnet_dropout": 0,
    "nonlinear_classifier": false,
    "data_augmentation": true,
    "clip_backbone": "ViT-B/32",
    "student_model": "resnet",
    "SMA": true,
    "batch_size": 32
}
	hparams_seed: 0
	output_dir: sweep/ablation3/outputs/9282d6d202ea46dc9cd3b1d123397511
	save_linear_probed_clip: False
	save_model_every_checkpoint: False
	seed: 276474207
	skip_model_save: False
	steps: 5001
	sweep: True
	task: domain_generalization
	test_envs: [2]
	trial_seed: 0
	uda_holdout_fraction: 0
	visualize: False
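
The Args dump above maps onto a DomainBed-style train invocation. A hedged sketch of how this run could be reproduced, with flag names inferred from the Args keys (the sweep, visualize, and save_linear_probed_clip entries look repo-specific and are omitted; the exact entry point is assumed, not confirmed):

    import subprocess

    # Hypothetical reproduction command; flag names taken from the Args
    # dump above, entry point assumed to follow DomainBed's layout.
    subprocess.run([
        "python", "-m", "domainbed.scripts.train",
        "--algorithm", "Selective_KD",
        "--dataset", "PACS",
        "--data_dir", "./domainbed/data",
        "--test_envs", "2",
        "--hparams_seed", "0",
        "--trial_seed", "0",
        "--seed", "276474207",
        "--steps", "5001",
        "--checkpoint_freq", "300",
        "--holdout_fraction", "0.2",
        "--output_dir", "sweep/ablation3/outputs/9282d6d202ea46dc9cd3b1d123397511",
    ], check=True)
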
Not saving models
HParams:
	SMA: True
	batch_size: 32
	class_balanced: False
	clip_backbone: ViT-B/32
	data_augmentation: True
	lambda1: 0.5
	lambda2: 0.5
	last_k_epoch: 0.25
	lr: 5e-05
	nonlinear_classifier: False
	resnet18: False
	resnet_dropout: 0
	student_model: resnet
	temperature: 3
	weight_decay: 0.0
	worst_case_p: 0.3333333333333333
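
Several of these hyperparameters (temperature: 3, lambda1/lambda2: 0.5) are the usual knobs of a temperature-scaled distillation objective against the CLIP teacher. The following is only a generic sketch of how such terms typically combine; the actual Selective_KD loss, including how worst_case_p (1/3 here) selects samples for the "selective" part, is defined in the repo and is assumed here, not reproduced:

    import torch.nn.functional as F

    def kd_loss(student_logits, teacher_logits, labels,
                temperature=3.0, lambda1=0.5, lambda2=0.5):
        """Hypothetical sketch of a temperature-scaled distillation loss.

        Only illustrates the standard way `temperature`, `lambda1`, and
        `lambda2` combine a cross-entropy term with a KL term against the
        teacher; the repo's Selective_KD objective may differ.
        """
        ce = F.cross_entropy(student_logits, labels)
        # Soften both distributions; the T^2 factor keeps gradient
        # magnitudes comparable across temperatures (Hinton et al., 2015).
        kl = F.kl_div(
            F.log_softmax(student_logits / temperature, dim=-1),
            F.softmax(teacher_logits / temperature, dim=-1),
            reduction="batchmean",
        ) * temperature ** 2
        return lambda1 * ce + lambda2 * kl
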
using augment transform
using augment transform
using normal transform
using augment transform
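
One transform line is printed per PACS environment: the training environments (0, 1, 3) get the augmented transform because data_augmentation is true, while test environment 2 gets the plain eval transform. A minimal sketch of that selection logic, with typical DomainBed-style transforms assumed rather than copied from this repo:

    from torchvision import transforms

    # Typical ImageNet-style transforms (values assumed, not this repo's).
    normalize = transforms.Normalize(
        mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])
    augment_transform = transforms.Compose([
        transforms.RandomResizedCrop(224, scale=(0.7, 1.0)),
        transforms.RandomHorizontalFlip(),
        transforms.ColorJitter(0.3, 0.3, 0.3, 0.3),
        transforms.RandomGrayscale(),
        transforms.ToTensor(),
        normalize,
    ])
    normal_transform = transforms.Compose([
        transforms.Resize((224, 224)),
        transforms.ToTensor(),
        normalize,
    ])

    test_envs, data_augmentation = [2], True
    for env_idx in range(4):  # PACS has 4 environments
        if data_augmentation and env_idx not in test_envs:
            print("using augment transform")
            transform = augment_transform
        else:
            print("using normal transform")
            transform = normal_transform
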
using device: cuda
Using ViT-B/32...
constructing student model
using resnet 50
Using SMA
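
"using resnet 50" follows from student_model: resnet with resnet18: False, and SMA keeps a running average of the student's weights for evaluation (SWAD-style averaging; with last_k_epoch: 0.25, averaging plausibly starts around step 3750, consistent with the mem_gb jump near step 3900 in the table below). A hedged sketch of both pieces, not the repo's actual classes:

    import copy

    import torch
    import torchvision

    # Student: torchvision ResNet-50 (resnet18 is False in the hparams above).
    student = torchvision.models.resnet50(weights=None)
    student.fc = torch.nn.Linear(student.fc.in_features, 7)  # PACS: 7 classes

    class SimpleMovingAverage:
        """Hypothetical sketch of SMA over model weights.

        Maintains a running average of the parameters, typically updated
        only over the last `last_k_epoch` fraction of training and used
        for evaluation. BatchNorm buffers are omitted for brevity.
        """

        def __init__(self, model):
            self.avg_model = copy.deepcopy(model)
            self.count = 0

        @torch.no_grad()
        def update(self, model):
            self.count += 1
            for p_avg, p in zip(self.avg_model.parameters(),
                                model.parameters()):
                p_avg.mul_(self.count - 1).add_(p).div_(self.count)
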
n_steps 5001
checkpoint_freq 300
agg_test_acc  agg_val_acc   env0_in_acc   env0_out_acc  env1_in_acc   env1_out_acc  env2_in_acc   env2_out_acc  env3_in_acc   env3_out_acc  epoch         loss          mem_gb        step          step_time    
0.1380988023  0.1831621903  0.1482611348  0.1466992665  0.1812366738  0.1709401709  0.1414670659  0.1347305389  0.2089694656  0.2318471338  0.0000000000  6.6232900620  2.3338298798  0             1.4337253571 
0.9925149696  0.9554901989  0.9853569250  0.9486552567  0.9802771855  0.9636752137  0.9940119760  0.9910179641  0.9602417303  0.9541401274  7.1856287425  2.0118452380  2.5948085785  300           0.1555042529 
0.9921407181  0.9565585562  0.9969493594  0.9535452323  0.9936034115  0.9658119658  0.9932634731  0.9910179641  0.9726463104  0.9503184713  14.371257485  0.8360520820  2.5948085785  600           0.1751382534 
0.9943862270  0.9575790802  0.9969493594  0.9511002445  0.9952025586  0.9636752137  0.9947604790  0.9940119760  0.9815521628  0.9579617834  21.556886227  0.7308700085  2.5948085785  900           0.1766851187 
0.9936377241  0.9650168265  0.9987797437  0.9657701711  0.9989339019  0.9636752137  0.9962574850  0.9910179641  0.9850508906  0.9656050955  28.742514970  0.6721423687  2.5948085785  1200          0.1769380474 
0.9932634726  0.9639278182  1.0000000000  0.9633251834  0.9978678038  0.9679487179  0.9955089820  0.9910179641  0.9866412214  0.9605095541  35.928143712  0.6409841295  2.5948085785  1500          0.1759838104 
0.9925149696  0.9638114631  0.9987797437  0.9608801956  0.9978678038  0.9636752137  0.9940119760  0.9910179641  0.9901399491  0.9668789809  43.113772455  0.6182019694  2.5948085785  1800          0.1755393791 
0.9925149696  0.9719408089  0.9975594875  0.9779951100  0.9984008529  0.9722222222  0.9940119760  0.9910179641  0.9901399491  0.9656050955  50.299401197  0.6057247298  2.5948085785  2100          0.1759306010 
0.9921407181  0.9670714468  0.9993898719  0.9706601467  0.9989339019  0.9636752137  0.9932634731  0.9910179641  0.9936386768  0.9668789809  57.485029940  0.5902690009  2.5948085785  2400          0.1744923655 
0.9921407181  0.9662015765  0.9993898719  0.9633251834  0.9984008529  0.9722222222  0.9932634731  0.9910179641  0.9936386768  0.9630573248  64.670658682  0.5687680920  2.5948085785  2700          0.1742083295 
0.9921407181  0.9721599470  0.9993898719  0.9731051345  0.9994669510  0.9764957265  0.9932634731  0.9910179641  0.9917302799  0.9668789809  71.856287425  0.5651713678  2.5948085785  3000          0.1732312902 
0.9906437121  0.9672563238  1.0000000000  0.9682151589  1.0000000000  0.9679487179  0.9932634731  0.9880239521  0.9952290076  0.9656050955  79.041916167  0.5527185317  2.5948085785  3300          0.1723654294 
0.9910179636  0.9698519654  1.0000000000  0.9657701711  0.9978678038  0.9743589744  0.9940119760  0.9880239521  0.9942748092  0.9694267516  86.227544910  0.5348780965  2.5948085785  3600          0.1701244807 
0.9910179636  0.9660373128  0.9993898719  0.9731051345  0.9994669510  0.9594017094  0.9940119760  0.9880239521  0.9974554707  0.9656050955  93.413173652  0.4517370468  5.3919634819  3900          0.1865780505 
0.9910179636  0.9710779422  1.0000000000  0.9779951100  1.0000000000  0.9658119658  0.9940119760  0.9880239521  0.9961832061  0.9694267516  100.59880239  0.3290412802  5.3919634819  4200          0.2021569864 
0.9906437121  0.9681055807  0.9993898719  0.9682151589  1.0000000000  0.9679487179  0.9932634731  0.9880239521  0.9971374046  0.9681528662  107.78443113  0.3110214403  5.3919634819  4500          0.2099334423 
0.9906437121  0.9754541916  1.0000000000  0.9804400978  0.9994669510  0.9764957265  0.9932634731  0.9880239521  0.9965012723  0.9694267516  114.97005988  0.3049151919  5.3919634819  4800          0.2081783112 
0.9891467061  0.9718723247  0.9987797437  0.9731051345  0.9989339019  0.9743589744  0.9932634731  0.9850299401  0.9968193384  0.9681528662  119.76047904  0.2905270369  5.3919634819  5000          0.2095342970 
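
In the table, agg_test_acc and agg_val_acc are derivable from the per-environment columns: with test_envs=[2], agg_test_acc is the mean of env2's in/out accuracies, and agg_val_acc is the mean of the held-out (out) accuracies of the three training environments, i.e. the standard training-domain-validation model-selection score. A quick check against the step-300 row:

    # Step-300 row from the table above, test_envs = [2].
    env_in  = [0.9853569250, 0.9802771855, 0.9940119760, 0.9602417303]
    env_out = [0.9486552567, 0.9636752137, 0.9910179641, 0.9541401274]
    test_envs = [2]

    agg_test_acc = sum(env_in[e] + env_out[e]
                       for e in test_envs) / (2 * len(test_envs))
    val_envs = [e for e in range(len(env_out)) if e not in test_envs]
    agg_val_acc = sum(env_out[e] for e in val_envs) / len(val_envs)

    print(agg_test_acc)  # 0.99251497..., matches the table to printing precision
    print(agg_val_acc)   # 0.95549019..., matches the table to printing precision
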
