Environment:
	Python: 3.10.11
	PyTorch: 2.0.1
	Torchvision: 0.15.2
	CUDA: 11.7
	CUDNN: 8500
	NumPy: 1.24.3
	PIL: 9.4.0
	Testing environment: [0]
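
A banner like the one above is typically produced by querying each library at import time; the sketch below is an assumption about the general pattern, not this run's actual source. Note that torch.backends.cudnn.version() returns an integer, which is why CUDNN reads "8500" rather than "8.5.0".

import sys
import PIL
import numpy as np
import torch
import torchvision

# Print an environment banner in the format used above (assumed pattern).
print("Environment:")
print(f"\tPython: {sys.version.split()[0]}")
print(f"\tPyTorch: {torch.__version__}")
print(f"\tTorchvision: {torchvision.__version__}")
print(f"\tCUDA: {torch.version.cuda}")
print(f"\tCUDNN: {torch.backends.cudnn.version()}")  # reported as an int, e.g. 8500
print(f"\tNumPy: {np.__version__}")
print(f"\tPIL: {PIL.__version__}")
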
Args:
	algorithm: Selective_KD
	checkpoint_freq: 300
	data_dir: ./domainbed/data
	dataset: VLCS
	holdout_fraction: 0.2
	hparams: {
    "resnet18": false,
    "resnet_dropout": 0,
    "nonlinear_classifier": false,
    "data_augmentation": true,
    "clip_backbone": "ViT-B/32",
    "student_model": "resnet",
    "SMA": true,
    "batch_size": 32
}
	hparams_seed: 0
	output_dir: sweep/ablation3/outputs/78dc1f4daa1a03b72ce0b3e88f2ace7d
	save_linear_probed_clip: False
	save_model_every_checkpoint: False
	seed: 641189843
	skip_model_save: False
	steps: 5001
	sweep: True
	task: domain_generalization
	test_envs: [0]
	trial_seed: 2
	uda_holdout_fraction: 0
	visualize: False
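
The holdout_fraction and test_envs args determine the evaluation splits reported in the table further below. A minimal sketch of the assumed DomainBed-style behavior: every environment is split into an "in" part and an "out" holdout, training draws only on the "in" splits of the non-test environments, and all splits are evaluated (hence the env{i}_in_acc / env{i}_out_acc columns).

import numpy as np

def split_env(n_examples, holdout_fraction=0.2, seed=0):
    # Shuffle indices deterministically, then hold out a fraction as "out".
    idx = np.random.RandomState(seed).permutation(n_examples)
    n_out = int(n_examples * holdout_fraction)
    return idx[n_out:], idx[:n_out]  # (in_split, out_split)

in_split, out_split = split_env(n_examples=1000, holdout_fraction=0.2)
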
Not saving models
HParams:
	SMA: True
	batch_size: 32
	class_balanced: False
	clip_backbone: ViT-B/32
	data_augmentation: True
	lambda1: 0.5
	lambda2: 0.5
	last_k_epoch: 0.25
	lr: 5e-05
	nonlinear_classifier: False
	resnet18: False
	resnet_dropout: 0
	student_model: resnet
	temperature: 3
	weight_decay: 0.0
	worst_case_p: 0.3333333333333333
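
The temperature, lambda1, and lambda2 entries parameterize a distillation objective between the CLIP ViT-B/32 teacher and the student. The sketch below is the standard soft-target KD loss these names conventionally denote, not necessarily this codebase's exact objective; the "selective" part of Selective_KD, presumably involving worst_case_p, is not visible in this log and is not shown.

import torch.nn.functional as F

def kd_loss(student_logits, teacher_logits, labels, T=3.0, lambda1=0.5, lambda2=0.5):
    # Hard-label cross-entropy on the student's own predictions.
    ce = F.cross_entropy(student_logits, labels)
    # Soft-target KL between temperature-scaled teacher and student; the T**2
    # factor keeps gradient magnitudes comparable across temperatures.
    kl = F.kl_div(
        F.log_softmax(student_logits / T, dim=1),
        F.softmax(teacher_logits / T, dim=1),
        reduction="batchmean",
    ) * (T ** 2)
    return lambda1 * ce + lambda2 * kl
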
using normal transform
using augment transform
using augment transform
using augment transform
using device: cuda
Using ViT-B/32...
constructing student model
using resnet 50
Using SMA
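
SMA here presumably refers to a simple moving average of the student's weights, evaluated in place of the raw network; given last_k_epoch: 0.25 above, the averaging likely covers only the tail of training. A minimal sketch of uniform parameter averaging, assuming that reading:

import copy
import torch

class SimpleMovingAverage:
    def __init__(self, network):
        self.avg_network = copy.deepcopy(network)  # holds the running average
        self.count = 0

    @torch.no_grad()
    def update(self, network):
        # Incremental uniform mean: avg <- avg + (p - avg) / count.
        self.count += 1
        for p_avg, p in zip(self.avg_network.parameters(), network.parameters()):
            p_avg += (p - p_avg) / self.count
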
n_steps 5001
checkpoint_freq 300
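
The fractional epoch column in the table below follows from this accounting: checkpoints land every 300 steps, while (under the assumed DomainBed convention) steps_per_epoch is set by the smallest per-environment "in" split, so one epoch is a non-integer number of steps.

batch_size = 32
# From the first checkpoint row below: step 300 corresponds to epoch 8.4805...,
# so steps_per_epoch = 300 / 8.4805653710 ~= 35.375 batches, i.e. the smallest
# "in" split holds about 35.375 * 32 ~= 1132 examples.
steps_per_epoch = 300 / 8.4805653710
print(round(steps_per_epoch * batch_size))  # ~= 1132
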
agg_test_acc  agg_val_acc   env0_in_acc   env0_out_acc  env1_in_acc   env1_out_acc  env2_in_acc   env2_out_acc  env3_in_acc   env3_out_acc  epoch         loss          mem_gb        step          step_time    
0.4045936394  0.3126121649  0.4169611307  0.3922261484  0.3152941176  0.3333333333  0.2604722011  0.2652439024  0.3339503887  0.3392592593  0.0000000000  3.9971804619  2.3337798119  0             1.6730065346 
0.9430212009  0.8350608988  0.9461130742  0.9399293286  0.7807058824  0.7796610169  0.8830921554  0.8277439024  0.9122547205  0.8977777778  8.4805653710  1.5614368840  2.5977830887  300           0.6559030604 
0.9867491161  0.8278285203  0.9876325088  0.9858657244  0.8004705882  0.7702448211  0.9211728865  0.8125000000  0.9400222140  0.9007407407  16.961130742  1.2173900872  2.5977830887  600           0.6512332098 
0.9893992928  0.8291830746  0.9893992933  0.9893992933  0.8338823529  0.7815442561  0.9436405179  0.8185975610  0.9603850426  0.8874074074  25.441696113  1.0472445554  2.5977830887  900           0.6445682383 
0.9885159006  0.8315908647  0.9911660777  0.9858657244  0.8640000000  0.7721280603  0.9634424981  0.8307926829  0.9726027397  0.8918518519  33.922261484  0.9157337844  2.5977830887  1200          0.6442083112 
0.9840989394  0.8391370653  0.9893992933  0.9787985866  0.8903529412  0.8022598870  0.9729626809  0.8277439024  0.9807478712  0.8874074074  42.402826855  0.8161491577  2.5977830887  1500          0.6426119399 
0.9849823317  0.8350169310  0.9911660777  0.9787985866  0.9204705882  0.7871939736  0.9832444783  0.8185975610  0.9866716031  0.8992592593  50.883392226  0.7921864247  2.5977830887  1800          0.6441026966 
0.9845406356  0.8357591008  0.9902826855  0.9787985866  0.9327058824  0.7890772128  0.9881949733  0.8307926829  0.9888930026  0.8874074074  59.363957597  0.7507745405  2.5977830887  2100          0.6383791741 
0.9840989394  0.8451334469  0.9893992933  0.9787985866  0.9463529412  0.8097928437  0.9908606245  0.8307926829  0.9929655683  0.8948148148  67.844522968  0.7441320304  2.5977830887  2400          0.6581569990 
0.9840989394  0.8377487123  0.9893992933  0.9787985866  0.9595294118  0.7890772128  0.9939070830  0.8323170732  0.9944465013  0.8918518519  76.325088339  0.7262437123  2.5977830887  2700          0.6549887005 
0.9840989394  0.8349974351  0.9893992933  0.9787985866  0.9661176471  0.7853107345  0.9973343488  0.8307926829  0.9970381340  0.8888888889  84.805653710  0.7315207795  2.5977830887  3000          0.6498114483 
0.9836572433  0.8371066630  0.9885159011  0.9787985866  0.9727058824  0.7871939736  0.9961919269  0.8307926829  0.9970381340  0.8933333333  93.286219081  0.7227181800  2.5977830887  3300          0.6331694961 
0.9832155472  0.8357887656  0.9876325088  0.9787985866  0.9807058824  0.7815442561  0.9958111196  0.8384146341  0.9977786005  0.8874074074  101.76678445  0.7220687934  2.5977830887  3600          0.6491219370 
0.9827738511  0.8371077220  0.9867491166  0.9787985866  0.9797647059  0.7796610169  0.9980959634  0.8353658537  0.9988893003  0.8962962963  110.24734982  0.5998665989  5.3943490982  3900          0.6411740907 
0.9840989394  0.8313659350  0.9858657244  0.9823321555  0.9830588235  0.7683615819  0.9965727342  0.8353658537  0.9981488338  0.8903703704  118.72791519  0.4363671262  5.3943490982  4200          0.6480296238 
0.9823321550  0.8349984942  0.9858657244  0.9787985866  0.9901176471  0.7777777778  0.9977151561  0.8353658537  0.9988893003  0.8918518519  127.20848056  0.4239849665  5.3943490982  4500          0.6379520353 
0.9818904589  0.8358808351  0.9849823322  0.9787985866  0.9887058824  0.7758945386  0.9988575781  0.8384146341  1.0000000000  0.8933333333  135.68904593  0.4194121727  5.3943490982  4800          0.6417287771 
0.9818904589  0.8410499694  0.9849823322  0.9787985866  0.9863529412  0.8003766478  0.9992383854  0.8353658537  0.9988893003  0.8874074074  141.34275618  0.4074591447  5.3943490982  5000          0.6139629459 
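
The whitespace-aligned table above parses directly with pandas for post-hoc analysis. A sketch, assuming the log was saved to a file; "log.txt" is a placeholder name, not a path from this run:

import io
import pandas as pd

with open("log.txt") as f:
    lines = f.readlines()

# The table starts at the header row and runs to the end of the file.
start = next(i for i, line in enumerate(lines) if line.startswith("agg_test_acc"))
df = pd.read_csv(io.StringIO("".join(lines[start:])), sep=r"\s+")

# Training-domain model selection: take the checkpoint with the best aggregate
# validation accuracy and read off the held-out test-environment (env0) accuracy.
best = df.loc[df["agg_val_acc"].idxmax()]
print(int(best["step"]), best["env0_in_acc"], best["env0_out_acc"])
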
