Environment:
	Python: 3.10.11
	PyTorch: 2.0.1
	Torchvision: 0.15.2
	CUDA: 11.7
	CUDNN: 8500
	NumPy: 1.24.3
	PIL: 9.4.0
	Testing environment: [2]
Args:
	algorithm: Selective_KD
	checkpoint_freq: 300
	data_dir: ./domainbed/data
	dataset: PACS
	holdout_fraction: 0.2
	hparams: {
    "resnet18": false,
    "resnet_dropout": 0,
    "nonlinear_classifier": false,
    "data_augmentation": true,
    "clip_backbone": "ViT-B/32",
    "student_model": "resnet",
    "SMA": true,
    "batch_size": 32
}
	hparams_seed: 4
	output_dir: sweep/ablation3/outputs/cdcda8e3e821a585699efda9574f503c
	save_linear_probed_clip: False
	save_model_every_checkpoint: False
	seed: 609294096
	skip_model_save: False
	steps: 5001
	sweep: True
	task: domain_generalization
	test_envs: [2]
	trial_seed: 1
	uda_holdout_fraction: 0
	visualize: False
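The Args block mirrors DomainBed's training-script interface, so a run with these settings can presumably be reproduced by passing the same flags back to the script. A minimal sketch, assuming the entry point is domainbed.scripts.train (the actual script name is not shown in this log):

# Hypothetical reproduction command; the entry-point module and the exact
# flag set of this fork are assumptions based on the DomainBed-style args.
import json
import subprocess

hparams = {"resnet18": False, "resnet_dropout": 0,
           "nonlinear_classifier": False, "data_augmentation": True,
           "clip_backbone": "ViT-B/32", "student_model": "resnet",
           "SMA": True, "batch_size": 32}

subprocess.run([
    "python", "-m", "domainbed.scripts.train",
    "--algorithm", "Selective_KD",
    "--dataset", "PACS",
    "--data_dir", "./domainbed/data",
    "--test_envs", "2",
    "--steps", "5001",
    "--checkpoint_freq", "300",
    "--hparams_seed", "4",
    "--trial_seed", "1",
    "--seed", "609294096",
    "--hparams", json.dumps(hparams),
], check=True)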
Not saving models
HParams:
	SMA: True
	batch_size: 32
	class_balanced: False
	clip_backbone: ViT-B/32
	data_augmentation: True
	lambda1: 0.6171767606394463
	lambda2: 0.5293911814442921
	last_k_epoch: 0.3102645027326114
	lr: 5e-05
	nonlinear_classifier: False
	resnet18: False
	resnet_dropout: 0
	student_model: resnet
	temperature: 2.022572617872769
	weight_decay: 0.0001
	worst_case_p: 0.2
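Several of these hparams (temperature, lambda1, lambda2) are the usual knobs of a temperature-scaled knowledge-distillation objective between the CLIP teacher and the ResNet student. The log does not spell out Selective_KD's loss, so the following is only an illustrative sketch of how such knobs conventionally enter a distillation objective; the weighting scheme is a hypothetical placeholder, and the values are the logged ones rounded:

# Illustrative temperature-scaled KD loss; NOT the verbatim Selective_KD
# objective, which this log does not show.
import torch
import torch.nn.functional as F

def kd_loss(student_logits, teacher_logits, labels,
            T=2.0226, lambda1=0.6172, lambda2=0.5294):
    # Supervised cross-entropy on ground-truth labels.
    ce = F.cross_entropy(student_logits, labels)
    # KL divergence between temperature-softened distributions; the T**2
    # factor keeps gradient magnitudes comparable across temperatures.
    kl = F.kl_div(
        F.log_softmax(student_logits / T, dim=1),
        F.softmax(teacher_logits / T, dim=1),
        reduction="batchmean",
    ) * (T ** 2)
    # Hypothetical weighting: how lambda1/lambda2 actually enter the
    # Selective_KD objective is not specified in the log.
    return lambda1 * ce + lambda2 * kl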
using augment transform
using augment transform
using normal transform
using augment transform
using device: cuda
Using ViT-B/32...
constructing student model
using resnet 50
Using SMA
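The "Using SMA" line, together with the last_k_epoch hparam, suggests a simple moving average of the student's weights kept over the tail of training and used for evaluation; the mem_gb jump at the step-3600 checkpoint (the first checkpoint past ~69% of the 5001 steps, i.e. 1 - last_k_epoch) is consistent with an averaged copy of the model being allocated at that point. A minimal sketch of such iterate averaging, assuming this reading (the log does not show the implementation):

# Sketch of a simple moving average (SMA) over model weights; an assumed
# reading of "Using SMA", not the verbatim implementation from this run.
import copy
import torch

class SimpleMovingAverage:
    def __init__(self, model):
        self.avg = copy.deepcopy(model)  # running-average copy, used for eval
        self.n = 0

    @torch.no_grad()
    def update(self, model):
        # Incremental mean over optimization iterates:
        # avg <- avg + (theta - avg) / n
        self.n += 1
        for pa, p in zip(self.avg.state_dict().values(),
                         model.state_dict().values()):
            if pa.dtype.is_floating_point:  # skip integer buffers
                pa += (p - pa) / self.n

Whether buffers (e.g. BatchNorm statistics) are averaged, and whether averaging starts exactly at the last_k_epoch boundary, are assumptions here.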
n_steps 5001
checkpoint_freq 300
agg_test_acc  agg_val_acc   env0_in_acc   env0_out_acc  env1_in_acc   env1_out_acc  env2_in_acc   env2_out_acc  env3_in_acc   env3_out_acc  epoch         loss          mem_gb        step          step_time    
0.1354790418  0.1426723818  0.1452104942  0.1320293399  0.1124733475  0.1367521368  0.1541916168  0.1167664671  0.1418575064  0.1592356688  0.0000000000  6.9573688507  1.7945718765  0             1.5428392887 
0.9887724546  0.9463266814  0.9762050031  0.9437652812  0.9760127932  0.9487179487  0.9895209581  0.9880239521  0.9640585242  0.9464968153  7.1856287425  2.5103344216  2.0936598778  300           0.1446376808 
0.9887724546  0.9610032899  0.9951189750  0.9608801956  0.9925373134  0.9743589744  0.9895209581  0.9880239521  0.9774173028  0.9477707006  14.371257485  1.1175154720  2.0936598778  600           0.1631041137 
0.9891467061  0.9603116903  0.9975594875  0.9584352078  0.9946695096  0.9658119658  0.9902694611  0.9880239521  0.9799618321  0.9566878981  21.556886227  0.9203556732  2.0936598778  900           0.1628660901 
0.9898952091  0.9582912932  0.9951189750  0.9608801956  0.9957356077  0.9636752137  0.9917664671  0.9880239521  0.9809160305  0.9503184713  28.742514970  0.8818769159  2.0936598778  1200          0.1636111355 
0.9910179636  0.9650440462  0.9987797437  0.9657701711  0.9978678038  0.9764957265  0.9910179641  0.9910179641  0.9860050891  0.9528662420  35.928143712  0.8430920970  2.0936598778  1500          0.1626660832 
0.9913922151  0.9674412009  0.9963392312  0.9657701711  0.9984008529  0.9722222222  0.9917664671  0.9910179641  0.9872773537  0.9643312102  43.113772455  0.8171265044  2.0936598778  1800          0.1621420336 
0.9906437121  0.9717831894  1.0000000000  0.9706601467  0.9989339019  0.9829059829  0.9902694611  0.9910179641  0.9879134860  0.9617834395  50.299401197  0.7888892374  2.0936598778  2100          0.1620022575 
0.9902694606  0.9712628192  0.9987797437  0.9755501222  0.9994669510  0.9700854701  0.9895209581  0.9910179641  0.9895038168  0.9681528662  57.485029940  0.7954045618  2.0936598778  2400          0.1624292946 
0.9906437121  0.9663521926  0.9993898719  0.9633251834  0.9994669510  0.9764957265  0.9902694611  0.9910179641  0.9920483461  0.9592356688  64.670658682  0.7935385740  2.0936598778  2700          0.1618458803 
0.9906437121  0.9710230678  0.9993898719  0.9731051345  0.9994669510  0.9743589744  0.9902694611  0.9910179641  0.9907760814  0.9656050955  71.856287425  0.7574641265  2.0936598778  3000          0.1620130936 
0.9910179636  0.9677424707  0.9975594875  0.9559902200  0.9984008529  0.9829059829  0.9910179641  0.9910179641  0.9920483461  0.9643312102  79.041916167  0.7383718248  2.0936598778  3300          0.1619091940 
0.9902694606  0.9677973074  0.9993898719  0.9706601467  1.0000000000  0.9722222222  0.9895209581  0.9910179641  0.9936386768  0.9605095541  86.227544910  0.5760709599  5.3996806145  3600          0.1853653002 
0.9902694606  0.9687972181  0.9993898719  0.9608801956  0.9994669510  0.9786324786  0.9895209581  0.9910179641  0.9939567430  0.9668789809  93.413173652  0.3395184646  5.3996806145  3900          0.2066209022 
0.9906437121  0.9669480882  1.0000000000  0.9608801956  0.9994669510  0.9743589744  0.9902694611  0.9910179641  0.9933206107  0.9656050955  100.59880239  0.3266207168  5.3996806145  4200          0.2063654319 
0.9902694606  0.9700847133  0.9987797437  0.9608801956  1.0000000000  0.9850427350  0.9895209581  0.9910179641  0.9965012723  0.9643312102  107.78443113  0.3120079616  5.3996806145  4500          0.2047701001 
0.9902694606  0.9698313143  0.9987797437  0.9682151589  0.9989339019  0.9807692308  0.9895209581  0.9910179641  0.9923664122  0.9605095541  114.97005988  0.3031914863  5.3996806145  4800          0.2056257820 
0.9921407181  0.9728515843  1.0000000000  0.9657701711  0.9994669510  0.9871794872  0.9902694611  0.9940119760  0.9942748092  0.9656050955  119.76047904  0.2949146495  5.3996806145  5000          0.2050201488 
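The aggregate columns can be reconstructed from the per-environment ones. Checking against the final row (step 5000): agg_val_acc matches the mean out-split accuracy over the three training environments (0, 1, 3), and agg_test_acc matches the mean of the held-out test environment's (env 2, per test_envs) in- and out-split accuracies:

# Reconstructing the aggregate columns from the step-5000 row.
train_env_out = {0: 0.9657701711, 1: 0.9871794872, 3: 0.9656050955}
agg_val_acc = sum(train_env_out.values()) / len(train_env_out)  # -> 0.97285158...
agg_test_acc = (0.9902694611 + 0.9940119760) / 2                # -> 0.99214071...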
