---
bert:
  f1: 0.5257648953301128
  precision: 0.6816283924843424
  recall: 0.42791612057667106
roberta:
  f1: 0.5656948125709959
  precision: 0.6467532467532467
  recall: 0.5026917900403769
xlnet:
  f1: 0.4699186991869919
  precision: 0.5452830188679245
  recall: 0.41285714285714287
albert:
  f1: 0.5718901453957997
  precision: 0.7539936102236422
  recall: 0.46063760572543916
