Add llama_finetune_temporal_sequences_r16_alpha=32_dropout=0.05_lr5e-05_data_size1000_max_steps=500_seed=123 LoRA model 729ce8e verified mciccone committed on Jun 10
Add llama_finetune_temporal_sequences_r16_alpha=32_dropout=0.05_lr0.0001_data_size1000_max_steps=500_seed=123 LoRA model dc25438 verified mciccone committed on Jun 10
Add llama_finetune_temporal_sequences_r16_alpha=32_dropout=0.05_lr0.0001_data_size1000_max_steps=100_seed=123 LoRA model fc3fab3 verified mciccone committed on Jun 10
Add llama_finetune_temporal_sequences_r16_alpha=32_dropout=0.05_lr0.0002_data_size1000_max_steps=100_seed=123 LoRA model 618206f verified mciccone committed on Jun 10
Add llama_finetune_temporal_sequences_r16_alpha=32_dropout=0.05_lr0.0003_data_size1000_max_steps=100_seed=123 LoRA model ba193dd verified mciccone committed on Jun 10
Add llama_finetune_temporal_sequences_r16_alpha=32_dropout=0.05_lr0.0003_data_size1000_max_steps=500_seed=123 LoRA model 442fc6d verified mciccone committed on Jun 10
Add llama_finetune_temporal_sequences_r16_alpha=32_dropout=0.05_lr0.0002_data_size1000_max_steps=500_seed=123 LoRA model 6016fd7 verified mciccone committed on Jun 10