Active filters: DPO
- Nan-Do/Truthful_DPO_TomGrc_FusionNet_7Bx2_MoE_13B-GGUF • 13B • 105 downloads • 11 likes
- LoneStriker/Truthful_DPO_TomGrc_FusionNet_7Bx2_MoE_13B-3.0bpw-h6-exl2 • Text Generation
- LoneStriker/Truthful_DPO_TomGrc_FusionNet_7Bx2_MoE_13B-4.0bpw-h6-exl2 • Text Generation • 1 download • 1 like
- LoneStriker/Truthful_DPO_TomGrc_FusionNet_7Bx2_MoE_13B-5.0bpw-h6-exl2 • Text Generation
- LoneStriker/Truthful_DPO_TomGrc_FusionNet_7Bx2_MoE_13B-6.0bpw-h6-exl2 • Text Generation • 3 likes
- LoneStriker/Truthful_DPO_TomGrc_FusionNet_7Bx2_MoE_13B-8.0bpw-h8-exl2 • Text Generation
- lighthouse-kr/Mistral-7B-lighthouse-merge-v0.1 • Text Generation • 7B
- cloudyu/Yi-34Bx2-MoE-60B-DPO • Text Generation • 61B • 8.47k downloads • 3 likes
- SJ-Donald/SJ-SOLAR-10.7b-DPO • Text Generation • 11B • 2 downloads
- cloudyu/Truthful_DPO_TomGrc_FusionNet_34Bx2_MoE • Text Generation • 61B • 4 likes
- cloudyu/Truthful_DPO_cloudyu_Mixtral_34Bx2_MoE_60B • Text Generation • 61B • 2 downloads
- yunconglong/MoE_13B_DPO • Text Generation • 13B • 8.47k downloads • 6 likes
- yunconglong/13B_MATH_DPO • Text Generation • 13B • 1 download • 1 like
- pharaouk/fusedyi • Text Generation • 11B • 1 download
- Andyrasika/mistral-ft-optimized-dpo • Text Generation • 7B • 4 likes
- mlx-community/NousHermes-Mixtral-8x7B-Reddit-mlx • 8B • 62 downloads • 6 likes
- tanamettpk/TC-instruct-DPO • Text Generation • 7B • 66 downloads • 10 likes
- NousResearch/Nous-Hermes-2-Mistral-7B-DPO-GGUF • 7B • 9.34k downloads • 88 likes
- mlx-community/Nous-Hermes-2-Mistral-7B-DPO-4bit-MLX • 1B • 92 downloads • 5 likes
- bartowski/Nous-Hermes-2-Mistral-7B-DPO-exl2 • Text Generation • 1 download • 2 likes
- olafgeibig/Nous-Hermes-2-Mistral-7B-DPO-GGUF • 7B • 113 downloads
- solidrust/Nous-Hermes-2-Mistral-7B-DPO-AWQ • Text Generation • 1B • 4 downloads • 8 likes
- heyholetsgo/Nous-Hermes-2-Mistral-7B-DPO-AWQ • Text Generation • 7B • 34 downloads • 1 like
- solidrust/bagel-dpo-7b-v0.4-AWQ • Text Generation • 1B • 4 downloads
- solidrust/Ignis-7B-DPO-Laser-AWQ • Text Generation • 1B
- Weni/WeniGPT-2.5.3-Zephyr-7B-zephyr-prompt-LLM_Base_2.0.3_DPO_reduction_variation
- solidrust/Gecko-7B-v0.1-DPO-AWQ • Text Generation • 1B • 1 download
- Weni/WeniGPT-2.6.3-Zephyr-7B-zephyr-prompt-LLM_Base_2.0.3_DPO_reduction_variation
- Weni/WeniGPT-2.7.3-Zephyr-7B-zephyr-prompt-LLM_Base_2.0.3_DPO_reduction_variation
- koesn/Truthful_DPO_TomGrc_FusionNet_7Bx2_MoE_13B-GGUF • 13B • 19 downloads • 1 like
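The same DPO-filtered listing can be pulled programmatically rather than through the web view. Below is a minimal sketch using the huggingface_hub client; the tag string "dpo", the sort order, and the result limit are assumptions chosen for illustration, not values taken from the page above.

```python
# Minimal sketch, assuming the Hub's "DPO" filter corresponds to the model tag "dpo".
from huggingface_hub import HfApi

api = HfApi()
models = api.list_models(
    filter="dpo",       # assumed tag behind the "DPO" filter shown above
    sort="downloads",   # order by download count
    direction=-1,       # descending
    limit=30,           # roughly the number of entries in the listing
)

# Print the same fields the listing shows: repo id, pipeline tag, downloads, likes.
for m in models:
    print(m.id, m.pipeline_tag, m.downloads, m.likes)
```

Note that many entries in the list are quantized packagings (GGUF, exl2, AWQ, MLX) of the same base models; each format needs a matching runtime (for example llama.cpp for GGUF or ExLlamaV2 for exl2) rather than plain transformers weights.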