Updating instance size and instance type; see the bug & solution at https://github.com/huggingface/huggingface_hub/issues/2277

#9 by meg HF staff - opened
Files changed (1)
  1. main_backend_lighteval.py +2 -1
main_backend_lighteval.py CHANGED
@@ -68,7 +68,8 @@ def run_auto_eval():
     # For GPU
     # instance_size, instance_type = "small", "g4dn.xlarge"
     # For CPU
-    instance_size, instance_type = "medium", "c6i"
+    # Updated naming available at https://huggingface.co/docs/inference-endpoints/pricing
+    instance_size, instance_type = "x4", "intel-icl"
     logger.info(f'Starting Evaluation of {eval_request.json_filepath} on Inference endpoints: {instance_size} {instance_type}')
 
     run_evaluation(
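For context, the "x4" / "intel-icl" pair follows the instance naming that huggingface_hub now expects when creating an Inference Endpoint; the old "medium" / "c6i" naming is what triggered the linked issue. Below is a minimal sketch (not code from this repo) of passing the new names to huggingface_hub.create_inference_endpoint. The endpoint name, repository, vendor, and region are placeholder values, and in this backend the actual endpoint creation happens inside the evaluation call rather than in main_backend_lighteval.py itself.

from huggingface_hub import create_inference_endpoint

# Sketch only: spin up a CPU Inference Endpoint using the new instance naming.
# "lighteval-demo", "gpt2", "aws", and "us-east-1" are placeholder values.
endpoint = create_inference_endpoint(
    "lighteval-demo",
    repository="gpt2",
    framework="pytorch",
    task="text-generation",
    accelerator="cpu",
    vendor="aws",
    region="us-east-1",
    type="protected",
    instance_size="x4",         # was "medium" under the old naming
    instance_type="intel-icl",  # was "c6i" under the old naming
)
endpoint.wait()      # block until the endpoint is running
print(endpoint.url)

The pricing page linked in the added comment lists the valid instance_size / instance_type combinations for the new naming scheme.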