{ "model_config": { "_name_or_path": "xlm-roberta-base", "architectures": [ "XLMRobertaForSequenceClassification" ], "attention_probs_dropout_prob": 0.1, "bos_token_id": 0, "classifier_dropout": null, "eos_token_id": 2, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "hidden_size": 768, "initializer_range": 0.02, "intermediate_size": 3072, "layer_norm_eps": 1e-05, "max_position_embeddings": 514, "model_type": "xlm-roberta", "num_attention_heads": 12, "num_hidden_layers": 12, "output_past": true, "pad_token_id": 1, "position_embedding_type": "absolute", "problem_type": "single_label_classification", "torch_dtype": "float32", "transformers_version": "4.44.2", "type_vocab_size": 1, "use_cache": true, "vocab_size": 250002 }, "training_config": { "learning_rate": 2e-5, "num_train_epochs": 3, "per_device_train_batch_size": 16, "per_device_eval_batch_size": 16, "warmup_steps": 500, "weight_decay": 0.01, "logging_dir": "./logs", "evaluation_strategy": "epoch" }, "tokenizer_config": { "model_max_length": 512, "padding_side": "right", "truncation_side": "right", "special_tokens": { "pad_token": "", "unk_token": "", "bos_token": "", "eos_token": "" } }, "inference_config": { "task": "text-classification", "labels": ["HUMAN", "AI"], "threshold": 0.5, "max_length": 512, "batch_size": 32, "options": { "wait_for_model": true, "use_cache": true } }, "api_config": { "endpoint": "https://api-inference.huggingface.co/models/yaya36095/text-detector", "headers": { "Content-Type": "application/json" }, "cors": { "allow_origin": "*", "allow_headers": [ "authorization", "x-client-info", "apikey", "content-type" ] } }, "model_info": { "name": "text-detector", "version": "1.0.0", "author": "yaya36095", "description": "A model for detecting AI-generated vs human-written text", "license": "MIT", "repository": "https://huggingface.co/yaya36095/text-detector", "languages": ["multilingual"], "tags": [ "text-classification", "ai-detection", "xlm-roberta" ] }, "environment": { "framework": "transformers", "framework_version": "4.44.2", "python_version": ">=3.8.0", "cuda_support": true, "required_packages": { "torch": ">=1.10.0", "transformers": ">=4.44.2", "numpy": ">=1.19.0" } }, "logging_config": { "level": "INFO", "format": "%(asctime)s - %(name)s - %(levelname)s - %(message)s", "development_mode": { "debug": true, "verbose": true }, "production_mode": { "debug": false, "verbose": false } }, "performance_metrics": { "accuracy_threshold": 0.85, "latency_threshold_ms": 500, "max_batch_size": 64, "memory_limit_mb": 4096 } }