{ "torch_dtype": "float32", "architectures": [ "CoNNModel" ], "pad_token_id": 17, "transformers_version": "4.26.1", "vocab_size": 28, "hidden_size": 103, "num_hidden_layers": 3, "num_attention_heads": 1, "hidden_act": "relu", "intermediate_size": 28, "hidden_dropout_prob": 0.0, "attention_probs_dropout_prob": 0.0, "max_position_embeddings": 16, "initializer_range": 0.02, "layer_norm": false, "layer_norm_eps": 0.02, "mlp_hidden_size": 32, "input_encoding_map": { "a": 0, "b": 1, "bos": 2, "c": 3, "d": 4, "e": 5, "f": 6, "g": 7, "h": 8, "i": 9, "j": 10, "k": 11, "l": 12, "m": 13, "n": 14, "o": 15, "p": 16, "pad": 17, "q": 18, "r": 19, "s": 20, "t": 21, "u": 22, "v": 23, "w": 24, "x": 25, "y": 26, "z": 27 }, "output_encoding_map": { "a": 0, "b": 1, "c": 2, "d": 3, "e": 4, "f": 5, "g": 6, "h": 7, "i": 8, "j": 9, "k": 10, "l": 11, "m": 12, "n": 13, "o": 14, "p": 15, "q": 16, "r": 17, "s": 18, "t": 19, "u": 20, "v": 21, "w": 22, "x": 23, "y": 24, "z": 25 }, "model_type": "conn" }