{
  "_name_or_path": "clip-vit-large-patch14/",
  "architectures": [
    "CLIPModel"
  ],
  "initializer_factor": 1.0,
  "logit_scale_init_value": 2.6592,
  "model_type": "clip",
  "projection_dim": 768,
  "text_config": {
    "_name_or_path": "",
    "add_cross_attention": false,
    "architectures": null,
    "attention_dropout": 0.0,
    "bad_words_ids": null,
    "bos_token_id": 0,
    "chunk_size_feed_forward": 0,
    "cross_attention_hidden_size": null,
    "decoder_start_token_id": null,
    "diversity_penalty": 0.0,
    "do_sample": false,
    "dropout": 0.0,
    "early_stopping": false,
    "encoder_no_repeat_ngram_size": 0,
    "eos_token_id": 2,
    "finetuning_task": null,
    "forced_bos_token_id": null,
    "forced_eos_token_id": null,
    "hidden_act": "quick_gelu",
    "hidden_size": 768,
    "id2label": {
      "0": "LABEL_0",
      "1": "LABEL_1"
    },
    "initializer_factor": 1.0,
    "initializer_range": 0.02,
    "intermediate_size": 3072,
    "is_decoder": false,
    "is_encoder_decoder": false,
    "label2id": {
      "LABEL_0": 0,
      "LABEL_1": 1
    },
    "layer_norm_eps": 1e-05,
    "length_penalty": 1.0,
    "max_length": 20,
    "max_position_embeddings": 77,
    "min_length": 0,
    "model_type": "clip_text_model",
    "no_repeat_ngram_size": 0,
    "num_attention_heads": 12,
    "num_beam_groups": 1,
    "num_beams": 1,
    "num_hidden_layers": 12,
    "num_return_sequences": 1,
    "output_attentions": false,
    "output_hidden_states": false,
    "output_scores": false,
    "pad_token_id": 1,
    "prefix": null,
    "problem_type": null,
    "projection_dim": 768,
    "pruned_heads": {},
    "remove_invalid_values": false,
    "repetition_penalty": 1.0,
    "return_dict": true,
    "return_dict_in_generate": false,
    "sep_token_id": null,
    "task_specific_params": null,
    "temperature": 1.0,
    "tie_encoder_decoder": false,
    "tie_word_embeddings": true,
    "tokenizer_class": null,
    "top_k": 50,
    "top_p": 1.0,
    "torch_dtype": null,
    "torchscript": false,
    "transformers_version": "4.16.0.dev0",
    "use_bfloat16": false,
    "vocab_size": 49408
  },
  "text_config_dict": {
    "hidden_size": 768,
    "intermediate_size": 3072,
    "num_attention_heads": 12,
    "num_hidden_layers": 12,
    "projection_dim": 768
  },
  "torch_dtype": "float32",
  "transformers_version": null,
  "vision_config": {
    "_name_or_path": "",
    "add_cross_attention": false,
    "architectures": null,
    "attention_dropout": 0.0,
    "bad_words_ids": null,
    "bos_token_id": null,
    "chunk_size_feed_forward": 0,
    "cross_attention_hidden_size": null,
    "decoder_start_token_id": null,
    "diversity_penalty": 0.0,
    "do_sample": false,
    "dropout": 0.0,
    "early_stopping": false,
    "encoder_no_repeat_ngram_size": 0,
    "eos_token_id": null,
    "finetuning_task": null,
    "forced_bos_token_id": null,
    "forced_eos_token_id": null,
    "hidden_act": "quick_gelu",
    "hidden_size": 1024,
    "id2label": {
      "0": "LABEL_0",
      "1": "LABEL_1"
    },
    "image_size": 224,
    "initializer_factor": 1.0,
    "initializer_range": 0.02,
    "intermediate_size": 4096,
    "is_decoder": false,
    "is_encoder_decoder": false,
    "label2id": {
      "LABEL_0": 0,
      "LABEL_1": 1
    },
    "layer_norm_eps": 1e-05,
    "length_penalty": 1.0,
    "max_length": 20,
    "min_length": 0,
    "model_type": "clip_vision_model",
    "no_repeat_ngram_size": 0,
    "num_attention_heads": 16,
    "num_beam_groups": 1,
    "num_beams": 1,
    "num_hidden_layers": 24,
    "num_return_sequences": 1,
    "output_attentions": false,
    "output_hidden_states": false,
    "output_scores": false,
    "pad_token_id": null,
    "patch_size": 14,
    "prefix": null,
    "problem_type": null,
    "projection_dim": 768,
    "pruned_heads": {},
    "remove_invalid_values": false,
    "repetition_penalty": 1.0,
    "return_dict": true,
    "return_dict_in_generate": false,
    "sep_token_id": null,
    "task_specific_params": null,
    "temperature": 1.0,
    "tie_encoder_decoder": false,
    "tie_word_embeddings": true,
    "tokenizer_class": null,
    "top_k": 50,
    "top_p": 1.0,
    "torch_dtype": null,
    "torchscript": false,
    "transformers_version": "4.16.0.dev0",
    "use_bfloat16": false
  },
  "vision_config_dict": {
    "hidden_size": 1024,
    "intermediate_size": 4096,
    "num_attention_heads": 16,
    "num_hidden_layers": 24,
    "patch_size": 14,
    "projection_dim": 768
  }
}