{
    "module": "keras_hub.src.models.depth_anything.depth_anything_backbone",
    "class_name": "DepthAnythingBackbone",
    "config": {
        "name": "depth_anything_backbone",
        "trainable": true,
        "dtype": {
            "module": "keras",
            "class_name": "DTypePolicy",
            "config": {
                "name": "float32"
            },
            "registered_name": null
        },
        "image_encoder": {
            "module": "keras_hub.src.models.dinov2.dinov2_backbone",
            "class_name": "DINOV2Backbone",
            "config": {
                "name": "dinov2_backbone",
                "trainable": true,
                "dtype": {
                    "module": "keras",
                    "class_name": "DTypePolicy",
                    "config": {
                        "name": "float32"
                    },
                    "registered_name": null
                },
                "patch_size": 14,
                "num_layers": 24,
                "hidden_dim": 1024,
                "num_heads": 16,
                "intermediate_dim": 4096,
                "layer_scale_init_value": 1.0,
                "num_register_tokens": 0,
                "use_mask_token": true,
                "use_swiglu_ffn": false,
                "dropout_rate": 0.0,
                "drop_path_rate": 0.0,
                "image_shape": [
                    518,
                    518,
                    3
                ],
                "position_embedding_shape": [
                    518,
                    518
                ],
                "antialias_in_interpolation": false,
                "apply_layernorm": true
            },
            "registered_name": "keras_hub>DINOV2Backbone"
        },
        "reassemble_factors": [
            4,
            2,
            1,
            0.5
        ],
        "neck_hidden_dims": [
            256,
            512,
            1024,
            1024
        ],
        "fusion_hidden_dim": 256,
        "head_hidden_dim": 32,
        "head_in_index": -1,
        "feature_keys": [
            "stage5",
            "stage12",
            "stage18",
            "stage24"
        ]
    },
    "registered_name": "keras_hub>DepthAnythingBackbone"
}