{
  "action_dim": 7,
  "action_model_type": "DiT-B",
  "data_root_dir": "s3://real_data_raw/open_x_embodiment_origin",
  "debug": false,
  "fix_system1": true,
  "future_action_window_size": 15,
  "hf_token": "hf_token",
  "image_aug": true,
  "is_resume": false,
  "load_all_data_for_training": true,
  "num_of_meta_query": 64,
  "past_action_window_size": 0,
  "pretrained_checkpoint": "/mnt/petrelfs/yangshuai1/rep/cogact_with_history/outputs/head_balation/sys12_meta_query_action_only_sync_pretraining_v2_query_64_mlp_lora--image_aug/checkpoints/step-034500-epoch-08_unload_lora.pt",
  "repeated_diffusion_steps": 4,
  "resume_epoch": null,
  "resume_step": null,
  "run_id": "sys12_meta_query_full_finetune_sync_cotraining_v2_xlora_freeze_head_instruction_long--image_augstage2",
  "run_id_note": null,
  "run_root_dir": "outputs/head_balation",
  "save_interval": 1500,
  "seed": 42,
  "stage": "stage2",
  "trackers": [
    "jsonl",
    "wandb"
  ],
  "use_ema": false,
  "use_mm": true,
  "vla": {
    "action_tokenizer": "extra_action_tokenizer",
    "base_vlm": "/mnt/petrelfs/yangshuai1/yangshuai1/share_mllm/Eagle2-2B",
    "data_mix": "bridge_rt_1",
    "enable_gradient_checkpointing": false,
    "enable_mixed_precision_training": true,
    "epochs": 100,
    "expected_world_size": 64,
    "freeze_llm_backbone": false,
    "freeze_vision_backbone": false,
    "global_batch_size": 512,
    "learning_rate": 5e-05,
    "lr_scheduler_type": "constant",
    "max_grad_norm": 1.0,
    "max_steps": null,
    "per_device_batch_size": 8,
    "reduce_in_full_precision": true,
    "shuffle_buffer_size": 250000,
    "train_strategy": "fsdp-full-shard",
    "type": "prism-qwen25-dinosiglip-224px+0_5b",
    "unfreeze_last_llm_layer": false,
    "vla_id": "prism-qwen25-dinosiglip-224px+0_5b",
    "warmup_ratio": 0.0,
    "weight_decay": 0.0
  },
  "wandb_entity": "shuaiyang2003",
  "wandb_project": "dual_sys"
}