lr: 0.0004
lr_cooldown_end: 0.0
lr_cooldown_power: 1.0
lr_scheduler: cosine
lr_tau: 0.0001
model: ViT-B-32
model_beta_init: 0.0
model_ema_beta: 0.99
model_update_type: ONE_STEP
n_class_tokens: -1
name: emn_fix-bias-10.0sample-negFalse-scaleFalse-pos1.0-eps1e-06-stocFalselr0.0004-ViT-B-32-1990_0124032307
no_set_device_rank: False
norm_cap: 1.0
normalize_type: L2
note: emn_fix
pos_coef: 1.0
precision: amp
pretrained:
pretrained_image: False
rank: 0
remote_sync: None
remote_sync_frequency: 300
remote_sync_protocol: s3
report_to: wandb
resume: None
save_frequency: 1
save_most_recent: False
scale_loss: False
seed: 42
siglip: False
skip_scheduler: False
stoc_fit_lambda: False
tensorboard: False
tensorboard_path:
torchcompile: False
torchscript: False
trace: False
train_data: /scratch/cc12m/{00000..01240}.tar
train_data_upsampling_factors: None
train_num_samples: 9187328
update_ema_mlps_every_n_steps: 2000
use_bn_sync: False
use_bnb_linear: None
use_feature_diff: False
val_data: /scratch/cc12m/{01241..01242}.tar
val_frequency: 1
val_num_samples: 205824
wandb: True
wandb_notes:
wandb_project_name: open-clip
warmup: 10000
wd: 0.1
workers: 6
world_size: 2
z_beta_max: 0.8
zeroshot_frequency: 1
accum_freq: 1
added_positive_type: sample
aug_cfg: {}
batch_size: 1990
beta1: 0.9
beta2: 0.98
beta_decay_epochs: 16
cache_dir: None
calculate_full: True
checkpoint_path: ./logs/emn_fix-bias-10.0sample-negFalse-scaleFalse-pos1.0-eps1e-06-stocFalselr0.0004-ViT-B-32-1990_0124122059/checkpoints
coca_caption_loss_weight: 2.0
coca_contrastive_loss_weight: 1.0
contrast_neg_only: False
copy_codebase: False
csv_caption_key: title
csv_img_key: filepath
csv_separator:
dataset_resampled: False
dataset_type: webdataset
ddp_static_graph: False
debug: False
delete_previous_checkpoint: False
denormalize_features: False
device: cuda:0
dist_backend: None
dist_url: None
distill: False
distill_model: None
distill_pretrained: None
distributed: True
ema_z_mode: ema_n_model
epochs: 33
epochs_cooldown: None
eps: 1e-06
fit_neg_only: True
fit_w_prev: False
force_custom_text: False
force_image_size: None
force_patch_dropout: None
force_quick_gelu: False
freeze_lambda_after_num_epochs: 100
gather_with_grad: True
grad_checkpointing: False
grad_clip_norm: 1.0
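
The settings above imply a few derived quantities worth sanity-checking (global batch size, steps per epoch, total optimizer steps) and describe a linear-warmup plus cosine learning-rate schedule (warmup: 10000, lr_scheduler: cosine). The Python sketch below works those numbers out and approximates the schedule; it is a minimal illustration of a generic warmup-then-cosine decay, not the exact open_clip scheduler, and the step counts assume one optimizer step per global batch (accum_freq: 1).

import math

# Values copied from the configuration above.
lr = 4e-4            # lr
warmup = 10_000      # warmup (steps)
epochs = 33          # epochs
batch_size = 1990    # per-GPU batch size
world_size = 2       # number of GPUs
train_num_samples = 9_187_328

global_batch = batch_size * world_size        # 3980 samples per optimizer step
steps_per_epoch = train_num_samples // global_batch   # ~2308 steps
total_steps = epochs * steps_per_epoch                # ~76,164 steps overall

def lr_at(step: int) -> float:
    """Linear warmup to the peak lr, then cosine decay toward 0 over the remaining steps."""
    if step < warmup:
        return lr * (step + 1) / warmup
    progress = (step - warmup) / max(1, total_steps - warmup)
    return 0.5 * lr * (1 + math.cos(math.pi * progress))

print(lr_at(0), lr_at(warmup), lr_at(total_steps - 1))

Under these assumptions the peak learning rate of 4e-4 is reached after the 10,000 warmup steps, roughly 4.3 epochs into the 33-epoch run, and decays close to zero by the final step.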