haotiansun014 committed (verified)
Commit: f7d44b9 · Parent(s): 54ace6a

Upload params.txt with huggingface_hub

Files changed (1):
  1. params.txt (+12, -12)
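The commit message indicates the file was pushed programmatically with the huggingface_hub client. A minimal sketch of such an upload is shown below; the repo_id and local path are placeholders (the target repository is not named in this commit view), and this is not taken from the author's actual upload script.

# Sketch only: repo_id and local path are hypothetical placeholders.
# Requires `pip install huggingface_hub` and an authenticated token
# (e.g. via `huggingface-cli login`).
from huggingface_hub import HfApi

api = HfApi()
api.upload_file(
    path_or_fileobj="./logs/params.txt",              # local file to push (placeholder path)
    path_in_repo="params.txt",                        # destination filename inside the repo
    repo_id="haotiansun014/example-repo",             # placeholder repo id
    commit_message="Upload params.txt with huggingface_hub",
)

The uploaded file can later be fetched back with huggingface_hub's hf_hub_download(repo_id, filename) if needed.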
params.txt CHANGED

@@ -7,7 +7,7 @@ beta2: 0.98
  beta_decay_epochs: 16
  cache_dir: None
  calculate_full: True
- checkpoint_path: ./logs/fit_w_prev-bias-10.0sample-negFalse-scaleTrue-pos1.0-eps1e-06-stocFalselr0.0004-ViT-B-32-1990_0124032041/checkpoints
+ checkpoint_path: ./logs/emn_fix-bias-10.0sample-negFalse-scaleFalse-pos1.0-eps1e-06-stocFalselr0.0004-ViT-B-32-1990_0124032307/checkpoints
  coca_caption_loss_weight: 2.0
  coca_contrastive_loss_weight: 1.0
  contrast_neg_only: False
@@ -28,12 +28,12 @@ distill: False
  distill_model: None
  distill_pretrained: None
  distributed: True
- ema_z_mode: ema_only
+ ema_z_mode: ema_n_sample
  epochs: 33
  epochs_cooldown: None
  eps: 1e-06
  fit_neg_only: True
- fit_w_prev: True
+ fit_w_prev: False
  force_custom_text: False
  force_image_size: None
  force_patch_dropout: None
@@ -53,11 +53,11 @@ imagenet_val: /scratch/imagenet/val/
  init_lambda: 1.0
  init_logit_bias: -10.0
  init_logit_scale: None
- lambda_eps: 1e-07
+ lambda_eps: 1e-12
  lambda_lr: 0.001
- lambda_tolerance: 0.005
- lambda_update_frequency: [5]
- learn_logit_bias: True
+ lambda_tolerance: 0.0005
+ lambda_update_frequency: [20, 10, 1]
+ learn_logit_bias: False
  learn_logit_scale: True
  local_loss: True
  local_rank: 0
@@ -70,7 +70,7 @@ lock_text_unlocked_layers: 0
  log_every_n_steps: 100
  log_level: 20
  log_local: False
- log_path: ./logs/fit_w_prev-bias-10.0sample-negFalse-scaleTrue-pos1.0-eps1e-06-stocFalselr0.0004-ViT-B-32-1990_0124032041/out.log
+ log_path: ./logs/emn_fix-bias-10.0sample-negFalse-scaleFalse-pos1.0-eps1e-06-stocFalselr0.0004-ViT-B-32-1990_0124032307/out.log
  logit_scale_clamp: 100
  logs: ./logs/
  loss_type: COND_EXP
@@ -84,11 +84,11 @@ model_beta_init: 0.0
  model_ema_beta: 0.99
  model_update_type: ONE_STEP
  n_class_tokens: -1
- name: fit_w_prev-bias-10.0sample-negFalse-scaleTrue-pos1.0-eps1e-06-stocFalselr0.0004-ViT-B-32-1990_0124032041
+ name: emn_fix-bias-10.0sample-negFalse-scaleFalse-pos1.0-eps1e-06-stocFalselr0.0004-ViT-B-32-1990_0124032307
  no_set_device_rank: False
  norm_cap: 1.0
  normalize_type: L2
- note: fit_w_prev
+ note: emn_fix
  pos_coef: 1.0
  precision: amp
  pretrained:
@@ -101,7 +101,7 @@ report_to: wandb
  resume: None
  save_frequency: 1
  save_most_recent: False
- scale_loss: True
+ scale_loss: False
  seed: 42
  siglip: False
  skip_scheduler: False
@@ -114,7 +114,7 @@ trace: False
  train_data: /scratch/cc12m/{00000..01240}.tar
  train_data_upsampling_factors: None
  train_num_samples: 9187328
- update_ema_mlps_every_n_steps: 1
+ update_ema_mlps_every_n_steps: 2000
  use_bn_sync: False
  use_bnb_linear: None
  use_feature_diff: False
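Since params.txt is a flat "key: value" dump of the training arguments, the changed settings above can also be recovered programmatically. The sketch below is not part of the original training code; it assumes two local copies of the old and new params.txt (file names are placeholders) and simply diffs the parsed key/value pairs.

# Sketch, assuming params.txt uses one "key: value" pair per line as shown above.
# File paths are placeholders, not files from this repository.
def load_params(path):
    """Parse a flat 'key: value' params dump into a dict of strings."""
    params = {}
    with open(path) as f:
        for line in f:
            if ":" not in line:
                continue
            key, _, value = line.partition(":")
            params[key.strip()] = value.strip()
    return params

old = load_params("params_old.txt")
new = load_params("params_new.txt")

# Print every key whose value differs between the two dumps.
for key in sorted(old.keys() | new.keys()):
    if old.get(key) != new.get(key):
        print(f"{key}: {old.get(key)} -> {new.get(key)}")

Run against the two versions in this commit, this would list the twelve changed keys visible in the diff (checkpoint_path, ema_z_mode, fit_w_prev, lambda_eps, lambda_tolerance, lambda_update_frequency, learn_logit_bias, log_path, name, note, scale_loss, update_ema_mlps_every_n_steps), matching the +12/-12 summary.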