{
  "batch_size": 4,
  "config": null,
  "fine_tune_type": "lora",
  "grad_accumulation_steps": 1,
  "grad_checkpoint": false,
  "iters": 100,
  "learning_rate": 2e-05,
  "lora_parameters": {
    "rank": 8,
    "dropout": 0.0,
    "scale": 20.0
  },
  "lr_schedule": null,
  "mask_prompt": false,
  "max_seq_length": 2048,
  "model": "mlx-community/Qwen2.5-Coder-0.5B-Instruct-4bit",
  "num_layers": 16,
  "optimizer": "adam",
  "optimizer_config": {
    "adam": {},
    "adamw": {},
    "muon": {},
    "sgd": {},
    "adafactor": {}
  },
  "project_name": null,
  "report_to": null,
  "save_every": 100,
  "seed": 0,
  "steps_per_eval": 200,
  "steps_per_report": 10,
  "test": false,
  "test_batches": 500,
  "train": true,
  "val_batches": 25
}