generated from ashleve/lightning-hydra-template
-
Notifications
You must be signed in to change notification settings - Fork 0
/
jetclass_classifier_epic.yaml
95 lines (82 loc) · 2.16 KB
/
jetclass_classifier_epic.yaml
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
# @package _global_

# to execute this experiment run:
# python train.py experiment=jetclass_classifier

defaults:
  - override /data: classifier_data_jetclass.yaml
  # - override /model: mlp_classifier.yaml
  # - override /model: particlenet_classifier.yaml
  # - override /model: ParT_classifier.yaml
  - override /model: epic_classifier_jetclass.yaml
  - override /callbacks: jetclass_classifier.yaml
  - override /trainer: gpu.yaml

# all parameters below will be merged with parameters from default configurations set above
# this allows you to overwrite only specified parameters

# add here checkpoint to continue training
# ckpt_path: XXX/checkpoints/last-EMA.ckpt

tags: ["fm-classifier_test", "JetClass", "ClassifierTest"]

run_note: ""

seed: 122

# shared values referenced below via ${vars.*} interpolation
vars:
  epochs: 200
  warmup: 3
  val_check_interval: null

data:
  # kin_only: true
  batch_size: 512
  # number_of_jets: 100000
  use_weaver_axes_convention: false
  data_file: XXX.h5
  # per-particle input features fed to the classifier (order defines input_dim)
  pf_features_list:
    - part_etarel
    - part_dphi
    - log_part_pt
    - log_part_energy
    - log_part_ptrel
    - log_part_energyrel
    - part_deltaR
    - part_charge
    - part_isChargedHadron
    - part_isNeutralHadron
    - part_isPhoton
    - part_isElectron
    - part_isMuon
    - tanh_part_d0val
    - part_d0err
    - tanh_part_dzval
    - part_dzerr
  # debug_sim_only: true
  used_flavor: Tbqq

# setting load_weights_from will load the weights from the given checkpoint, but start training from scratch
# load_weights_from: XXX.ckpt

model:
  net_config:
    dropout: 0.3
    # input_dim should match the length of data.pf_features_list (17 features above)
    input_dim: 17
    out_dim: 2
    # num_points: 200 # top tagging landscape
    # input_dims: 7 # ParticleNet
    # input_dim: 7 # ParT
    # num_classes: 2
  # scheduler:
  #   warmup: ${vars.warmup}
  #   max_iters: ${vars.epochs}
  # optimizer:
  #   lr: 0.0001
  # early_stopping:
  #   monitor: "val/loss"
  #   patience: 2000
  #   mode: "min"

task_name: "jetclass_classifier"

trainer:
  min_epochs: 1
  max_epochs: ${vars.epochs}
  val_check_interval: ${vars.val_check_interval}
  gradient_clip_val: 0.5

logger:
  wandb:
    tags: ${tags}
    group: "flow_matching_jetclass"
    name: ${task_name}
  comet:
    experiment_name: null
    project_name: "flow-matching-classifierTest"