-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathdata_generation.py
240 lines (204 loc) · 7.14 KB
/
data_generation.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
# #!/usr/bin/env python3
# Created by the authors of publication https://www.nature.com/articles/s41566-018-0107-6
# Adapted by David Meier on Apr 24 2024
import os
import h5py
import sys
import numpy as np
from tqdm import trange
from multiprocessing import Pool
def Job(joblist):
    """
    Calculates the spectrogram and the detector images depending on the experimental parameters
    For questions about the calculation refers to Gregor Hartmann

    Parameters
    ----------
    joblist : List
        contains the jobs parameter for the generation, in positional order:
        [0] N_batch, [1] KICK_MIN, [2] KICK_MAX, [3] PEAKS, [4] ELLIPT,
        [5] ELL_TILT, [6] PULSE, [7] RNG seed, [8] HOT_ENABLED,
        [9] RANDOM_INTENSITY, [10] PHASE_STEP (or None).

    Returns
    -------
    None when executed as a script (results are written to an HDF5 file);
    otherwise the last generated [X, Y] pair (see the trailing `return trainer`).
    """
    # Seed this worker's RNG so each job is reproducible.
    np.random.seed(joblist[7])
    # Unpack the positional job parameters.
    N_batch = joblist[0]  # number of samples to generate in this job
    KICK_MIN = joblist[1]  # lower bound for the random kick amplitude
    KICK_MAX = joblist[2]  # upper bound for the random kick amplitude
    PEAKS = joblist[3]  # number of Gaussian features added per sample
    ELLIPT = joblist[4]  # ellipticity parameter
    ELL_TILT = joblist[5]  # tilt angle of the ellipticity
    PULSE = joblist[6]  # NOTE(review): unpacked but never used below
    HOT_ENABLED = joblist[8]  # add random "hot" pixels when truthy
    RANDOM_INTENSITY = joblist[9]  # randomize per-peak intensity when truthy
    PHASE_STEP = joblist[10]  # fixed phase column index, or None for random
    PHASE_STEPS = 80  # 1st dimension of Y
    ENERGY_STEPS = 60  # 2nd dimension of X and Y
    sigmax_max = 7  # max Gaussian width along the phase axis
    sigmay_max = 7  # max Gaussian width along the energy axis
    TILT = (np.pi,)  # 1-tuple; broadcasts in the ANGDIST expression below
    ENERGY_GAUSS = 1  # further partial wave properties(tilt and width)
    # Used arrays for partial wave creation
    DANGLE = np.array([np.radians(22.5 * i) for i in range(16)])  # 16 detector angles, 22.5 deg apart
    EA = np.array(range(ENERGY_STEPS))  # 2nd axis of X and Y
    KEG_REC = (EA.copy())  # here it is the same, but for code extensions this distinction should be kept
    PHASEG_REC = np.linspace(0, 2 * np.pi, PHASE_STEPS)  # 1st axis of Y in physical values
    ENERGY_GAUSS = 1  # NOTE(review): duplicate of the assignment above (same value)

    def gauss(x, x0, xw):  # gaussian distribution
        # xw is treated as FWHM: 2.35 ~ 2*sqrt(2*ln 2) converts FWHM to sigma.
        return np.exp(-((x - x0) ** 2) / 2 / (xw / 2.35) ** 2)

    def ef(phase):  # ellipticity function
        return (ELLIPT) ** 2 / (
            (ELLIPT * np.cos(phase - ELL_TILT)) ** 2 + (np.sin(phase - ELL_TILT)) ** 2)

    def sine(ke, kick, phase):  # sinefunction*ellipticity
        # Kick modulates the kinetic energy per detector angle (vector over DANGLE).
        return ke + kick * np.cos(DANGLE - phase) * ef(phase)

    def sim(ke, kick, phase):  # simulation of partial wave
        # NOTE(review): this `en` is immediately shadowed by the comprehension
        # variable below, so the assignment has no effect.
        en = sine(ke, kick, phase)[0]
        # ANGDIST is a Job-local assigned in the batch loop below before the
        # first call to sim (late-binding closure).
        return (
            ANGDIST
            * np.array([gauss(EA, en, ENERGY_GAUSS) for en in sine(ke, kick, phase)]).T
        )

    def create_basis_reconstruction(kick):  # create a basis set for a fixed kick
        # One basis image per (phase, energy) grid cell, phase-major order —
        # transform_YX below relies on exactly this ordering.
        Lout = []
        for p in range(len(PHASEG_REC)):
            for k in range(len(KEG_REC)):
                Lout.append(sim(KEG_REC[k], kick, PHASEG_REC[p]))
        return np.array(Lout)

    def transform_YX(spec, basis):  # using a given basis for partial wave adding
        # Detector image = sum of basis images weighted by the spectrogram cells.
        Lrec = np.zeros((ENERGY_STEPS, 16))
        counter = 0
        for p in range(PHASE_STEPS):
            for k in range(ENERGY_STEPS):
                Lrec += spec[k, p] * basis[counter]
                counter += 1
        return Lrec

    def add_gauss(Y, sigmax, sigmay, centerx, centery, intensity):
        # Build a 2-D Gaussian centred on the middle phase column, then roll the
        # columns by `centerx` (phase axis is periodic) and add it onto Y.
        Ynew = np.zeros((ENERGY_STEPS, PHASE_STEPS))
        Yadd = Ynew.copy()
        for x in range(PHASE_STEPS):
            for y in range(ENERGY_STEPS):
                Ynew[y, x] += (
                    intensity
                    * gauss(x, PHASE_STEPS // 2, sigmax)
                    * gauss(y, centery, sigmay)
                )
        Yadd = np.append(Ynew[:, centerx:PHASE_STEPS], Ynew[:, 0:centerx], axis=1)
        Y += Yadd
        return Y

    def create_training_data():
        # Generate one (X, Y) pair: Y is the spectrogram, X the detector image.
        Y = np.zeros((ENERGY_STEPS, PHASE_STEPS))
        kick = np.random.uniform(low=KICK_MIN, high=KICK_MAX, size=None)
        features = PEAKS
        for _ in range(features):
            if PHASE_STEP is not None:
                centerx = PHASE_STEP  # fixed phase column (phase-separated mode)
            else:
                centerx = int(np.random.rand() * PHASE_STEPS)
            # Keep the energy centre roughly `kick` away from both borders.
            # NOTE(review): ENERGY_STEPS - kick * 2 can go negative when
            # kick > ENERGY_STEPS / 2 (KICK_MAX may allow that) — confirm intent.
            centery = int((np.random.rand()) * (ENERGY_STEPS - kick * 2) + kick)
            sigmax = np.random.rand() * sigmax_max
            sigmay = np.random.rand() * sigmay_max
            if RANDOM_INTENSITY:
                intensity = np.random.rand()
            else:
                intensity = 1.
            Y = add_gauss(Y, sigmax, sigmay, centerx, centery, intensity)
        if HOT_ENABLED:
            # Sprinkle up to 14 random "hot" pixels as noise.
            hot_ones=np.random.randint(0,high=15)
            for hotty in range(hot_ones):
                x=np.random.randint(0,high=PHASE_STEPS)
                y=np.random.randint(0,high=ENERGY_STEPS)
                Y[y,x]+=np.random.rand()
        basis_reconstruction = create_basis_reconstruction(kick)
        X = transform_YX(Y, basis_reconstruction)
        return [X, Y]

    # Generate the batch; only the flattened detector images X are collected.
    x = []
    for i in trange(N_batch):
        BETA_2 = 2.  # np.random.uniform(-1, 2, size=1)
        # Angular distribution for this sample; `sim` above closes over this name.
        ANGDIST = 1 + BETA_2 / 2.0 * (0.5 - 0.5 * np.cos(2 * (DANGLE - TILT)) - 1)
        trainer = create_training_data()
        X = np.array(trainer[0])
        x.append(X.flatten())
    if __name__ == "__main__":
        # Script/worker path: write all flattened samples to one HDF5 file.
        # NOTE(review): `train_export` is a module-level global set in the
        # __main__ block — this only resolves in fork-started workers; a
        # spawn-based start method would not inherit it. Verify on non-Linux.
        if PHASE_STEP is not None:
            phase_step_string = "_phase"+str(PHASE_STEP)
        else:
            phase_step_string = ""
        fe = h5py.File(
            train_export
            + "N"
            + str(N_batch)
            + "_peaks"
            + str(PEAKS)
            + phase_step_string
            + "_seed"
            + str(joblist[7])
            + ".h5",
            "w",
        )
        fe.create_dataset("x", data=np.array(x), compression="gzip")
        fe.close()
    else:
        # Import path: hand back the last generated [X, Y] pair instead.
        return trainer
if __name__ == "__main__":
    # Amount of multithreading tasks/cpus
    Number_Workers = 100
    phase_separation_mode = True

    # Output directory and job-grid shape depend on the generation mode.
    if phase_separation_mode:
        train_export = "./datasets/sigmaxy_7_peaks_0_20_hot_15_phase_separated/"
        files_per_peak = 1  # one file per phase step
        max_peaks = 1       # exactly one peak per sample
    else:
        train_export = "./datasets/sigmaxy_7_peaks_0_20_hot_15/"
        files_per_peak = 5
        max_peaks = 20

    # exist_ok avoids the check-then-create race when several array jobs
    # (distinguished by sys.argv[1]) start concurrently.
    os.makedirs(train_export, exist_ok=True)

    # Amount of samples per file
    N = 10000

    # The job index (e.g. a cluster array-task id) offsets the RNG seeds;
    # fail with a clear usage message instead of a bare IndexError.
    if len(sys.argv) < 2:
        sys.exit("usage: data_generation.py <job_index>  (integer seed offset)")
    init_seed = 42 + int(sys.argv[1]) * files_per_peak * max_peaks
    hot_enabled = True

    # Fixed experimental parameters
    kick_min = 0
    kick_max = 100
    ellipt = 0.73
    elltilt = (90 - 22.5) / 180 * np.pi
    pulse = 30

    # Build the job list consumed by Job(); positional layout documented there.
    if phase_separation_mode:
        # NOTE(review): every phase-step job reuses the identical seed
        # `init_seed` — confirm this is intended before relying on the
        # statistical independence of the 80 files.
        Ltodo = [
            [N, kick_min, kick_max, 1, ellipt, elltilt, pulse,
             init_seed, hot_enabled, True, phase_step]
            for phase_step in range(0, 80)
        ]
    else:
        Ltodo = [
            [N, kick_min, kick_max, peak, ellipt, elltilt, pulse,
             init_seed + file_nr * max_peaks + peak, hot_enabled, True, None]
            for file_nr in range(files_per_peak)
            for peak in range(1, max_peaks + 1)
        ]

    # Fan the jobs out over the worker pool; each worker writes its own file.
    with Pool(Number_Workers) as p:
        p.map(Job, Ltodo)