Last modified by makuadm on 2026-01-07 06:21

From version 2.1
edited by cpassow
on 2022-04-04 08:06
Change comment: There is no comment for this version
To version 38.1
edited by sndueste
on 2023-09-29 11:15
Change comment: There is no comment for this version

Summary

Details

Page properties
Title
... ... @@ -1,1 +1,1 @@
1 -DEMO - Documentation
1 +DEMO - Working with FLASH data
Author
... ... @@ -1,1 +1,1 @@
1 -XWiki.cpassow
1 +XWiki.sndueste
Content
... ... @@ -1,27 +1,239 @@
1 -(% style="list-style-type: square;" %)
2 -* Short descriptions including Links:
3 -(% style="list-style-type: square;" %)
4 -** GPFS
5 -** JupyterHub
6 -** conda ?
1 +Experimental data is recorded as HDF5 files[[~[link~]>>doc:FLASHUSER.Data Acquisition and controls.Data Access at FLASH (DAQ, gpfs,\.\.\.).Offline data analysis (DAQ).The FLASH HDF5 structure.WebHome]] on the GPFS file system[[~[link~]>>doc:ASAP3.ASAP3 Data Storage for PETRA III]]. Access rights are linked to the user's DESY account and can be managed by the PI via the GAMMA portal[[~[link~]>>url:https://gamma-portal.desy.de/||shape="rect"]]. The experimental data can be downloaded via the GAMMA portal, but it is advisable to use the DESY computing infrastructure instead. Access points are ssh, the Maxwell display servers[[~[link~]>>url:https://confluence.desy.de/display/MXW/Maxwell+Cluster||shape="rect"]] and JupyterHub[[~[link~]>>url:https://confluence.desy.de/display/MXW/JupyterHub+on+Maxwell||shape="rect"]]. We recommend JupyterHub for data exploration and the SLURM resources for high-performance computing - see FAB for easy usage of the infrastructure.
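Once the beamtime folder is accessible, the raw HDF5 files can already be explored with plain h5py, for example from a JupyterHub session. The following is a minimal sketch; the file path is only illustrative, and the actual file layout is described on the linked FLASH HDF5 structure page.

{{code language="py" title="minimal exploration sketch (illustrative path)"}}
import h5py

# illustrative path on GPFS/ASAP3 -- replace with a real file from your beamtime folder
filename = "/asap3/flash/gpfs/<beamline>/<year>/data/<beamtime-id>/raw/hdf/example.h5"

with h5py.File(filename, "r") as f:
    # print every group and dataset name to get an overview of the recorded channels
    f.visit(print)
{{/code}}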
7 7  
8 -(% style="list-style-type: square;" %)
9 -* Links Repository
10 -(% style="list-style-type: square;" %)
11 -** including Method Description
3 +{{info title="How to log in to JupyterHub"}}
4 +=== ===
12 12  
6 +{{view-file att--filename="tmp.mp4" height="150"/}}
7 +{{/info}}
8 +
13 13  \\
14 14  
15 -(% style="list-style-type: square;" %)
16 -* Links to Binder
11 +{{info}}
12 +=== There are several options for working with the FLASH HDF5 data in Python ===
17 17  
14 +The actively developed option for large data sets: [[the FAB package>>url:https://hasfcpkg.desy.de/fab/fab.html||shape="rect"]] (see below)
15 +
16 +and for smaller projects:  (% class="Object" %)[[https:~~/~~/gitlab.desy.de/christopher.passow/flash-daq-hdf>>url:https://gitlab.desy.de/christopher.passow/flash-daq-hdf||shape="rect"]]
17 +{{/info}}
18 +
18 18  \\
19 19  
21 +[[~[~[image:attach:image2023-9-29_11-1-37.png~]~]>>url:https://hasfcpkg.desy.de/fab/fab.html||shape="rect"]]
22 +
20 20  \\
21 21  
22 -Questions:
25 +{{expand title="older ideas ..."}}
26 +(% class="Object" %)(object oriented) [[https:~~/~~/gitlab.desy.de/christopher.passow/fdh-builder>>url:https://gitlab.desy.de/christopher.passow/fdh-builder.git||shape="rect"]]
23 23  
24 -(% style="list-style-type: square;" %)
25 -* for whom
26 -* where Maxwell / local / extern
27 -* distribution
28 +----
29 +
30 +=== TODO ===
31 +
32 +(% class="task-list" %)
33 +(((
34 +{{task reference="/Tasks/Task_18" status="InProgress"}}
35 +Short descriptions including Links:   → as Text\\
36 +
37 +(% class="task-list" %)
38 +(((
39 +
40 +
41 +{{task reference="/Tasks/Task_19" status="InProgress"}}
42 +GPFS
43 +{{/task}}
44 +
45 +{{task reference="/Tasks/Task_40" status="InProgress"}}
46 +Access rights
47 +{{/task}}
48 +
49 +{{task reference="/Tasks/Task_41" status="InProgress"}}
50 +Gamma Portal
51 +{{/task}}
52 +
53 +{{task reference="/Tasks/Task_42" status="InProgress"}}
54 +Maxwell
55 +{{/task}}
56 +
57 +{{task reference="/Tasks/Task_20" status="InProgress"}}
58 +JupyterHub
59 +{{/task}}
60 +
61 +{{task reference="/Tasks/Task_21" status="InProgress"}}
62 +conda ?
63 +{{/task}}
64 +
65 +{{task reference="/Tasks/Task_36" status="InProgress"}}
66 +explain installing from a channel instead of a fixed environment; the environment file from the example repository can still be used
67 +{{/task}}
68 +)))
69 +{{/task}}
70 +)))
71 +
72 +\\
73 +
74 +(% class="task-list" %)
75 +(((
76 +{{task reference="/Tasks/Task_22" status="InProgress"}}
77 +distribution
78 +
79 +(% class="task-list" %)
80 +(((
81 +
82 +
83 +{{task reference="/Tasks/Task_23" status="InProgress"}}
84 +channel  (where to host?)
85 +{{/task}}
86 +
87 +{{task reference="/Tasks/Task_24" status="InProgress"}}
88 +environment file (repository with examples)
89 +{{/task}}
90 +)))
91 +{{/task}}
92 +)))
93 +
94 +\\
95 +
96 +(% class="task-list" %)
97 +(((
98 +{{task reference="/Tasks/Task_25" status="InProgress"}}
99 +Documentation
100 +
101 +(% class="task-list" %)
102 +(((
103 +
104 +
105 +{{task reference="/Tasks/Task_26" status="InProgress"}}
106 +here vs. repository vs. Sphinx
107 +{{/task}}
108 +)))
109 +{{/task}}
110 +)))
111 +
112 +\\
113 +
114 +(% class="task-list" %)
115 +(((
116 +{{task reference="/Tasks/Task_27" status="InProgress"}}
117 +Binder
118 +
119 +(% class="task-list" %)
120 +(((
121 +
122 +
123 +{{task reference="/Tasks/Task_28" status="InProgress"}}
124 +examples with Stefan
125 +{{/task}}
126 +)))
127 +{{/task}}
128 +)))
129 +
130 +\\
131 +
132 +(% class="task-list" %)
133 +(((
134 +{{task reference="/Tasks/Task_29" status="InProgress"}}
135 +Screencast
136 +
137 +(% class="task-list" %)
138 +(((
139 +
140 +
141 +{{task reference="/Tasks/Task_30" status="InProgress"}}
142 +log in to JupyterHub
143 +{{/task}}
144 +
145 +{{task reference="/Tasks/Task_31" status="InProgress"}}
146 +log in to a Maxwell display server
147 +{{/task}}
148 +
149 +{{task reference="/Tasks/Task_32" status="InProgress"}}
150 +use silx
151 +{{/task}}
152 +
153 +{{task reference="/Tasks/Task_33" status="InProgress"}}
154 +use the hdfview plugin in JupyterLab
155 +{{/task}}
156 +
157 +{{task reference="/Tasks/Task_34" status="InProgress"}}
158 +create conda env with flashh5
159 +{{/task}}
160 +)))
161 +{{/task}}
162 +)))
163 +
164 +\\
165 +
166 +\\
167 +
168 +----
169 +
170 +==== under review ====
171 +
172 +\\
173 +
174 +{{code language="bash"}}
175 +conda create -n flashh5 python=3.10  # 3.10 is not required, but Python >= 3.8 is preferred
176 +source activate flashh5
177 +conda install ipython numpy pandas  # TODO: fix dependencies
178 +conda install -c https://www.desy.de/~cpassow/condarepo/ flashh5
179 +
180 +## on JupyterHub: register the environment as a notebook kernel
181 +conda install ipykernel
182 +python -m ipykernel install --user --name flashh5 --display-name "flashh5"
183 +
184 +
185 +## to remove the kernel on JupyterHub again,
186 +## delete it from: /home/$USER/.local/share/jupyter/kernels/
187 +{{/code}}
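A quick way to verify that the kernel registered above really uses the flashh5 environment is a short check in a notebook started with that kernel (a minimal sketch; it only assumes the package name used above):

{{code language="py"}}
# run in a notebook that uses the "flashh5" kernel registered above
import sys
print(sys.executable)  # should point into the flashh5 conda environment

import flashh5  # the package installed from the channel above
{{/code}}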
188 +
189 +\\
190 +
191 +{{code language="py" title="moved to repository?"}}
192 +class RunDirectory:
193 +
194 +    def get_run_table(self):  # more or less information? RunComment | Number of Files | start & stop time ?
195 +        ...
196 +
197 +    def get_run(self, daq, run_number):  # daq is not needed!
198 +        ...
199 +
200 +
201 +
202 +class Run:  # constructor optional without RunDirectory, or use self.path there
203 +
204 +    def get_files(self):
205 +        ...
206 +
207 +    def get_channels(self):  # of file #1
208 +        ...
209 +
210 +    def get_start_time(self):  # better as attribute?
211 +        ...
212 +
213 +    def get_stop_time(self):  # which? | better as attribute?
214 +        ...
215 +
216 +    def to_df(self, daq_map):  # to_df(daq_map, slice) with slice=[0:4] -> throw Exception
217 +        ...
218 +
219 +    def to_series(self, channel):
220 +        ...
221 +
222 +    def to_array(self, channel):
223 +        ...
224 +{{/code}}
225 +
226 +{{code language="py" title="ideas"}}
227 +run.to_df(daq_map)
228 +run.to_series(daq_adr or daq_map) # on channel only?
229 +run.to_array(daq_adr) # on channel only?
230 +
231 +## interesting?
232 +# run.to_dask(daq_map)
233 +# run.to_xarray(daq_map)
234 +{{/code}}
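As a rough illustration of the to_xarray idea above: one channel's per-train data could be wrapped in a labelled array. Everything in this sketch (the helper name, the train_id coordinate, the argument names) is hypothetical and not an existing API.

{{code language="py" title="hypothetical to_xarray sketch"}}
import numpy as np
import xarray as xr


def channel_to_xarray(data, train_ids, channel):
    # wrap one channel's per-train data in a labelled DataArray;
    # names and dimensions are illustrative only
    data = np.asarray(data)
    extra_dims = tuple(f"dim_{i}" for i in range(1, data.ndim))
    return xr.DataArray(
        data,
        dims=("train_id",) + extra_dims,
        coords={"train_id": np.asarray(train_ids)},
        name=channel,
    )


# example: 5 trains of a 1D spectrum with 100 points each
da = channel_to_xarray(np.zeros((5, 100)), range(1000, 1005), "example/channel")
{{/code}}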
235 +
236 +\\
237 +
238 +\\
239 +{{/expand}}