
From version 17.1
edited by cpassow
on 2022-04-04 11:38
Change comment: There is no comment for this version
To version 37.1
edited by sndueste
on 2023-09-29 11:13
Change comment: There is no comment for this version


Page properties
Title
... ... @@ -1,1 +1,1 @@
1 -DEMO - Documentation
1 +DEMO - Working with FLASH data
Author
... ... @@ -1,1 +1,1 @@
1 -XWiki.cpassow
1 +XWiki.sndueste
Content
... ... @@ -1,47 +1,180 @@
1 -(% style="list-style-type: square;" %)
2 -* Short descriptions including Links:
3 -(% style="list-style-type: square;" %)
4 -** GPFS
5 -** JupyterHub
6 -** conda ?
1 +Experimental data is recorded as HDF files[[~[link~]>>doc:FLASHUSER.Data Acquisition and controls.Data Access at FLASH (DAQ, gpfs,\.\.\.).Offline data analysis (DAQ).The FLASH HDF5 structure.WebHome]] on the GPFS file system[[~[link~]>>doc:ASAP3.ASAP3 Data Storage for PETRA III]]. The access rights are linked to the user's DESY account and can be managed by the PI via the GAMMA portal[[~[link~]>>url:https://gamma-portal.desy.de/||shape="rect"]]. The experimental data can be downloaded via the GAMMA portal, but it is advisable to use the DESY computing infrastructure instead. Access points are SSH, the Maxwell display servers[[~[link~]>>url:https://confluence.desy.de/display/MXW/Maxwell+Cluster||shape="rect"]] and JupyterHub[[~[link~]>>url:https://confluence.desy.de/display/MXW/JupyterHub+on+Maxwell||shape="rect"]]. We recommend using JupyterHub for data exploration and the SLURM resources for high-performance computing; see FAB for easy use of the infrastructure.
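
The HDF files can be explored directly with standard Python tools such as h5py, for example from a JupyterHub notebook on Maxwell. The sketch below is only an illustration; the GPFS path and file name are placeholders and have to be replaced with those of your beamtime.

{{code language="py"}}
# Minimal sketch: peek into a FLASH HDF5 file on GPFS with h5py.
# The path below is hypothetical -- adapt it to your beamtime directory and run.
import h5py

run_file = "/asap3/flash/gpfs/bl1/2023/data/11012345/raw/hdf/example_run.h5"  # placeholder

with h5py.File(run_file, "r") as f:
    # recursively print all group and dataset names to get an overview of the recorded channels
    f.visit(print)
{{/code}}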
7 7  
8 8  \\
9 9  
10 -(% style="list-style-type: square;" %)
11 -* Links Repository
12 -(% style="list-style-type: square;" %)
13 -** including Method Description?
5 +{{expand title="How to log in to JupyterHub"}}
14 14  
7 +
8 +{{view-file att--filename="tmp.mp4" height="250"/}}
9 +{{/expand}}
10 +
15 15  \\
16 16  
17 -(% style="list-style-type: square;" %)
18 -* Links to Binder
13 +\\
19 19  
15 +{{info}}
16 +=== There are several options that help you work with the FLASH HDF5 data in Python ===
17 +
18 +The option currently being developed for large data sets: [[the FAB package>>url:https://hasfcpkg.desy.de/fab/fab.html||shape="rect"]] (see below),
19 +
20 +and for smaller projects: (% class="Object" %)[[https:~~/~~/gitlab.desy.de/christopher.passow/flash-daq-hdf>>url:https://gitlab.desy.de/christopher.passow/flash-daq-hdf||shape="rect"]]
21 +{{/info}}
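
As a rough idea of what these packages streamline, the hand-rolled sketch below reads a single per-train channel of a run into a pandas Series indexed by train ID using plain h5py. The file name and dataset paths are assumptions for illustration only; inspect your files (or use FAB / flash-daq-hdf directly) for the actual layout.

{{code language="py"}}
# Hand-rolled sketch (illustration only): read one per-train channel into a pandas Series.
# File name and dataset paths are hypothetical -- check your HDF file for the real ones.
import h5py
import pandas as pd

run_file = "example_run.h5"                          # placeholder file name
train_id_path = "/Timing/train ID"                   # assumed dataset with one train ID per entry
channel_path = "/FL1/Photon Diagnostic/GMD/energy"   # assumed channel with one value per train

with h5py.File(run_file, "r") as f:
    train_ids = f[train_id_path][:]
    values = f[channel_path][:]

series = pd.Series(values, index=train_ids, name=channel_path)
print(series.head())
{{/code}}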
22 +
20 20  \\
21 21  
22 -(% style="list-style-type: square;" %)
23 -* Screencast
25 +[[~[~[image:attach:image2023-9-29_11-1-37.png~]~]>>url:https://hasfcpkg.desy.de/fab/fab.html||shape="rect"]]
24 24  
25 25  \\
26 26  
29 +{{expand title="older ideas ..."}}
30 +(% class="Object" %)(object oriented) [[https:~~/~~/gitlab.desy.de/christopher.passow/fdh-builder>>url:https://gitlab.desy.de/christopher.passow/fdh-builder.git||shape="rect"]]
31 +
32 +----
33 +
34 +=== TODO ===
35 +
36 +(% class="task-list" %)
37 +(((
38 +{{task reference="/Tasks/Task_18" status="InProgress"}}
39 +Short descriptions including Links:   → as Text\\
40 +
41 +(% class="task-list" %)
42 +(((
43 +
44 +
45 +{{task reference="/Tasks/Task_19" status="InProgress"}}
46 +GPFS
47 +{{/task}}
48 +
49 +{{task reference="/Tasks/Task_40" status="InProgress"}}
50 +Access rights
51 +{{/task}}
52 +
53 +{{task reference="/Tasks/Task_41" status="InProgress"}}
54 +Gamma Portal
55 +{{/task}}
56 +
57 +{{task reference="/Tasks/Task_42" status="InProgress"}}
58 +Maxwell
59 +{{/task}}
60 +
61 +{{task reference="/Tasks/Task_20" status="InProgress"}}
62 +JupyterHub
63 +{{/task}}
64 +
65 +{{task reference="/Tasks/Task_21" status="InProgress"}}
66 +conda ?
67 +{{/task}}
68 +
69 +{{task reference="/Tasks/Task_36" status="InProgress"}}
70 +explain installing from a channel instead of a fixed environment (an environment file from the example repository can still be used)
71 +{{/task}}
72 +)))
73 +{{/task}}
74 +)))
75 +
27 27  \\
28 28  
29 -Questions:
78 +(% class="task-list" %)
79 +(((
80 +{{task reference="/Tasks/Task_22" status="InProgress"}}
81 +distribution
30 30  
31 -(% style="list-style-type: square;" %)
32 -* for whom
33 -* where Maxwell / local / extern
34 -* distribution\\
35 -(% style="list-style-type: square;" %)
36 -** channel?
37 -(% style="list-style-type: square;" %)
38 -*** where official hosted (DESY, privat, conda-forge)
39 -** enviroment?
40 -(% style="list-style-type: square;" %)
41 -*** via files / already created?
83 +(% class="task-list" %)
84 +(((
85 +
42 42  
87 +{{task reference="/Tasks/Task_23" status="InProgress"}}
88 +channel  (where to host?)
89 +{{/task}}
90 +
91 +{{task reference="/Tasks/Task_24" status="InProgress"}}
92 +environment file (repository with examples)
93 +{{/task}}
94 +)))
95 +{{/task}}
96 +)))
97 +
43 43  \\
44 44  
100 +(% class="task-list" %)
101 +(((
102 +{{task reference="/Tasks/Task_25" status="InProgress"}}
103 +Documentation
104 +
105 +(% class="task-list" %)
106 +(((
107 +
108 +
109 +{{task reference="/Tasks/Task_26" status="InProgress"}}
110 +here vs. repository vs. Sphinx
111 +{{/task}}
112 +)))
113 +{{/task}}
114 +)))
115 +
116 +\\
117 +
118 +(% class="task-list" %)
119 +(((
120 +{{task reference="/Tasks/Task_27" status="InProgress"}}
121 +Binder
122 +
123 +(% class="task-list" %)
124 +(((
125 +
126 +
127 +{{task reference="/Tasks/Task_28" status="InProgress"}}
128 +examples with Stefan
129 +{{/task}}
130 +)))
131 +{{/task}}
132 +)))
133 +
134 +\\
135 +
136 +(% class="task-list" %)
137 +(((
138 +{{task reference="/Tasks/Task_29" status="InProgress"}}
139 +Screencast
140 +
141 +(% class="task-list" %)
142 +(((
143 +
144 +
145 +{{task reference="/Tasks/Task_30" status="InProgress"}}
146 +log in to JupyterHub
147 +{{/task}}
148 +
149 +{{task reference="/Tasks/Task_31" status="InProgress"}}
150 +log in to the Maxwell display server
151 +{{/task}}
152 +
153 +{{task reference="/Tasks/Task_32" status="InProgress"}}
154 +use slix
155 +{{/task}}
156 +
157 +{{task reference="/Tasks/Task_33" status="InProgress"}}
158 +use the hdfview plugin in JupyterLab
159 +{{/task}}
160 +
161 +{{task reference="/Tasks/Task_34" status="InProgress"}}
162 +create conda env with flashh5
163 +{{/task}}
164 +)))
165 +{{/task}}
166 +)))
167 +
168 +\\
169 +
170 +\\
171 +
172 +----
173 +
174 +==== under review ====
175 +
176 +\\
177 +
45 45  {{code language="bash"}}
46 46  conda create -n flashh5 python=3.10  # 3.10 is not required; 3.8 or higher is preferred
47 47  conda activate flashh5
... ... @@ -62,19 +62,20 @@
62 62  {{code language="py" title="moved to repository?"}}
63 63  class RunDirectory:
64 64  
65 - def get_run_table(): # more or less information? RunComment | Number of Files
198 + def get_run_table(): # more or less information? RunComment | Number of Files | start & stop time ?
66 66   ...
67 67  
68 - def get_run(daq, run_number): # daq is not needed
201 + def get_run(daq, run_number): # daq is not needed!
202 +
69 69   ...
70 70  
71 71  
72 -class Run:
206 +class Run: # constructor optionally without RunDirectory, or use RunDirectory's self.path there
73 73  
74 74   def get_files():
75 75   ...
76 76  
77 - def get_channels(): # of file #1?
211 + def get_channels(): # of file #1
78 78   ...
79 79  
80 80   def get_start_time(): # better as attribute?
... ... @@ -82,11 +82,8 @@
82 82  
83 83   def get_stop_time(): # which? | better as attribute?
84 84   ...
85 -
86 -### for following methods to restrict number of files or separate method
87 -### e.g. create_df(files)
88 88  
89 - def to_df(daq_map):
220 + def to_df(daq_map): # to_df(daq_map, slice) slice=[0:4] -> throw Exception
90 90   ...
91 91  
92 92   def to_series(channel):
... ... @@ -101,9 +101,12 @@
101 101  run.to_series(daq_adr or daq_map) # on channel only?
102 102  run.to_array(daq_adr) # on channel only?
103 103  
104 -## is this interesting?
235 +## interesting?
105 105  # run.to_dask(daq_map)
106 106  # run.to_xarray(daq_map)
107 107  {{/code}}
108 108  
109 109  \\
241 +
242 +\\
243 +{{/expand}}