Loadstatistics Namespace Reference

Functions

def load_statistics (filenames, opt=None)
 
def load_file (filename, opt)
 
def load_stat_file (filename)
 
def load_data_file (filename)
 
def make_readable (data)
 
def get_standard_variables (data, opt)
 
def get_depth_variables (data)
 
def dsort_ev (V, D)
 
def get_momentum_equation (data)
 
def read_ene (statname, data)
 
def read_restart (statname, data)
 
def nabla (variable, x, y, z)
 
def nablaCenter (variable, x, y, z)
 

Function Documentation

◆ dsort_ev()

def Loadstatistics.dsort_ev (   V,
  D 
)
def dsort_ev(V, D):
    """Sort eigenpairs by descending eigenvalue.

    Parameters
    ----------
    V : (3, 3) ndarray
        Eigenvectors, one per column (column i belongs to D[i, i]).
    D : (3, 3) ndarray
        Diagonal matrix of eigenvalues (as produced by np.diag(eigvals)).

    Returns
    -------
    Vs, Ds : ndarray
        Column-permuted eigenvector matrix and diagonal eigenvalue matrix
        with Ds[0, 0] >= Ds[1, 1] >= Ds[2, 2].

    Notes
    -----
    BUGFIX: the original hand-rolled swap network operated on aliases of
    the caller's arrays (Ds = D; Vs = V) and therefore mutated the inputs
    in place. This version leaves the inputs untouched. A stable sort
    preserves the relative order of equal eigenvalues, matching the strict
    '<' comparisons of the original bubble sort. Assumes D is diagonal
    (as it is for every caller in this file) -- TODO confirm if reused.
    """
    eigenvalues = np.diag(D)
    # Stable descending order: ties keep their original relative order.
    order = np.argsort(-eigenvalues, kind='stable')
    Vs = V[:, order]
    Ds = np.diag(eigenvalues[order])
    return Vs, Ds
421 
422 
def dsort_ev(V, D)
Definition: Loadstatistics.py:388

Referenced by get_standard_variables().

◆ get_depth_variables()

def Loadstatistics.get_depth_variables (   data)
def get_depth_variables(data):
    """Compute depth-averaged flow quantities (shear rate, flow height,
    depth-averaged velocities, Froude number, ...) and add them to *data*.

    Parameters
    ----------
    data : dict
        Field data as produced by get_standard_variables(); must contain
        'z', 'VelocityX', 'Density', 'Momentum*', 'Stress*', 'VolumeFraction'
        and the grid sizes 'nx'/'ny'.

    Returns
    -------
    dict
        The same dictionary, extended in place.
    """
    # Uniform grid spacing in z, taken from the first two grid points.
    dz = np.diff(data['z'][0:2, 0])
    data['ShearRate'] = np.diff(data['VelocityX']) / dz
    # Pad with the last value so ShearRate has the same length as VelocityX.
    data['ShearRate'] = np.append(data['ShearRate'], data['ShearRate'][-1])
    if 'd' in data:
        inertial = data['ShearRate'] * data['d'] / np.sqrt(data['StressZZ'] / data['ParticleDensity'])
        # NOTE(review): 'InertialParamter' (sic) is kept so existing callers
        # keep working; the correctly spelled key is provided as well.
        data['InertialParamter'] = inertial
        data['InertialParameter'] = inertial
    data['FlowVolumeFraction'] = np.mean(data['VolumeFraction'])
    data['FlowDensity'] = np.mean(data['Density'], 1)
    data['FlowMomentumX'] = np.mean(data['MomentumX'], 1)
    data['FlowMomentumY'] = np.mean(data['MomentumY'], 1)
    data['FlowMomentumZ'] = np.mean(data['MomentumZ'], 1)
    data['FlowVelocityX'] = data['FlowMomentumX'] / data['FlowDensity']
    data['FlowVelocityY'] = data['FlowMomentumY'] / data['FlowDensity']
    data['FlowVelocityZ'] = data['FlowMomentumZ'] / data['FlowDensity']
    data['BottomFrictionCoefficient'] = -data['StressXZ'][0, :] / data['StressZZ'][0, :]
    if not (data['nx'] == 1 and data['ny'] == 1):
        # Flow height: highest z where the contact stress is non-zero.
        ContactStressZZ = data['NormalStressZZ'] + data['TangentialStressZZ']
        data['FlowHeight'] = np.max(data['z'] * (ContactStressZZ != 0), 1)
    else:
        # 1D (z-only) data: locate base and surface from the cumulative
        # density profile, discarding a fraction kappa at either end.
        kappa = 0.02
        IntDensity = np.cumsum(data['Density'])
        Base = np.min(data['z'][IntDensity >= kappa * np.max(IntDensity)])
        Surface = np.max(data['z'][IntDensity <= (1 - kappa) * np.max(IntDensity)])
        data['FlowHeight'] = (Surface - Base) / (1 - 2 * kappa)
        data['Base'] = Base - kappa * data['FlowHeight']
        data['Surface'] = Surface + kappa * data['FlowHeight']
        # BUGFIX: the original used the Python 'and' operator here, which
        # raises "truth value of an array is ambiguous" for numpy arrays;
        # use element-wise '&' (with parentheses) instead. This belongs in
        # the 1D branch since 'Base' only exists here.
        FlowHeightIndex = (data['z'] > data['Base']) & (data['z'] < data['Base'] + data['FlowHeight'])
        # NOTE(review): computing Froude only when NO cell lies inside the
        # flow looks inverted -- behaviour preserved, TODO confirm intent.
        if not FlowHeightIndex.any() and 'Gravity' in data:
            data['Froude'] = np.linalg.norm(
                [data['FlowVelocityX'], data['FlowVelocityY'], data['FlowVelocityZ']]) / np.sqrt(
                data['FlowHeight'] * (-data['Gravity'][2]))
    return data
387 
def get_depth_variables(data)
Definition: Loadstatistics.py:355

Referenced by get_standard_variables().

◆ get_momentum_equation()

def Loadstatistics.get_momentum_equation (   data)
def get_momentum_equation(data):
    """Evaluate the residual of the momentum balance
    v . grad(rho v) - rho g + div(sigma) + t, and the magnitude of its
    largest contributing term.

    Uses the analytic gradients stored in the statistics file when present
    ('*_dx' keys); otherwise estimates them numerically with nabla().

    Returns
    -------
    remainder : ndarray
        Component-wise residual of the momentum equation.
    maximum : float
        Largest entry among the individual terms; useful to judge the
        relative size of the residual.
    """
    if 'VolumeFraction_dx' in data:
        # Gradients were written by the statistics tool; use them directly.
        NablaMomentumX = data['ParticleDensity'][0] * np.array(
            [data['MomentumX_dx'], data['MomentumX_dy'], data['MomentumX_dz']])
        NablaMomentumY = data['ParticleDensity'][0] * np.array(
            [data['MomentumY_dx'], data['MomentumY_dy'], data['MomentumY_dz']])
        NablaMomentumZ = data['ParticleDensity'][0] * np.array(
            [data['MomentumZ_dx'], data['MomentumZ_dy'], data['MomentumZ_dz']])
        NablaDotStress = np.array([data['StressXX_dx'] + data['StressXY_dy'] + data['StressXZ_dz'],
                                   data['StressYX_dx'] + data['StressYY_dy'] + data['StressYZ_dz'],
                                   data['StressZX_dx'] + data['StressZY_dy'] + data['StressZZ_dz']])
    else:
        print('estimating gradient!')
        NablaMomentumX = nabla(data['ParticleDensity'][0] * data['VolumeFraction'] * data['VelocityX'], data['x'],
                               data['y'], data['z'])
        NablaMomentumY = nabla(data['ParticleDensity'][0] * data['VolumeFraction'] * data['VelocityY'], data['x'],
                               data['y'], data['z'])
        NablaMomentumZ = nabla(data['ParticleDensity'][0] * data['VolumeFraction'] * data['VelocityZ'], data['x'],
                               data['y'], data['z'])

        NablaDotStress = nabla(np.array(
            [data['StressXX'], data['StressXY'], data['StressXZ'], data['StressYY'], data['StressYZ'],
             data['StressZZ']]), data['x'], data['y'], data['z'])

    # BUGFIX: the original used VelocityX twice (copy-paste error); the
    # velocity vector is (VelocityX, VelocityY, VelocityZ).
    Velocity = np.array([data['VelocityX'], data['VelocityY'], data['VelocityZ']])
    VelocityDotNablaMomentum = np.array([np.sum(Velocity * NablaMomentumX, axis=0),
                                         np.sum(Velocity * NablaMomentumY, axis=0),
                                         np.sum(Velocity * NablaMomentumZ, axis=0)])

    DensityGravity = data['ParticleDensity'][0] * data['VolumeFraction'] * data['Gravity']

    Traction = np.array([data['TractionX'], data['TractionY'], data['TractionZ']])

    remainder = VelocityDotNablaMomentum - DensityGravity + NablaDotStress + Traction
    maximum = np.max(np.array([np.max(VelocityDotNablaMomentum), np.max(DensityGravity), np.max(NablaDotStress)]))
    return remainder, maximum
461 
EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE Packet print(const Packet &a)
Definition: GenericPacketMath.h:1166
def nabla(variable, x, y, z)
Definition: Loadstatistics.py:572
def get_momentum_equation(data)
Definition: Loadstatistics.py:423

References nabla(), and Eigen::internal.print().

Referenced by get_standard_variables().

◆ get_standard_variables()

def Loadstatistics.get_standard_variables (   data,
  opt 
)
def get_standard_variables(data, opt):
    """Derive standard continuum fields (velocity, granular temperature,
    kinetic/contact/total stresses, pressure, ...) from the raw statistics
    in *data*.

    Parameters
    ----------
    data : dict
        Raw fields from make_readable().
    opt : dict or sequence
        Options; when it contains 'basic' only velocity, temperature and
        coordination number are computed.

    Returns
    -------
    dict
        The same dictionary, extended in place.
    """
    # Momenta are NaN where no particles were sampled; treat those as zero.
    data['MomentumX'][np.isnan(data['MomentumX'])] = 0
    data['MomentumY'][np.isnan(data['MomentumY'])] = 0
    data['MomentumZ'][np.isnan(data['MomentumZ'])] = 0

    data['VelocityX'] = data['MomentumX'] / data['Density']
    data['VelocityY'] = data['MomentumY'] / data['Density']
    data['VelocityZ'] = data['MomentumZ'] / data['Density']

    data['VelocityX'][np.isnan(data['VelocityX'])] = 0
    data['VelocityY'][np.isnan(data['VelocityY'])] = 0
    data['VelocityZ'][np.isnan(data['VelocityZ'])] = 0

    # Granular temperature: fluctuating kinetic energy per unit mass.
    data['Temperature'] = (data['MomentumFluxXX'] + data['MomentumFluxYY'] + data['MomentumFluxZZ']) / data[
        'Density'] - (data['VelocityX'] ** 2 + data['VelocityY'] ** 2 + data['VelocityZ'] ** 2) / 3
    data['Temperature'][np.isnan(data['Temperature'])] = 0

    data['Coordinationnumber'] = (data['FabricXX'] + data['FabricYY'] + data['FabricZZ']) / data['VolumeFraction']
    data['Coordinationnumber'][np.isnan(data['Coordinationnumber'])] = 0

    if 'basic' in opt:
        return data

    data['TractionX'] = data['NormalTractionX'] + data['TangentialTractionX']
    data['TractionY'] = data['NormalTractionY'] + data['TangentialTractionY']
    data['TractionZ'] = data['NormalTractionZ'] + data['TangentialTractionZ']
    # Kinetic (streaming) stress: momentum flux minus the mean-flow part.
    data['KineticStressXX'] = data['MomentumFluxXX'] - data['Density'] * data['VelocityX'] * data['VelocityX']
    data['KineticStressXY'] = data['MomentumFluxXY'] - data['Density'] * data['VelocityX'] * data['VelocityY']
    data['KineticStressXZ'] = data['MomentumFluxXZ'] - data['Density'] * data['VelocityX'] * data['VelocityZ']
    data['KineticStressYY'] = data['MomentumFluxYY'] - data['Density'] * data['VelocityY'] * data['VelocityY']
    data['KineticStressYZ'] = data['MomentumFluxYZ'] - data['Density'] * data['VelocityY'] * data['VelocityZ']
    data['KineticStressZZ'] = data['MomentumFluxZZ'] - data['Density'] * data['VelocityZ'] * data['VelocityZ']

    data['ContactStressXX'] = data['NormalStressXX'] + data['TangentialStressXX']
    data['ContactStressXY'] = data['NormalStressXY'] + data['TangentialStressXY']
    data['ContactStressXZ'] = data['NormalStressXZ'] + data['TangentialStressXZ']
    data['ContactStressYX'] = data['NormalStressYX'] + data['TangentialStressYX']
    data['ContactStressYY'] = data['NormalStressYY'] + data['TangentialStressYY']
    data['ContactStressYZ'] = data['NormalStressYZ'] + data['TangentialStressYZ']
    data['ContactStressZX'] = data['NormalStressZX'] + data['TangentialStressZX']
    data['ContactStressZY'] = data['NormalStressZY'] + data['TangentialStressZY']
    data['ContactStressZZ'] = data['NormalStressZZ'] + data['TangentialStressZZ']

    # Total stress = contact + kinetic (kinetic stress is symmetric, so the
    # XY/XZ/YZ components are reused for YX/ZX/ZY).
    data['StressXX'] = data['NormalStressXX'] + data['TangentialStressXX'] + data['KineticStressXX']
    data['StressXY'] = data['NormalStressXY'] + data['TangentialStressXY'] + data['KineticStressXY']
    data['StressXZ'] = data['NormalStressXZ'] + data['TangentialStressXZ'] + data['KineticStressXZ']
    data['StressYX'] = data['NormalStressYX'] + data['TangentialStressYX'] + data['KineticStressXY']
    data['StressYY'] = data['NormalStressYY'] + data['TangentialStressYY'] + data['KineticStressYY']
    data['StressYZ'] = data['NormalStressYZ'] + data['TangentialStressYZ'] + data['KineticStressYZ']
    data['StressZX'] = data['NormalStressZX'] + data['TangentialStressZX'] + data['KineticStressXZ']
    data['StressZY'] = data['NormalStressZY'] + data['TangentialStressZY'] + data['KineticStressYZ']
    data['StressZZ'] = data['NormalStressZZ'] + data['TangentialStressZZ'] + data['KineticStressZZ']
    data['Pressure'] = (data['StressXX'] + data['StressYY'] + data['StressZZ']) / 3

    # BUGFIX: the original mixed total stresses (StressYY, StressZZ) into
    # the *kinetic* pressure; use the kinetic stress trace throughout.
    data['KineticPressure'] = (data['KineticStressXX'] + data['KineticStressYY'] + data['KineticStressZZ']) / 3

    # macroscopic friction coefficient, or deviator-stress ratio
    # sD1 := (S1-S3)/(2*p) or sD* (to be discussed)
    data['MacroFrictionCoefficient'] = np.zeros(data['x'].shape)
    data['Sd'] = np.zeros(data['x'].shape)
    data['SdStar'] = np.zeros(data['x'].shape)
    # BUGFIX: the original looped twice over len(data['x']) and stored the
    # results with a single index [j] (wrong for 2-D grids, IndexError for
    # 1-D ones); iterate over every grid point and store per point.
    for idx in np.ndindex(data['x'].shape):
        Stress = np.array([[data['StressXX'][idx], data['StressXY'][idx], data['StressXZ'][idx]],
                           [data['StressXY'][idx], data['StressYY'][idx], data['StressYZ'][idx]],
                           [data['StressXZ'][idx], data['StressYZ'][idx], data['StressZZ'][idx]]])
        if not Stress.any() or np.isnan(Stress).any():
            # Empty or invalid cell: no meaningful stress state.
            data['MacroFrictionCoefficient'][idx] = np.nan
            data['Sd'][idx] = np.nan
            data['SdStar'][idx] = np.nan
        else:
            d1, v1 = np.linalg.eig(Stress)
            v, dmat = dsort_ev(v1, np.diag(d1))
            d = np.diag(dmat)  # principal stresses, descending
            p = np.sum(d) / 3
            data['MacroFrictionCoefficient'][idx] = -data['StressXZ'][idx] / data['StressZZ'][idx]
            data['Sd'][idx] = (d[0] - d[2]) / (2 * p)
            data['SdStar'][idx] = np.sqrt(((d[0] - d[2]) ** 2 + (d[1] - d[2]) ** 2 + (d[0] - d[1]) ** 2) / (6 * p ** 2))

    if 'VolumeFraction_dz' in data:
        # Gradients of the velocity field via the quotient rule.
        for i in 'XYZ':
            for k in 'xyz':
                vector = i + '_d' + k
                data['Velocity' + vector] = (data['Momentum' + vector] * data['Density'] - data['Momentum' + i] * data[
                    'Density_d' + k]) / data['Density'] ** 2
                data['Velocity' + vector][np.isnan(data['Velocity' + vector])] = 0
        for i in 'XYZ':
            for j in 'XYZ':
                for k in 'xyz':
                    tensor = i + j + '_d' + k
                    # MomentumFlux is stored symmetric (upper triangle only);
                    # swap the component letters for the lower triangle.
                    # BUGFIX: the original built tensor[2]+tensor[1]+tensor[3:],
                    # producing broken keys like 'MomentumFlux_Xdx'.
                    if i < j:
                        symtensor = tensor
                    else:
                        symtensor = tensor[1] + tensor[0] + tensor[2:]
                    data['KineticStress' + tensor] = data['MomentumFlux' + symtensor] - data['Momentum' + i + '_d' + k] * data['Velocity' + j] - data['Momentum' + j + '_d' + k] * data['Velocity' + i]
                    data['Stress' + tensor] = data['NormalStress' + tensor] + data['TangentialStress' + tensor] + data['KineticStress' + tensor]
    if data['nz'] > 1 and data['nx'] == 1 and data['ny'] == 1:
        data = get_depth_variables(data)
    if 'ParticleDensity' in data:
        data['MomentumEquationsRemainder'], data['MomentumEquationsMaximum'] = get_momentum_equation(data)
    return data
354 
def get_standard_variables(data, opt)
Definition: Loadstatistics.py:254

References dsort_ev(), get_depth_variables(), and get_momentum_equation().

Referenced by load_file().

◆ load_data_file()

def Loadstatistics.load_data_file (   filename)
def load_data_file(filename):
    """Read a '.data' file (fstat-style column dump) into a data dict.

    The raw columns are remapped onto the 36 standard statistics variables
    (see 'variable_names'); columns that have no counterpart in the data
    file (EnergyFlux*, CollisionalHeatFlux*, Dissipation) are filled with
    NaN.

    NOTE(review): the raw-column indices below follow the original code
    (adjusted so the momentum slice yields 3 columns and the total is the
    required 36) -- confirm against the writer of the .data format.
    """
    data = {}
    data['name'] = filename[:-5]  # strip the '.data' extension
    # 'delim_whitespace' is deprecated in pandas >= 2.2; sep=r'\s+' is the
    # documented equivalent.
    rawdata = pd.read_csv(filename, sep=r'\s+', skiprows=17)
    data['variable_names'] = ['VolumeFraction', 'Density', 'MomentumX', 'MomentumY', 'MomentumZ', 'MomentumFluxXX', 'MomentumFluxXY', 'MomentumFluxXZ', 'MomentumFluxYY', 'MomentumFluxYZ', 'MomentumFluxZZ', 'EnergyFluxX', 'EnergyFluxY', 'EnergyFluxZ', 'NormalStressXX', 'NormalStressXY', 'NormalStressXZ', 'NormalStressYY', 'NormalStressYZ', 'NormalStressZZ', 'TangentialStressXX', 'TangentialStressXY', 'TangentialStressXZ', 'TangentialStressYY', 'TangentialStressYZ', 'TangentialStressZZ', 'FabricXX', 'FabricXY', 'FabricXZ', 'FabricYY', 'FabricYZ', 'FabricZZ', 'CollisionalHeatFluxX', 'CollisionalHeatFluxY', 'CollisionalHeatFluxZ', 'Dissipation']
    # data['text'] = rawdata.iloc[0,4] + str(rawdata.iloc[0,8]*100)
    rho = rawdata.iloc[1, 4]
    data['time'] = 0
    data['coordinates'] = rawdata.iloc[:, 3:6]
    # Clamp to a tiny positive value so later divisions never hit 0.
    VolumeFraction = np.maximum(rawdata.iloc[:, 10], 1e-200 * np.ones(rawdata.shape[0]))
    # BUGFIX: the original concatenated 1-D Series with 2-D frames (which
    # np.concatenate rejects on axis=1) and took only 2 momentum columns
    # for the 3 momentum variables; use a column vector and 51:54.
    vf = VolumeFraction.values[:, np.newaxis]
    nrows = rawdata.shape[0]
    data['variables'] = pd.DataFrame(np.concatenate((
        vf,                                                  # VolumeFraction
        vf * rho,                                            # Density
        rawdata.iloc[:, 51:54].values * vf,                  # MomentumX/Y/Z
        rawdata.iloc[:, [41, 42, 43, 45, 46, 49]].values,    # MomentumFlux*
        np.full((nrows, 3), np.nan),                         # EnergyFlux* (absent)
        rawdata.iloc[:, [21, 22, 23, 25, 26, 29]].values,    # NormalStress*
        rawdata.iloc[:, [31, 32, 33, 35, 36, 39]].values,    # TangentialStress*
        rawdata.iloc[:, [71, 72, 73, 75, 76, 79]].values,    # Fabric*
        np.full((nrows, 4), np.nan)), axis=1))               # HeatFlux*, Dissipation (absent)
    return data
166 
167 # def load_data_file(filename):
168 # data = {}
169 # data['name'] = filename[:-5]
170 #
171 # # load raw data from file, with the first three lines as header files (such
172 # # that matlab recognizes the array size)
173 # rawdata = np.loadtxt(filename,delimiter=' ', skiprows=17)
174 #
175 # # reads variable names from line 1 and text output
176 # # from line 2
177 # data['variable_names'] = ['VolumeFraction', 'Density', 'MomentumX', 'MomentumY', 'MomentumZ', 'MomentumFluxXX',
178 # 'MomentumFluxXY', 'MomentumFluxXZ', 'MomentumFluxYY', 'MomentumFluxYZ', 'MomentumFluxZZ',
179 # 'EnergyFluxX', 'EnergyFluxY', 'EnergyFluxZ', 'NormalStressXX', 'NormalStressXY',
180 # 'NormalStressXZ', 'NormalStressYY', 'NormalStressYZ', 'NormalStressZZ',
181 # 'TangentialStressXX', 'TangentialStressXY', 'TangentialStressXZ', 'TangentialStressYY',
182 # 'TangentialStressYZ', 'TangentialStressZZ', 'FabricXX', 'FabricXY', 'FabricXZ',
183 # 'FabricYY', 'FabricYZ', 'FabricZZ', 'CollisionalHeatFluxX', 'CollisionalHeatFluxY',
184 # 'CollisionalHeatFluxZ', 'Dissipation']
185 # with open(filename, 'r') as f:
186 # text = f.readlines()
187 # data['text'] = text[7][4:7] + str(float(text[7][8:12]) * 100)
188 # rho_text = text[10].split()
189 # rho = float(rho_text[-1])
190 #
191 # # writes the raw data into bits of data for each timestep
192 # # index_time = [find(isnan(rawdata.data(:,2))); size(rawdata.data,1)+1];
193 # data['time'] = 0
194 # data['coordinates'] = rawdata[:, 3:6]
195 # VolumeFraction = np.maximum(rawdata[:, 9], 1e-200 * np.ones((rawdata.shape[0], 1)))
196 # data['variables'] = [VolumeFraction, VolumeFraction * rho, rawdata.data[:, 51:53] * VolumeFraction[:, 1, 1,
197 # 1] * rho,
198 # rawdata.data[:, 41:43, 45, 46, 49], np.nan(np.size(rawdata.data, 1), 3),
199 # rawdata.data[:, 21:23, 25, 26, 29], rawdata.data[:, 31:33, 35, 36, 39],
200 # rawdata.data[:, 71:73, 75, 76, 79], np.nan(np.size(rawdata.data, 1), 4)]
201 # return data
202 
def load_data_file(filename)
Definition: Loadstatistics.py:146
void multiply(const CRDoubleMatrix *matrix, const DoubleVector &x, DoubleVector &soln)
Definition: trilinos_helpers.cc:657

References oomph::TrilinosEpetraHelpers.multiply().

Referenced by load_file().

◆ load_file()

def Loadstatistics.load_file (   filename,
  opt 
)
def load_file(filename, opt):
    """Load one '.data' or '.stat' statistics file.

    The raw file is parsed, reshaped onto its grid (make_readable) and,
    unless *opt* contains 'basic', extended with derived fields
    (get_standard_variables). Multi-timestep stat files come back as a
    list of dicts; each entry is processed the same way.
    """
    print('loading data from ' + filename)
    loader = load_data_file if filename[-5:] == '.data' else load_stat_file
    data = loader(filename)
    if isinstance(data, list):
        # One dict per time step: post-process each entry independently.
        for index, entry in enumerate(data):
            entry = make_readable(entry)
            if 'basic' not in opt:
                entry = get_standard_variables(entry, opt)
            data[index] = entry
    else:
        data = make_readable(data)
        if 'basic' not in opt:
            data = get_standard_variables(data, opt)
    return data
63 
64 
def load_stat_file(filename)
Definition: Loadstatistics.py:65
def make_readable(data)
Definition: Loadstatistics.py:203
def load_file(filename, opt)
Definition: Loadstatistics.py:40

References get_standard_variables(), load_data_file(), load_stat_file(), make_readable(), and Eigen::internal.print().

Referenced by load_statistics().

◆ load_stat_file()

def Loadstatistics.load_stat_file (   filename)
def load_stat_file(filename):
    """Parse a MercuryDPM '.stat' file into a data dictionary.

    Returns a single dict for one time step, or a list of dicts when the
    file contains several time steps (optionally with gradient blocks).
    Companion '.restart' and '.ene' information is merged in when those
    files exist.
    """
    data = {}
    data['name'] = filename[:filename.find('.stat')]

    rawdata = np.genfromtxt(filename, skip_header=3)

    # An empty/one-value file parses to a 0-d array; nothing more to read.
    if rawdata.ndim == 0:
        return data

    if rawdata.shape[1] == 26:
        # for old .stat files
        print('WARNING: outdated stat file')
        rawdata = np.hstack(
            (rawdata[:, :14], np.zeros((rawdata.shape[0], 3)), rawdata[:, 15:], np.zeros((rawdata.shape[0], 10))))
        data['variable_names'] = ['VolumeFraction', 'Density',
                                  'MomentumX', 'MomentumY', 'MomentumZ',
                                  'MomentumFluxXX', 'MomentumFluxXY', 'MomentumFluxXZ', 'MomentumFluxYY', 'MomentumFluxYZ', 'MomentumFluxZZ',
                                  'EnergyFluxX', 'EnergyFluxY', 'EnergyFluxZ',
                                  'NormalStressXX', 'NormalStressXY', 'NormalStressXZ', 'NormalStressYY', 'NormalStressYZ', 'NormalStressZZ',
                                  'TangentialStressXX', 'TangentialStressXY', 'TangentialStressXZ', 'TangentialStressYY',
                                  'TangentialStressYZ', 'TangentialStressZZ',
                                  'FabricXX', 'FabricXY', 'FabricXZ', 'FabricYY', 'FabricYZ', 'FabricZZ',
                                  'CollisionalHeatFluxX', 'CollisionalHeatFluxY', 'CollisionalHeatFluxZ',
                                  'Dissipation', ]
    else:
        variable_names = np.genfromtxt(filename, skip_header=0, max_rows=1, dtype=str)
        data['variable_names'] = variable_names

    # also allows input from the restart and ene files if they exist
    data = read_restart(filename, data)
    data = read_ene(filename, data)

    text = np.genfromtxt(filename, skip_header=1, max_rows=1, dtype=str)
    data['text'] = text

    # Rows whose third column is NaN separate the time-step blocks; the
    # final row count is appended as a sentinel.
    index_time = np.where(np.isnan(rawdata[:, 2]))[0]
    index_time = np.append(index_time, rawdata.shape[0])
    data['time'] = np.genfromtxt(filename, skip_header=2, max_rows=1)
    data['w'] = np.genfromtxt(filename, skip_header=1, max_rows=1, usecols=1)

    data['coordinates'] = rawdata[:index_time[0], :3]
    doGradient = any(['doGradient' in t for t in text])
    doVariance = any(['doVariance' in t for t in text])

    if len(index_time) == 1:
        data['variables'] = rawdata[:index_time[0], 3:]
    elif doVariance and len(index_time) == 2:
        data['variables'] = rawdata[:index_time[0], 3:]
        data['variance'] = rawdata[index_time[0] + 1:index_time[1], 3:]
    elif doGradient and len(index_time) == 4:
        data['variables'] = rawdata[:index_time[0], 3:]
        data['gradx'] = rawdata[index_time[0] + 1:index_time[1], 3:]
        data['grady'] = rawdata[index_time[1] + 1:index_time[2], 3:]
        data['gradz'] = rawdata[index_time[2] + 1:index_time[3], 3:]
    elif not doGradient:
        # Several time steps without gradients: one dict per step.
        dataTemplate = data
        data = [dataTemplate]
        data[0]['variables'] = rawdata[:index_time[0], 3:]
        # BUGFIX: the original appended the *same* dict object repeatedly,
        # so every list entry aliased (and overwrote) one dict; append a
        # fresh shallow copy per step instead.
        # NOTE(review): the loop bound (len(index_time) - 1) may drop the
        # final block -- confirm against a multi-step file.
        for i in range(1, len(index_time) - 1):
            entry = dataTemplate.copy()
            entry['time'] = rawdata[index_time[i - 1], :2]
            entry['variables'] = rawdata[index_time[i - 1] + 1:index_time[i], 3:]
            data.append(entry)
        print('multiple time steps ({}); creating cell output'.format(len(index_time) - 1))
    else:
        # Several time steps with gradient blocks (4 segments per step).
        dataTemplate = data
        data = [dataTemplate]
        data[0]['variables'] = rawdata[:index_time[0], 3:]
        data[0]['gradx'] = rawdata[index_time[0] + 1:index_time[1], 3:]
        data[0]['grady'] = rawdata[index_time[1] + 1:index_time[2], 3:]
        data[0]['gradz'] = rawdata[index_time[2] + 1:index_time[3], 3:]
        # BUGFIX: the original indexed the list with the float i/4 (a
        # TypeError in Python 3) and appended aliases of one dict; build a
        # fresh copy per step and append it in order.
        for i in range(8, len(index_time), 4):
            entry = dataTemplate.copy()
            entry['time'] = rawdata[index_time[i - 4], :2]
            entry['variables'] = rawdata[index_time[i - 4] + 1:index_time[i - 3], 3:]
            entry['gradx'] = rawdata[index_time[i - 3] + 1:index_time[i - 2], 3:]
            entry['grady'] = rawdata[index_time[i - 2] + 1:index_time[i - 1], 3:]
            entry['gradz'] = rawdata[index_time[i - 1] + 1:index_time[i], 3:]
            data.append(entry)
        print('multiple time steps ({}); creating cell output'.format(len(index_time) // 4))

    return data
145 
def read_ene(statname, data)
Definition: Loadstatistics.py:462
def read_restart(statname, data)
Definition: Loadstatistics.py:499
std::string format(const std::string &str, const std::vector< std::string > &find, const std::vector< std::string > &replace)
Definition: openglsupport.cpp:217

References format(), Eigen::internal.print(), read_ene(), and read_restart().

Referenced by load_file().

◆ load_statistics()

def Loadstatistics.load_statistics (   filenames,
  opt = None 
)
def load_statistics(filenames, opt=None):
    """Load one or more MercuryDPM statistics files.

    *filenames* may be a single filename, a glob pattern (containing '*'
    or '?'), or a list of filenames. A list (or a pattern) yields one data
    dict per file; a plain single filename yields one dict.
    """
    opt = {} if opt is None else opt
    if isinstance(filenames, list):
        return [load_file(name, opt) for name in filenames]
    if '*' in filenames or '?' in filenames:
        # Expand the pattern and recurse with the resulting file list.
        return load_statistics(glob.glob(filenames), opt)
    return load_file(filenames, opt)
39 
def load_statistics(filenames, opt=None)
Definition: Loadstatistics.py:28

References load_file().

◆ make_readable()

def Loadstatistics.make_readable (   data)
def make_readable(data):
    """Reshape the flat 'coordinates'/'variables' arrays onto the
    (nz, ny, nx) grid and split them into one named field per variable.

    Consumes 'coordinates', 'variables', 'variable_names' and (when
    present) 'variance'/'gradx'/'grady'/'gradz', replacing them with the
    individual fields 'x', 'y', 'z', 'Density', 'MomentumX', ... plus
    '_var'/'_dx'/'_dy'/'_dz' companions.
    """
    # Cells per direction: total rows divided by the number of rows that
    # share the first coordinate value in that direction.
    data['nx'] = int(len(data['coordinates'][:, 0]) / int(sum(data['coordinates'][:, 0] == data['coordinates'][0, 0])))
    data['ny'] = int(len(data['coordinates'][:, 1]) / int(sum(data['coordinates'][:, 1] == data['coordinates'][0, 1])))
    data['nz'] = int(len(data['coordinates'][:, 2]) / int(sum(data['coordinates'][:, 2] == data['coordinates'][0, 2])))
    # Squeeze drops singleton dimensions, e.g. (nz, 1, 1) -> (nz,).
    shape = np.shape(np.squeeze(np.zeros((data['nz'], data['ny'], data['nx']))))
    if np.prod(shape) != np.shape(data['coordinates'])[0]:
        print('Warning: cannot put xyz values on mesh')
        shape = (np.shape(data['coordinates'])[0], 1)
    data['x'] = np.reshape(data['coordinates'][:, 0], shape)
    data['y'] = np.reshape(data['coordinates'][:, 1], shape)
    data['z'] = np.reshape(data['coordinates'][:, 2], shape)
    del data['coordinates']

    # The array 'variables' is expanded into smaller arrays, each holding
    # one variable only (e.g. Density), named after variable_names.
    # If available, variance, gradx, grady, gradz are expanded the same way.

    # Remember whether variance data was present: the raw key is deleted
    # below, but the velocity variances still have to be computed later.
    had_variance = 'variance' in data
    if had_variance:
        # BUGFIX: the original used attribute access (data.variable_names)
        # on a dict, which raises AttributeError.
        for i in range(len(data['variable_names'])):
            data[data['variable_names'][i].split()[0] + '_var'] = np.reshape(data['variance'][:, i], shape)
        del data['variance']

    if 'gradx' in data:
        for i in range(len(data['variable_names'])):
            data[data['variable_names'][i].split()[0] + '_dx'] = np.reshape(data['gradx'][:, i], shape)
            data[data['variable_names'][i].split()[0] + '_dy'] = np.reshape(data['grady'][:, i], shape)
            data[data['variable_names'][i].split()[0] + '_dz'] = np.reshape(data['gradz'][:, i], shape)
        del data['gradx']
        del data['grady']
        del data['gradz']

    for i in range(len(data['variable_names'])):
        data[data['variable_names'][i].split()[0]] = np.reshape(data['variables'][:, i], shape)
    del data['variables']
    del data['variable_names']

    # BUGFIX: the original re-checked 'variance' (already deleted above),
    # so this block was dead code, and it misspelled the key 'Density)'.
    if had_variance:
        # Propagate relative variances of momentum and density onto the
        # derived velocity fields.
        data['VelocityX_var'] = (data['MomentumX'] / data['Density']) ** 2 * (
            data['MomentumX_var'] / data['MomentumX'] ** 2 + data['Density_var'] / data['Density'] ** 2)
        data['VelocityY_var'] = (data['MomentumY'] / data['Density']) ** 2 * (
            data['MomentumY_var'] / data['MomentumY'] ** 2 + data['Density_var'] / data['Density'] ** 2)
        data['VelocityZ_var'] = (data['MomentumZ'] / data['Density']) ** 2 * (
            data['MomentumZ_var'] / data['MomentumZ'] ** 2 + data['Density_var'] / data['Density'] ** 2)

    if 'Nu' in data:
        # Older files call the volume fraction 'Nu'; normalize the name.
        data['VolumeFraction'] = data['Nu']
        del data['Nu']
    return data
252 
253 
return int(ret)+1
void split(const DoubleVector &in_vector, Vector< DoubleVector * > &out_vector_pt)
Definition: double_vector.cc:1413

References int(), Eigen::internal.print(), and oomph::DoubleVectorHelpers.split().

Referenced by load_file().

◆ nabla()

def Loadstatistics.nabla (   variable,
  x,
  y,
  z 
)
def nabla(variable, x, y, z):
    """Estimate spatial derivatives of *variable* on the structured grid
    described by the coordinate arrays x, y, z.

    The second dimension of *variable* selects the field type:
    1 column -> scalar (returns the stacked gradient components),
    3 columns -> vector (returns the sum of directional derivatives),
    6 columns -> symmetric matrix in the order XX, XY, XZ, YY, YZ, ZZ
    (returns the three components of its divergence).

    NOTE(review): several details look fragile and should be confirmed
    before relying on this routine -- code is left untouched:
      * n is built with float division, but np.zeros requires integer
        dimensions on modern numpy;
      * the stencils subtract slices of unequal length (e.g. [:, :, 2:]
        minus [:, :, :-1]), which only broadcasts for specific sizes;
      * the flattened dXV/dYV/dZV arrays can differ in length, which
        np.vstack would reject.
    See nablaCenter() for the closely related variant.
    """
    # stores the grid dimensions
    n = [len(x) / sum(x[0] == x), len(y) / sum(y[0] == y), len(z) / sum(z[0] == z)]

    # stores coordinates in grid; n is reversed so arrays are indexed [z, y, x]
    X = np.zeros(n[::-1])
    X[:] = x
    Y = np.zeros(n[::-1])
    Y[:] = y
    Z = np.zeros(n[::-1])
    Z[:] = z
    V = np.zeros(n[::-1])

    # stores differentials (small number if X/Y/Z is constant)
    dX = np.maximum(1e-60, (X[:, :, 2:] - X[:, :, :-1]))
    dY = np.maximum(1e-60, (Y[:, 2:, :] - Y[:, :-1, :]))
    dZ = np.maximum(1e-60, (Z[2:, :, :] - Z[:-1, :, :]))

    if variable.shape[1] == 1:  # scalar variable
        V[:] = variable
        dXV = (V[:, :, 2:] - V[:, :, :-1]) / dX
        dYV = (V[:, 2:, :] - V[:, :-1, :]) / dY
        dZV = (V[2:, :, :] - V[:-1, :, :]) / dZ

        # gradient: one column per direction
        NablaVariable = np.vstack((dXV.flatten(), dYV.flatten(), dZV.flatten())).T
    elif variable.shape[1] == 3:  # vector
        V[:] = variable[:, 0]
        dXV = (V[:, :, 2:] - V[:, :, :-1]) / dX
        V[:] = variable[:, 1]
        dYV = (V[:, 2:, :] - V[:, :-1, :]) / dY
        V[:] = variable[:, 2]
        dZV = (V[2:, :, :] - V[:-1, :, :]) / dZ

        # divergence: sum of the three directional derivatives
        NablaVariable = dXV.flatten() + dYV.flatten() + dZV.flatten()
    elif variable.shape[1] == 6:  # symmetric matrix
        # x-derivatives of the first column of the tensor
        V[:] = variable[:, 0]
        dXVXX = (V[:, :, 2:] - V[:, :, :-1]) / dX
        V[:] = variable[:, 1]
        dXVYX = (V[:, :, 2:] - V[:, :, :-1]) / dX
        V[:] = variable[:, 2]
        dXVZX = (V[:, :, 2:] - V[:, :, :-1]) / dX

        # y-derivatives of the second column (symmetric storage)
        V[:] = variable[:, 1]
        dYVXY = (V[:, 2:, :] - V[:, :-1, :]) / dY
        V[:] = variable[:, 3]
        dYVYY = (V[:, 2:, :] - V[:, :-1, :]) / dY
        V[:] = variable[:, 4]
        dYVZY = (V[:, 2:, :] - V[:, :-1, :]) / dY

        # z-derivatives of the third column (symmetric storage)
        V[:] = variable[:, 2]
        dZVXZ = (V[2:, :, :] - V[:-1, :, :]) / dZ
        V[:] = variable[:, 4]
        dZVYZ = (V[2:, :, :] - V[:-1, :, :]) / dZ
        V[:] = variable[:, 5]
        dZVZZ = (V[2:, :, :] - V[:-1, :, :]) / dZ

        # divergence of the tensor: row-wise sums of the derivatives
        NablaVariable = np.vstack((dXVXX.flatten() + dYVXY.flatten() + dZVXZ.flatten(),
                                   dXVYX.flatten() + dYVYY.flatten() + dZVYZ.flatten(),
                                   dXVZX.flatten() + dYVZY.flatten() + dZVZZ.flatten())).T
    else:
        print('Error')

    return NablaVariable
635 
636 

References Eigen::internal.print().

Referenced by get_momentum_equation().

◆ nablaCenter()

def Loadstatistics.nablaCenter (   variable,
  x,
  y,
  z 
)
def nablaCenter(variable, x, y, z):
    """Variant of nabla() using forward differences ([1:] - [:-1]) on the
    structured grid described by x, y, z.

    The second dimension of *variable* selects the field type:
    1 column -> scalar, 3 columns -> vector, 6 columns -> symmetric matrix
    stored as XX, XY, XZ, YY, YZ, ZZ.

    NOTE(review): several details look broken and should be confirmed
    before relying on this routine -- code is left untouched:
      * n = [len(x), len(y), len(z)] assumes flat coordinate lists, unlike
        nabla() which divides out repeats;
      * expressions like 'dXV.flat + dYV.flat' add numpy flatiter objects,
        which raises TypeError (flatiter does not support '+');
      * the difference arrays shrink by one along different axes, so the
        np.vstack of their flattened forms can disagree in length.
    """
    # stores the grid dimensions
    n = [len(x), len(y), len(z)]

    # stores coordinates in grid; n is reversed so arrays are indexed [z, y, x]
    X = np.zeros(n[::-1])
    X.flat = x
    Y = np.zeros(n[::-1])
    Y.flat = y
    Z = np.zeros(n[::-1])
    Z.flat = z
    V = np.zeros(n[::-1])

    # stores differentials (small number if X/Y/Z is constant)
    dX = np.maximum(1e-60, (X[:, :, 1:] - X[:, :, :-1]))
    dY = np.maximum(1e-60, (Y[:, 1:] - Y[:, :-1]))
    dZ = np.maximum(1e-60, (Z[1:] - Z[:-1]))

    if variable.shape[1] == 1:  # scalar variable
        V.flat = variable
        dXV = (V[:, :, 1:] - V[:, :, :-1]) / dX
        dYV = (V[:, 1:] - V[:, :-1]) / dY
        dZV = (V[1:] - V[:-1]) / dZ

        # gradient: one column per direction
        NablaVariable = np.vstack((dXV.flat, dYV.flat, dZV.flat)).T
    elif variable.shape[1] == 3:  # vector
        V.flat = variable[:, 0]
        dXV = (V[:, :, 1:] - V[:, :, :-1]) / dX
        V.flat = variable[:, 1]
        dYV = (V[:, 1:] - V[:, :-1]) / dY
        V.flat = variable[:, 2]
        dZV = (V[1:] - V[:-1]) / dZ

        # divergence: sum of the three directional derivatives
        NablaVariable = dXV.flat + dYV.flat + dZV.flat
    elif variable.shape[1] == 6:  # symmetric matrix
        # x-derivatives of the first column of the tensor
        V.flat = variable[:, 0]
        dXVXX = (V[:, :, 1:] - V[:, :, :-1]) / dX
        V.flat = variable[:, 1]
        dXVYX = (V[:, :, 1:] - V[:, :, :-1]) / dX
        V.flat = variable[:, 2]
        dXVZX = (V[:, :, 1:] - V[:, :, :-1]) / dX

        # y-derivatives of the second column (symmetric storage)
        V.flat = variable[:, 1]
        dYVXY = (V[:, 1:] - V[:, :-1]) / dY
        V.flat = variable[:, 3]
        dYVYY = (V[:, 1:] - V[:, :-1]) / dY
        V.flat = variable[:, 4]
        dYVZY = (V[:, 1:] - V[:, :-1]) / dY

        # z-derivatives of the third column (symmetric storage)
        V.flat = variable[:, 2]
        dZVXZ = (V[1:] - V[:-1]) / dZ
        V.flat = variable[:, 4]
        dZVYZ = (V[1:] - V[:-1]) / dZ
        V.flat = variable[:, 5]
        dZVZZ = (V[1:] - V[:-1]) / dZ

        # divergence of the tensor: row-wise sums of the derivatives
        NablaVariable = np.vstack((dXVXX.flat + dYVXY.flat + dZVXZ.flat, dXVYX.flat + dYVYY.flat + dZVYZ.flat,
                                   dXVZX.flat + dYVZY.flat + dZVZZ.flat)).T
    else:
        print('Error')
    return NablaVariable
def nablaCenter(variable, x, y, z)
Definition: Loadstatistics.py:637

References Eigen::internal.print().

◆ read_ene()

def Loadstatistics.read_ene (   statname,
  data 
)
def read_ene(statname, data):
    """Read the '.ene' (energy) file belonging to the stat file *statname*
    and store its eight columns under data['Ene'].

    If '<name>.ene' does not exist, trailing '.<suffix>' parts are
    stripped from the name (e.g. problem.X.stat -> problem.ene) before
    giving up. *data* is returned unchanged when no readable ene file is
    found.
    """
    # load ene data
    filename = statname[:-5] + '.ene'
    if not os.path.isfile(filename):
        # if unsuccessful, load ene data with last part of filename removed
        dots = statname.count('.')
        if dots > 1:
            splitfilename = os.path.splitext(filename)
            for dot in range(1, dots):
                splitfilename = os.path.splitext(splitfilename[0])
            filename = splitfilename[0] + '.ene'
            if not os.path.isfile(filename):
                # if unsuccessful, give up
                print(filename + ' not found')
            else:
                print(statname[:-5] + '.ene not found, using ' + filename + ' instead')
    if os.path.isfile(filename):
        # load raw data from file, skipping the single header line
        rawdata = np.genfromtxt(filename, skip_header=1)
        # BUGFIX: a file with exactly one record parses to a 1-D array, so
        # rawdata.shape[1] would raise IndexError; promote to 2-D first.
        rawdata = np.atleast_2d(rawdata)
        # if tabs are used as delimiters
        if rawdata.shape[1] != 8:
            rawdata = np.atleast_2d(np.genfromtxt(filename, delimiter='\t', skip_header=1))
        if rawdata.shape[1] != 8:
            print(filename + ' not readable')
            # BUGFIX: the original returned None here, clobbering the
            # caller's data dict; return it unchanged instead.
            return data
        data['Ene'] = dict()
        data['Ene']['Time'] = rawdata[:, 0]
        data['Ene']['Gra'] = rawdata[:, 1]
        data['Ene']['Kin'] = rawdata[:, 2]
        data['Ene']['Rot'] = rawdata[:, 3]
        data['Ene']['Ela'] = rawdata[:, 4]
        data['Ene']['ComX'] = rawdata[:, 5]
        data['Ene']['ComY'] = rawdata[:, 6]
        data['Ene']['ComZ'] = rawdata[:, 7]
    return data
498 

References Eigen::internal.print().

Referenced by load_stat_file().

◆ read_restart()

def Loadstatistics.read_restart (   statname,
  data 
)
def read_restart(statname, data):
    """Read the '.restart' file belonging to *statname* and merge particle
    and domain properties (ParticleDensity, Gravity, N, Domain, d, ...)
    into *data*.

    If name.restart cannot be found it may be because the name has an
    appendix (e.g. problem.X.stat tries to load problem.X.restart); then
    trailing '.<suffix>' parts are stripped (e.g. problem.restart) before
    giving up, in which case *data* is returned unchanged.
    Assumes monodispersed particles.
    """
    # load restart data
    filename = statname.replace('.stat', '.restart')
    if not os.path.isfile(filename):
        dots = statname.count('.')
        if dots > 1:
            splitfilename = os.path.splitext(filename)
            for dot in range(1, dots):
                splitfilename = os.path.splitext(splitfilename[0])
            filename = splitfilename[0] + '.restart'
        if not os.path.isfile(filename):
            print(filename + ' not found')
            return data
    if os.path.isfile(filename):
        # read the whole file as a flat list of whitespace-separated tokens
        with open(filename, 'r') as f:
            lines = f.readlines()
        tdata = []
        for line in lines:
            tdata.extend(line.split())
        # Old restart files start with a single-character token; new ones
        # start with a keyword. NOTE(review): heuristic -- TODO confirm.
        if len(tdata[0]) == 1:
            # old restart files
            # BUGFIX: the original assigned attributes (data.ParticleDensity)
            # on a dict, which raises AttributeError; use dict keys.
            data['ParticleDensity'] = float(tdata[28])
            data['d'] = 2 * float(tdata[27])
            data['Gravity'] = np.array([float(tdata[1]), float(tdata[2]), float(tdata[3])])
            data['N'] = float(tdata[-4])
            data['Domain'] = np.array([float(tdata[4]), float(tdata[5]), float(tdata[6]),
                                       float(tdata[7]), float(tdata[8]), float(tdata[9])])
        else:
            # new restart version: scan for keyword/value pairs
            i = [j for j, x in enumerate(tdata) if x == "density"]
            if i == []: i = [j for j, x in enumerate(tdata) if x == "rho"]
            if len(i) > 1: print('multiple species detected')
            if i != []: data["ParticleDensity"] = np.array([tdata[j + 1] for j in i]).astype(float)
            i = [j for j, x in enumerate(tdata) if x == 'slidingFrictionCoefficient']
            if i == []: i = [j for j, x in enumerate(tdata) if x == 'mu']
            if i != []: data["Mu"] = np.array([tdata[j + 1] for j in i]).astype(float)
            i = [j for j, x in enumerate(tdata) if x == 'gravity']
            if i != []: data["Gravity"] = np.array([[tdata[j + 1], tdata[j + 2], tdata[j + 3]] for j in i]).astype(float)
            i = [j for j, x in enumerate(tdata) if x == 'Particles']
            if i != []: data["N"] = float(tdata[i[0] + 1])
            i = [j for j, x in enumerate(tdata) if x == 'xMin']
            if i == []: i = [j for j, x in enumerate(tdata) if x == 'xmin']
            if i != []: data["Domain"] = np.array([[tdata[j + 1], tdata[j + 3], tdata[j + 5], tdata[j + 7], tdata[j + 9], tdata[j + 11]]
                                                   for j in i]).astype(float)
            i = [j for j, x in enumerate(tdata) if x == 'FixedParticleRadius']
            if i != []: data["FixedParticleRadius"] = np.array([tdata[j + 1] for j in i]).astype(float)
            i = [j for j, x in enumerate(tdata) if x == 'MinInflowParticleRadius']
            if i != []:
                data["InflowParticleRadius"] = np.array([[tdata[j + 1], tdata[j + 3]] for j in i]).astype(float)
                data["d"] = sum(data["InflowParticleRadius"])
            else:
                i = [j for j, x in enumerate(tdata) if x == 'Particles']
                if i != []:
                    # BUGFIX: the original compared a list with 0 (always
                    # False); compare the particle count itself.
                    if float(tdata[i[0] + 1]) == 0:
                        data["d"] = np.nan
                    else:
                        # diameter = 2 * first 'radius' value after the count
                        rel = min(j for j, x in enumerate(tdata[i[0] + 2:]) if x == 'radius')
                        data["d"] = 2 * float(tdata[i[0] + rel + 3])
        if 'd' in data: data["ParticleVolume"] = np.pi / 6 * data["d"] ** 3
        if 'Domain' in data: data["DomainVolume"] = np.prod(data["Domain"][0, 1::2].astype(float) - data["Domain"][0, ::2].astype(float))
        # Chute angle from the gravity direction, rounded to 1/400 rad.
        if 'Gravity' in data: data["ChuteAngle"] = round(np.arctan(-data["Gravity"][0, 0].astype(float) / data[
            "Gravity"][0, 2].astype(float)) * 400) / 400
    return data
571 
#define min(a, b)
Definition: datatypes.h:22
EIGEN_STRONG_INLINE EIGEN_DEVICE_FUNC bfloat16 round(const bfloat16 &a)
Definition: BFloat16.h:646

References min, Eigen::internal.print(), and Eigen::bfloat16_impl.round().

Referenced by load_stat_file().