CombineParallelRestartFiles Namespace Reference

Functions

def main ()
 

Function Documentation

◆ main()

def CombineParallelRestartFiles.main ( )
def main():
    """Combine the restart files written by the cores of a parallel run
    into one serial restart file.

    Command-line arguments (read from ``sys.argv``):
      1: base name of the restart files, including the ".restart"
         (e.g. "myRestartFile.restart" for myRestartFile.restart0, ...).
      2 (optional): file number, needed when the files were generated with
         FileType::MULTIPLE_FILES or FileType::MULTIPLE_FILES_PADDED
         (e.g. "123456" for myRestartFile.restart0.123456, ...).
      3 (optional): base name of the output file; defaults to
         "combined" + base name. Must differ from the input base name.

    Raises:
        Exception: if no base name is given, or if the output name equals
            the input base name.
    """
    if len(sys.argv) < 2:
        raise Exception("""
You need to provide command line arguments:
- In case of restart files of the form myRestartFile.restart0, etc.:
    ./CombineParallelRestartFiles.py myRestartFile.restart
- In case of restart files of the form myRestartFile.restart0.123456, etc:
    ./CombineParallelRestartFiles.py myRestartFile.restart 123456
    """)

    # The user gives the base of the restart-file name; we find the restart
    # files of all the cores. The argument should include the ".restart"!
    restart_file_base = sys.argv[1]
    files = sorted(f for f in os.listdir(".") if f.startswith(restart_file_base))

    # The user can give the root name of the out-file; note it should not be
    # the same as the in-file.
    if len(sys.argv) > 3:
        out_file_name_base = sys.argv[3]
        if restart_file_base == out_file_name_base:
            raise Exception("Name of out-file should not be same as in-file")
    else:
        out_file_name_base = "combined" + restart_file_base
    out_file_name = out_file_name_base

    # Optionally, the user can give a number s.t. all the files that end with
    # that number will be combined. Necessary if the files are generated with
    # FileType::MULTIPLE_FILES or FileType::MULTIPLE_FILES_PADDED.
    if len(sys.argv) > 2:
        restart_file_number = sys.argv[2]
        files = sorted(f for f in files if f.endswith("." + restart_file_number))
        out_file_name += "." + restart_file_number

    # The algorithm below is very naive, but works: first collect the header,
    # particles, interactions and footer separately, then write them all to
    # the new combined file.
    header = []
    particles = []
    interactions = []
    footer = []
    particle_ids = set()  # ids of all particles collected so far
    footer_written = False

    # Construct the header (everything before the particles) from the first
    # file and determine the line index where the particles start. All files
    # are assumed to share the same header length.
    with open(files[0]) as first_file:
        lines0 = first_file.readlines()

    index_particles = 0
    while 'Particles ' not in lines0[index_particles]:
        line = lines0[index_particles]
        if 'restartFile name ' in line:
            # Point the restart-file name at the combined output file.
            i = line.find(" name ")
            j = line.find(" fileType ")
            line = line[:i + 6] + out_file_name_base + line[j:]
        if 'numberOfProcessors ' in line:
            # Remove text relating to the parallel output
            # (e.g. "numberOfProcessors 16 numberOfDomains 4 4 1").
            i = line.find('numberOfProcessors ')
            line = line[:i] + '\n'
        header.append(line)
        index_particles += 1

    # For all files, collect the particle and interaction data. Also collect
    # the footer information once (from the first file only).
    for file_name in files:
        print("reading %s" % file_name)
        with open(file_name) as in_file:
            lines = in_file.readlines()
        # Split every line into its 'words' for field access.
        words = [line.split() for line in lines]

        # Collect all particles (we cannot write directly to file, as the
        # total number of particles is only known at the end).
        index_particles_end = index_particles + int(words[index_particles][1]) + 1
        for i in range(index_particles + 1, index_particles_end):
            particles.append(lines[i])
            # Remember the particle id (third field) for interaction filtering.
            particle_ids.add(int(words[i][2]))

        # Collect all interactions. Reject an interaction if one of its
        # particles has not been read yet (i.e. it lives in a later file);
        # the doubled interaction will then not be written twice.
        index_interactions_end = index_particles_end + int(words[index_particles_end][1]) + 1
        for i in range(index_particles_end + 1, index_interactions_end):
            p = int(words[i][2])
            q = int(words[i][3])
            if (p < 0 or p in particle_ids) and (q < 0 or q in particle_ids):
                interactions.append(lines[i])

        # Collect everything below the interactions, such as h-grid info,
        # chute line; taken from the first file only.
        if not footer_written:
            footer_written = True
            footer.extend(lines[index_interactions_end:])

    # Finally, write everything to the combined restart file.
    with open(out_file_name, 'w') as out_file:
        out_file.write("".join(header))
        out_file.write("Particles " + str(len(particles)) + "\n")
        out_file.write("".join(particles))
        out_file.write("Interactions " + str(len(interactions)) + "\n")
        out_file.write("".join(interactions))
        out_file.write("".join(footer))
        print("written %d particles, %d interactions to %s" % (len(particles), len(interactions), out_file.name))
125 
return int(ret)+1
def main()
Definition: CombineParallelRestartFiles.py:18
EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE Packet print(const Packet &a)
Definition: GenericPacketMath.h:1166
str
Definition: compute_granudrum_aor.py:141

References int(), Eigen::internal.print(), and compute_granudrum_aor.str.