plotResults.Analysis Class Reference

Public Member Functions

def __init__ (self, smcTest, smcSamples, obsNames, paramNames, posterior, bestId)
 

Public Attributes

 smcTest
 
 smcSamples
 
 obsNames
 
 paramNames
 
 posterior
 
 bestId
 

Constructor & Destructor Documentation

◆ __init__()

def plotResults.Analysis.__init__ (   self,
  smcTest,
  smcSamples,
  obsNames,
  paramNames,
  posterior,
  bestId 
)
27  def __init__(self, smcTest, smcSamples, obsNames, paramNames, posterior, bestId):
28  self.smcTest = smcTest
29  self.smcSamples = smcSamples
30  self.obsNames = obsNames
31  self.paramNames = paramNames
32  self.posterior = posterior
33  self.bestId = bestId
34 
35 
36 # # get weight from files
37 # def getWeight(wName):
38 # # get weight
39 # wFile = open(wName, "r")
40 # wLines = wFile.read().splitlines()
41 # wFile.close()
42 # wData = []
43 # # extract weights at specified step
44 # for i in range(len(wLines)): wData.append(np.float64(wLines[i].split()))
45 # return wData
46 #
47 #
48 # def plotIPs(names, ips, covs, nStep, weight, params):
49 # # plot weighted average for each parameter
50 # plt.figure('Posterior means over parameters')
51 # for i in range(3):
52 # plt.subplot(2, 2, i + 1)
53 # plt.plot(ips[:, i])
54 # plt.xlabel('loading step')
55 # plt.ylabel(r'$|' + names[i] + r'|$')
56 # plt.grid(True)
57 # plt.tight_layout()
58 #
59 # # plot coefficient of variance for each parameter
60 # plt.figure('Posterior coefficients of variance over parameters')
61 # for i in range(3):
62 # plt.subplot(2, 2, i + 1)
63 # plt.plot(covs[:, i])
64 # plt.xlabel('loading step')
65 # plt.ylabel(r'$COV(' + names[i] + ')$')
66 # plt.grid(True)
67 # plt.tight_layout()
68 #
69 # # plot probability density function of identified parameters
70 # for i, name in enumerate(names):
71 # plt.figure('PDF of ' + name)
72 # for j in range(6):
73 # plt.subplot(2, 3, j + 1)
74 # plt.plot(params[:, i], weight[:, int(nStep * (j + 1) / 6 - 1)], 'o')
75 # plt.title('NO.%3i loading step' % (int(nStep * (j + 1) / 6 - 1)))
76 # plt.xlabel(r'$' + name + '$')
77 # plt.ylabel('Posterior PDF')
78 # plt.grid(True)
79 # # plt.tight_layout()
80 #
81 # # get ensemble average
82 # enAvg0 = params[:, 0].dot(weight)
83 # enAvg1 = params[:, 1].dot(weight)
84 # enAvg2 = params[:, 2].dot(weight)
85 # # enAvg3 = params[:,3].dot(weight)
86 # numOfObs = enAvg0.shape
87 # # get diagonal variance
88 # enVar0 = np.zeros(numOfObs)
89 # enVar1 = np.zeros(numOfObs)
90 # enVar2 = np.zeros(numOfObs)
91 # # enVar3 = np.zeros(numOfObs)
92 # for n in range(numOfObs[0]):
93 # enVar0[n] = ((params[:, 0] - enAvg0[n]) ** 2).dot(weight[:, n])
94 # enVar1[n] = ((params[:, 1] - enAvg1[n]) ** 2).dot(weight[:, n])
95 # enVar2[n] = ((params[:, 2] - enAvg2[n]) ** 2).dot(weight[:, n])
96 # # enVar3[n] = ((params[:,3]-enAvg3[n])**2).dot(weight[:,n])
97 # # get standard deviation
98 # enStd0 = np.sqrt(enVar0)
99 # enStd1 = np.sqrt(enVar1)
100 # enStd2 = np.sqrt(enVar2)
101 # # enStd3 = np.sqrt(enVar3)
102 #
103 # plt.show(block=False)
104 #
105 # return enAvg0, enStd0, enAvg1, enStd1, enAvg2, enStd2 # , enAvg3, enStd3
106 #
107 #
108 # def plotAllSamples(smcSamples, names):
109 # numOfIters = len(smcSamples)
110 # plt.figure('Resampled parameter space')
111 # plt.subplot(121)
112 # for i in range(numOfIters): plt.plot(smcSamples[i][:, 0], smcSamples[i][:, 1], 'o', label='iterNO.0')
113 # plt.xlabel(r'$' + names[0] + '$')
114 # plt.ylabel(r'$' + names[1] + '$')
115 # plt.legend()
116 # plt.subplot(122)
117 # for i in range(numOfIters): plt.plot(smcSamples[i][:, 1], smcSamples[i][:, 2], 'o', label='iterNO.0')
118 # plt.xlabel(r'$' + names[1] + '$')
119 # plt.ylabel(r'$' + names[2] + '$')
120 # plt.legend()
121 # plt.tight_layout()
122 # plt.show(block=False)
123 #
124 #
125 # def numAndExpData(numFiles, p0, q0, n0, e_a0, e_r0):
126 # e_v0 = np.array(e_a0) + 2. * np.array(e_r0)
127 # e_a1, e_r11, e_r21, n1, q1, p1 = np.genfromtxt(numFiles[0]).transpose()
128 # e_v1 = e_a1 + e_r11 + e_r21
129 # e_a2, e_r12, e_r22, n2, q2, p2 = np.genfromtxt(numFiles[1]).transpose()
130 # e_v2 = e_a2 + e_r12 + e_r22
131 # e_a3, e_r13, e_r23, n3, q3, p3 = np.genfromtxt(numFiles[2]).transpose()
132 # e_v3 = e_a3 + e_r13 + e_r23
133 #
134 # plt.figure(1)
135 # plt.plot(e_a0, 'bo', e_a1, '-', e_a2, '--', e_a3, '-.')
136 # plt.xlabel('Simulation step', fontsize=18)
137 # plt.ylabel('Axial strain (\%)', fontsize=18)
138 # plt.xticks(fontsize=16)
139 # plt.yticks(fontsize=16)
140 # plt.grid(True)
141 # plt.savefig('e_a.png')
142 #
143 # plt.figure(2)
144 # plt.plot(e_v0, 'bo', e_v1, '-', e_v2, '--', e_v3, '-.')
145 # plt.xlabel('Simulation step', fontsize=18)
146 # plt.ylabel('Volumetric strain', fontsize=18)
147 # plt.xticks(fontsize=16)
148 # plt.yticks(fontsize=16)
149 # plt.grid(True)
150 # plt.savefig('e_v.png')
151 #
152 # plt.figure(3)
153 # plt.plot(n0, 'bo', n1, '-', n2, '--', n3, '-.')
154 # plt.xlabel('Simulation step', fontsize=18)
155 # plt.ylabel('Porosity', fontsize=18)
156 # plt.xticks(fontsize=16)
157 # plt.yticks(fontsize=16)
158 # plt.grid(True)
159 # plt.savefig('n.png')
160 #
161 # plt.figure(4)
162 # plt.plot(p0, 'bo', p1, '-', p2, '--', p3, '-.')
163 # plt.xlabel('Simulation step', fontsize=18)
164 # plt.ylabel('Mean stress (MPa)', fontsize=18)
165 # plt.xticks(fontsize=16)
166 # plt.yticks(fontsize=16)
167 # plt.grid(True)
168 # plt.savefig('p.png')
169 #
170 # plt.figure(5)
171 # plt.plot(q0, 'bo', q1, '-', q2, '--', q3, '-.')
172 # plt.xlabel('Simulation step', fontsize=18)
173 # plt.ylabel('Deviatoric stress', fontsize=18)
174 # plt.xticks(fontsize=16)
175 # plt.yticks(fontsize=16)
176 # plt.grid(True)
177 # plt.savefig('q.png')
178 #
179 #
180 # def plotExpAndNum(name, names, iterNO, weight, mcFiles, numFiles, label1, label2, label3, label4, p, q, n, e_a, e_r):
181 # params = {'lines.linewidth': 1.0, 'backend': 'ps', 'axes.labelsize': 10, 'font.size': 10, 'legend.fontsize': 9,
182 # 'xtick.labelsize': 9, 'ytick.labelsize': 9, 'text.usetex': False, 'font.family': 'serif',
183 # 'legend.edgecolor': 'k', 'legend.fancybox': False}
184 # # matplotlib.rcParams['text.latex.preamble'] = r"\usepackage{amsmath}"
185 # matplotlib.rcParams.update(params)
186 # titles = ['(a)', '(b)']
187 # if name[-2:] == 'I2': e_a = (np.array(e_a) + 2 * np.array(e_r)) / 3.
188 #
189 # # get weight and turning points in the graphs
190 # nSample, numOfObs = weight.shape
191 # turns = [1, 30, 56, 80, numOfObs]
192 #
193 # # prepare ensembles
194 # enP = np.zeros([nSample, numOfObs])
195 # enQ = np.zeros([nSample, numOfObs])
196 # enN = np.zeros([nSample, numOfObs])
197 # enC = np.zeros([nSample, numOfObs])
198 # for i in range(nSample):
199 # C, CN, K0, e_r11, e_r21, e_a1, n1, overlap, p1, q1 = np.genfromtxt(mcFiles[i]).transpose()
200 # enP[i, :] = p1
201 # enQ[i, :] = q1
202 # enN[i, :] = n1
203 # enC[i, :] = CN
204 # # get ensemble average
205 # enAvgP = np.zeros(numOfObs)
206 # enStdP = np.zeros(numOfObs)
207 # enAvgQPRatio = np.zeros(numOfObs)
208 # enStdQPRatio = np.zeros(numOfObs)
209 # enAvgN = np.zeros(numOfObs)
210 # enStdN = np.zeros(numOfObs)
211 # enAvgC = np.zeros(numOfObs)
212 # enStdC = np.zeros(numOfObs)
213 # for i in range(numOfObs):
214 # enAvgP[i] = enP[:, i].dot(weight[:, i])
215 # enAvgQPRatio[i] = (enQ / enP)[:, i].dot(weight[:, i])
216 # enAvgN[i] = enN[:, i].dot(weight[:, i])
217 # enAvgC[i] = enC[:, i].dot(weight[:, i])
218 # # get diagonal variance
219 # for i in range(numOfObs):
220 # enStdP[i] = ((enP[:, i] - enAvgP[i]) ** 2).dot(weight[:, i])
221 # enStdQPRatio[i] = (((enQ / enP)[:, i] - enAvgQPRatio[i]) ** 2).dot(weight[:, i])
222 # enStdN[i] = ((enN[:, i] - enAvgN[i]) ** 2).dot(weight[:, i])
223 # enStdC[i] = ((enC[:, i] - enAvgC[i]) ** 2).dot(weight[:, i])
224 # # get standard deviation
225 # enStdP = np.sqrt(enStdP)
226 # enStdQPRatio = np.sqrt(enStdQPRatio)
227 # enStdN = np.sqrt(enStdN)
228 # enStdC = np.sqrt(enStdC)
229 #
230 # fig = plt.figure(figsize=(20 / 2.54, 5 / 2.54))
231 # ax = fig.add_subplot(1, 2, 1)
232 # lines = []
233 # ls = ['-', '-.', ':', '-']
234 # for i in range(len(numFiles)):
235 # C, CN, K0, e_r11, e_r21, e_a1, n1, overlap, p1, q1 = np.genfromtxt(numFiles[i]).transpose()
236 # e_v1 = e_a1 + e_r11 + e_r21
237 # l2, = ax.plot(100 * e_a1, q1 / p1,
238 # label=r'$E_c$' + '=%.1f GPa, ' % (label1[i] / 1e9) + r'$\mu$' + '=%.3f, ' % label2[
239 # i] + r'$k_m$' + '=%.3f$\times10^{-3}$ N$\cdot$mm, ' % (
240 # label3[i] / 1e3) + r'$\eta_m$' + '=%.3f' % label4[i], ls=ls[i], color='k')
241 # lines.append(l2)
242 # l1, = ax.plot(e_a, np.array(q) / np.array(p), 'o', color='darkblue', ms=2.5)
243 # lines.append(l1)
244 # l0, = ax.plot(e_a, enAvgQPRatio, '-', color='darkred')
245 # lines.append(l0)
246 # lowBound = enAvgQPRatio - 2 * enStdQPRatio
247 # upBound = enAvgQPRatio + 2 * enStdQPRatio
248 # for front, back in zip(turns[:-1], turns[1:]):
249 # ax.fill_between(e_a[front - 1:back], lowBound[front - 1:back], upBound[front - 1:back], color='darkred',
250 # alpha=0.3, linewidth=0.0)
251 # anchored_text = AnchoredText(titles[0], loc=2, frameon=False, pad=-0.3)
252 # ax.add_artist(anchored_text)
253 # ax.set_xlabel(r'$\varepsilon_a$ (\%)')
254 # ax.set_ylabel(r'$q/p$')
255 # ax.set_xlim(xmin=0)
256 # ax.grid(True)
257 #
258 # ax = fig.add_subplot(1, 2, 2)
259 # for i in range(len(numFiles)):
260 # C, CN, K0, e_r11, e_r21, e_a1, n1, overlap, p1, q1 = np.genfromtxt(numFiles[i]).transpose()
261 # e_v1 = e_a1 + e_r11 + e_r21
262 # l2, = ax.plot(1 - n1[1:], p1[1:],
263 # label=r'$E_c$' + '=%.1f GPa, ' % (label1[i] / 1e9) + r'$\mu$' + '=%.3f, ' % label2[
264 # i] + r'$k_m$' + '=%.3f$\times10^{-3}$ N$\cdot$mm, ' % (
265 # label3[i] / 1e3) + r'$\eta_m$' + '=%.3f' % label4[i], ls=ls[i], color='k')
266 # l1, = ax.plot(1 - np.array(n), p, 'o', color='darkblue', label='Experimental data', ms=2.5)
267 # l0, = ax.plot(1 - enAvgN, enAvgP, '-', color='darkred')
268 # lowBound = enAvgP - 2 * enStdP
269 # upBound = enAvgP + 2 * enStdP
270 # for front, back in zip(turns[:-1], turns[1:]):
271 # ax.fill_between(1 - enAvgN[front - 1:back], lowBound[front - 1:back], upBound[front - 1:back], color='darkred',
272 # alpha=0.3, linewidth=0.0)
273 # anchored_text = AnchoredText(titles[1], loc=2, frameon=False, pad=-0.3)
274 # ax.add_artist(anchored_text)
275 # ax.set_ylabel(r'$p$ (MPa)')
276 # ax.set_xlabel(r'1$-n$')
277 # ax.set_xticks([0.61, 0.615, 0.62])
278 # ax.set_ylim(ymin=0)
279 # ax.grid(True)
280 #
281 # fig.legend(tuple(lines), tuple([r'$E_c$' + '=%.1f GPa, ' % (label1[i] / 1e9) + r'$\mu$' + '=%.3f\n' % label2[
282 # i] + r'$k_m$' + r'=%.3f$\times10^{-3}$ N$\cdot$mm' % (label3[i] / 1e3) + '\n' + r'$\eta_m$' + '=%.3f' % label4[
283 # i] for i in range(len(numFiles))] + ['Experimental data'] + [
284 # 'Posterior ensemble']), loc='right', ncol=1, handlelength=1.45, labelspacing=0.8,
285 # frameon=False)
286 # fig.subplots_adjust(left=0.06, bottom=0.20, right=0.74, top=0.98, hspace=0, wspace=0.35)
287 # plt.savefig('expAndDEM' + iterNO + '.pdf')
288 # plt.show(block=False)
289 #
290 # return enAvgP, enStdP, enAvgQPRatio, enStdQPRatio, enAvgN, enStdN, enAvgC, enStdC
291 
292 
293 # def plotExpSequence(name, names, varsDir, numFiles, label1, label2, label3, label4, p, q, n, e_a, e_r):
294 # for i in range(55):
295 # titles = ['(a)', '(b)']
296 # if name[-2:] == 'I2': e_a = (np.array(e_a) + 2 * np.array(e_r)) / 3.
297 # fig = plt.figure(figsize=(15 / 2.54, 6.25 / 2.54))
298 # ax = fig.add_subplot(1, 2, 1)
299 # l1, = ax.plot(e_a[:56], np.array(q[:56]) / np.array(p[:56]), '-', color='gray')
300 # ax.plot(e_a[i], np.array(q[i]) / np.array(p[i]), '^', color='k')
301 # lines = [l1]
302 # ls = ['-', '--', '-.', ':']
303 # anchored_text = AnchoredText(titles[0], loc=2, frameon=False, pad=-0.3)
304 # ax.add_artist(anchored_text)
305 # ax.set_xlabel(r'$\varepsilon_a$ (\%)')
306 # ax.set_ylabel(r'$q/p$')
307 # ax.set_xlim(xmin=0)
308 # ax.grid(True)
309 # ax = fig.add_subplot(1, 2, 2)
310 # l1, = ax.plot(p[:56], n[:56], '-', color='gray', label='Experimental data', ms=4)
311 # ax.plot(p[i], n[i], '^', color='k')
312 # anchored_text = AnchoredText(titles[1], loc=2, frameon=False, pad=-0.3)
313 # ax.add_artist(anchored_text)
314 # ax.set_xlabel(r'$p$ (MPa)')
315 # ax.set_ylabel(r'$n$')
316 # ax.set_xlim(xmin=0)
317 # ax.grid(True)
318 # fig.subplots_adjust(left=0.09, bottom=0.18, right=0.98, top=0.98, hspace=0, wspace=0.35)
319 # # ~ plt.savefig('%02d.png'%i,dpi=300)
320 # plt.show(block=False)
321 
322 
323 # def plotExpAndNumHalfPage(name, names, varsDir, numFiles, label1, label2, label3, label4, p, q, n, e_a, e_r):
324 # params = {'lines.linewidth': 1.5, 'backend': 'ps', 'axes.labelsize': 17.7, 'font.size': 17.7, 'legend.fontsize': 15,
325 # 'xtick.labelsize': 15, 'ytick.labelsize': 15, 'text.usetex': False, 'font.family': 'serif',
326 # 'legend.edgecolor': 'k', 'legend.fancybox': False}
327 # # matplotlib.rcParams['text.latex.preamble'] = r"\usepackage{amsmath}"
328 # matplotlib.rcParams.update(params)
329 # titles = ['(a)', '(b)']
330 # if name[-2:] == 'I2': e_a = (np.array(e_a) + 2 * np.array(e_r)) / 3.
331 # fig = plt.figure(figsize=(16 / 2.54, 14 / 2.54))
332 #
333 # ax = fig.add_subplot(2, 1, 1)
334 # l1, = ax.plot(e_a, np.array(q) / np.array(p), 'o', color='darkred')
335 # lines = [l1]
336 # ls = ['-', '--', '-.', ':']
337 # for i in range(len(numFiles)):
338 # C, CN, K0, e_r11, e_r21, e_a1, n1, overlap, p1, q1 = np.genfromtxt(numFiles[i]).transpose()
339 # e_v1 = e_a1 + e_r11 + e_r21
340 # l2, = ax.plot(100 * e_a1, q1 / p1,
341 # label=r'$E_c$' + '=%.1f, ' % (label1[i] / 1e9) + r'$\mu$' + '=%.3f, ' % label2[
342 # i] + r'$k_m$' + '=%.3f, ' % (label3[i] / 1e3) + r'$\eta_m$' + '=%.3f' % label4[i], ls=ls[i],
343 # color='k')
344 # lines.append(l2)
345 # anchored_text = AnchoredText(titles[0], loc=2, frameon=False, pad=-0.3)
346 # ax.add_artist(anchored_text)
347 # ax.set_xlabel(r'$\varepsilon_a$ (\%)')
348 # ax.set_ylabel(r'$q/p$')
349 # ax.set_xlim(xmin=0)
350 # ax.grid(True)
351 #
352 # ax = fig.add_subplot(2, 1, 2)
353 # l1, = ax.plot(p, n, 'o', color='darkred', label='Experimental data')
354 # for i in range(len(numFiles)):
355 # C, CN, K0, e_r11, e_r21, e_a1, n1, overlap, p1, q1 = np.genfromtxt(numFiles[i]).transpose()
356 # e_v1 = e_a1 + e_r11 + e_r21
357 # l2, = ax.plot(p1[1:], n1[1:], label=r'$E_c$' + '=%.1f, ' % (label1[i] / 1e9) + r'$\mu$' + '=%.3f, ' % label2[
358 # i] + r'$k_m$' + '=%.3f, ' % (label3[i] / 1e3) + r'$\eta_m$' + '=%.3f' % label4[i], ls=ls[i], color='k')
359 #
360 # anchored_text = AnchoredText(titles[1], loc=2, frameon=False, pad=-0.3)
361 # ax.add_artist(anchored_text)
362 # ax.set_xlabel(r'$p$ (MPa)')
363 # ax.set_ylabel(r'$n$')
364 # ax.set_xlim(xmin=0)
365 # ax.grid(True)
366 #
367 # fig.legend(tuple(lines), tuple(['Experimental\ndata'] + [
368 # r'$E_c$' + '=%.1f\n' % (label1[i] / 1e9) + r'$\mu$' + '=%.3f\n' % label2[i] + r'$k_m$' + '=%.3f\n' % (
369 # label3[i] / 1e3) + r'$\eta_m$' + '=%.3f' % label4[i] for i in range(len(numFiles))]), loc='right',
370 # ncol=1, handlelength=1.45, labelspacing=1.5, frameon=False)
371 # fig.subplots_adjust(left=0.13, bottom=0.105, right=0.66, top=0.98, hspace=0.4, wspace=0.35)
372 # # ~ plt.savefig('expAndDEM'+varsDir[-2]+'.png',dpi=600)
373 # # plt.savefig('expAndDEM'+varsDir[-2]+'.tif',dpi=600)
374 # # plt.savefig('expAndDEM'+varsDir[-2]+'.pdf',dpi=600)
375 # plt.show(block=False)
376 
377 
378 # def microMacroPDF(name, step, pData, varsDir, weight, mcFiles, loadWeights=False):
379 # import seaborn as sns
380 # from resample import unWeighted_resample
381 # params = {'lines.linewidth': 1.5, 'backend': 'ps', 'axes.labelsize': 18, 'font.size': 18, 'legend.fontsize': 16,
382 # 'xtick.labelsize': 16, 'ytick.labelsize': 16, 'text.usetex': False, 'font.family': 'serif',
383 # 'legend.edgecolor': 'k', 'legend.fancybox': False}
384 # # matplotlib.rcParams['text.latex.preamble'] = r"\usepackage{amsmath}"
385 # matplotlib.rcParams.update(params)
386 #
387 # # read in the original sampled parameter set
388 # Xlabels = [r'$E_c$' + r' $(\mathrm{GPa})$', r'$\mu$', r'$k_m$' + r' $(\mu\mathrm{Nm)}$', r'$\eta_m$']
389 # Ylabels = [r'$p$' + r' $(\mathrm{MPa})$', r'$q/p$', r'$n$', r'$C^*$']
390 # if loadWeights:
391 # wPerPair = np.load(varsDir + '/wPerPair%i.npy' % step).item()
392 # else:
393 # wPerPair = {}
394 #
395 # # Monte Carlo sequence
396 # nSample, numOfObs = weight.shape
397 # enQOIs = np.zeros([4, nSample])
398 # for i in range(nSample):
399 # C, CN, K0, e_r11, e_r21, e_a1, n1, overlap, p1, q1 = np.genfromtxt(mcFiles[i]).transpose()
400 # enQOIs[0, i] = p1[step]
401 # enQOIs[1, i] = q1[step] / p1[step]
402 # enQOIs[2, i] = n1[step]
403 # enQOIs[3, i] = CN[step]
404 # # importance resampling
405 # ResampleIndices = unWeighted_resample(weight[:, step], nSample * 10)
406 # particles = np.copy(pData)
407 # particles[0, :] /= 1e9
408 # particles[2, :] /= 1e3
409 # particles = particles[:, ResampleIndices]
410 # enQOIs = enQOIs[:, ResampleIndices]
411 # # remove porosity as QOI if iterNO > 0
412 # if 'iterPF0' not in mcFiles[0]:
413 # enQOIs = np.vstack([enQOIs[:2, :], enQOIs[-1, :]])
414 # for i in [9, 10, 11, 12]: wPerPair[i] = wPerPair[i + 4]
415 # Ylabels = [r'$p$' + r' $(\mathrm{MPa})$', r'$q/p$', r'$C^*$']
416 #
417 # # plot figures
418 # figNO = 0
419 # fig = plt.figure('microMacroUQ_iterPF' + varsDir[-1] + '_%i' % step, figsize=(12 / 2.54, 12 / 2.54))
420 # sns.set(style="ticks", rc=matplotlib.rcParams)
421 # for i in range(enQOIs.shape[0]):
422 # for j in range(particles.shape[0]):
423 # figNO += 1
424 # # plot histograms at the initial and final steps
425 # ax = fig.add_subplot(enQOIs.shape[0], 4, figNO)
426 # cmap = sns.dark_palette("black", as_cmap=True)
427 # # ~ ax = sns.kdeplot(,,cmap=cmap,kernel='gau',cut=3,n_levels=20,bw='silverman',linewidths=0.8)
428 # # ax.grid()
429 # if loadWeights:
430 # minScore, maxScore, p0, w0 = wPerPair[figNO]
431 # else:
432 # data = np.array([particles[j], enQOIs[i]])
433 # pMin = [min(particles[j]), min(enQOIs[i])]
434 # pMax = [max(particles[j]), max(enQOIs[i])]
435 # minScore, maxScore, p0, w0 = getLogWeightFromGMM(data.T, pMin, pMax, 1e-2)
436 # wPerPair[figNO] = (minScore, maxScore, p0, w0)
437 # X, Y = p0
438 # plt.contour(X, Y, w0, cmap=cmap, levels=np.linspace(minScore - abs(minScore) * 0.1, maxScore, 10),
439 # linewidths=0.7)
440 # plt.grid()
441 # ax.locator_params(axis='both', nbins=2)
442 # ax.tick_params(axis='both', which='both', bottom='off', top='off', labelbottom='off', right='off',
443 # left='off', labelleft='off')
444 # if i == enQOIs.shape[0] - 1:
445 # ax.set_xlabel(Xlabels[j], size=params['font.size'], labelpad=10)
446 # ax.tick_params(axis='both', which='both', bottom='on', top='off', labelbottom='on', right='off',
447 # left='off', labelleft='off', labelsize=params['xtick.labelsize'])
448 # if j == 0:
449 # ax.set_ylabel(Ylabels[i], size=params['font.size'], labelpad=10)
450 # ax.tick_params(axis='both', which='both', bottom='off', top='off', labelbottom='off', right='off',
451 # left='on', labelleft='on', labelsize=params['xtick.labelsize'])
452 # if i == enQOIs.shape[0] - 1 and j == 0:
453 # ax.tick_params(axis='both', which='both', bottom='on', top='off', labelbottom='on', right='off',
454 # left='on', labelleft='on', labelsize=params['xtick.labelsize'])
455 # if i != enQOIs.shape[0] - 1 and j != 0:
456 # ax.tick_params(axis='both', which='both', bottom='off', top='off', labelbottom='off', right='off',
457 # left='off', labelleft='off')
458 # xMin, xMax = ax.get_xlim()
459 # yMin, yMax = ax.get_ylim()
460 # dx = (xMax - xMin) / 4
461 # dy = (yMax - yMin) / 4
462 # ax.set_xticks(np.round([xMin + dx, xMax - dx], int(np.ceil(-np.log10(dx)))))
463 # ax.set_yticks(np.round([yMin + dy, yMax - dy], int(np.ceil(-np.log10(dy)))))
464 # if not loadWeights: np.save(varsDir + '/wPerPair%i.npy' % step, wPerPair)
465 # plt.tight_layout()
466 # fig.subplots_adjust(left=0.21, bottom=0.16, right=0.99, top=0.99, hspace=0., wspace=0.)
467 # plt.savefig('microMacroUQ_iterPF' + varsDir[-1] + '_%i.pdf' % step, dpi=600)
468 # # ~ plt.show()
469 # return wPerPair
470 
471 
472 # kde estimation
473 # def getPDF(pDataResampled, pMin, pMax):
474 # kde = stats.gaussian_kde(pDataResampled)
475 # pDataNew = np.linspace(pMin, pMax, 2000)
476 # wDataNew = kde(pDataNew)
477 # wDataNew[0], wDataNew[-1] = 0, 0
478 # return pDataNew, wDataNew
479 
480 
481 # def macroMacroPDF(name, step, pData, varsDir, weight, mcFiles):
482 # import seaborn as sns
483 # from resample import unWeighted_resample
484 # params = {'lines.linewidth': 1.5, 'backend': 'ps', 'axes.labelsize': 20, 'font.size': 20, 'legend.fontsize': 18,
485 # 'xtick.labelsize': 18, 'ytick.labelsize': 18, 'text.usetex': False, 'font.family': 'serif',
486 # 'legend.edgecolor': 'k', 'legend.fancybox': False}
487 # # matplotlib.rcParams['text.latex.preamble'] = r"\usepackage{amsmath}"
488 # matplotlib.rcParams.update(params)
489 #
490 # # read in the original sampled parameter set
491 # labels = [r'$p$' + r' $(\mathrm{MPa})$', r'$q/p$', r'$n$', r'$C^*$']
492 #
493 # # Monte Carlo sequence
494 # nSample, numOfObs = weight.shape
495 # enQOIs = np.zeros([4, nSample])
496 # for i in range(nSample):
497 # C, CN, K0, e_r11, e_r21, e_a1, n1, overlap, p1, q1 = np.genfromtxt(mcFiles[i]).transpose()
498 # enQOIs[0, i] = p1[step]
499 # enQOIs[1, i] = q1[step] / p1[step]
500 # enQOIs[2, i] = n1[step]
501 # enQOIs[3, i] = CN[step]
502 # # importance resampling
503 # ResampleIndices = unWeighted_resample(weight[:, step], nSample * 10)
504 # sortedIndex = np.argsort(weight[:, step])
505 # particles = np.copy(pData)
506 # particles[0, :] /= 1e9
507 # particles[2, :] /= 1e3
508 # particles = particles[:, ResampleIndices]
509 # enQOIs = enQOIs[:, ResampleIndices]
510 #
511 # # plot figures
512 # figNO = 0
513 # fig = plt.figure(figsize=(18 / 2.54, 18 / 2.54))
514 # sns.set(style="ticks", rc=matplotlib.rcParams)
515 # for i in range(4):
516 # for j in range(4):
517 # figNO += 1
518 # # plot histograms at the initial and final steps
519 # ax = fig.add_subplot(4, 4, figNO)
520 # if i == j:
521 # # estimate pdf using gaussian kernals
522 # p0, w0 = getPDF(enQOIs[i, :], min(enQOIs[i, :]), max(enQOIs[i, :]))
523 # ax2 = ax.twinx()
524 # ax2.fill_between(p0, w0, alpha=0.63, facecolor='black')
525 # ax2.tick_params(axis='y', right='off', labelright='off')
526 # ax.set_ylim(min(p0), max(p0))
527 # elif j < i:
528 # cmap = sns.dark_palette("black", as_cmap=True)
529 # ax = sns.kdeplot(enQOIs[j], enQOIs[i], cmap=cmap, kernel='gau', n_levels=20, bw='silverman',
530 # linewidths=0.8)
531 # ax.grid()
532 # ax.locator_params(axis='both', tight=True, nbins=2)
533 # ax.tick_params(axis='both', which='both', bottom='off', top='off', labelbottom='off', right='off',
534 # left='off', labelleft='off')
535 # ax.set_xlim(min(enQOIs[j, :]), max(enQOIs[j, :]))
536 # ax.set_ylim(min(enQOIs[i, :]), max(enQOIs[i, :]))
537 # elif j > i:
538 # cmap = sns.dark_palette("black", as_cmap=True)
539 # ax.scatter(enQOIs[j, sortedIndex], enQOIs[i, sortedIndex], s=25, c=weight[sortedIndex, step],
540 # cmap='Greys')
541 # ax.grid()
542 # ax.locator_params(axis='both', tight=True, nbins=2)
543 # ax.set_xlim(min(enQOIs[j, :]), max(enQOIs[j, :]))
544 # ax.set_ylim(min(enQOIs[i, :]), max(enQOIs[i, :]))
545 # if i == 3:
546 # ax.set_xlabel(labels[j], size=params['font.size'], labelpad=10)
547 # ax.tick_params(axis='both', which='both', bottom='on', top='off', labelbottom='on', right='off',
548 # left='off', labelleft='off', labelsize=params['xtick.labelsize'])
549 # if j == 0:
550 # ax.set_ylabel(labels[i], size=params['font.size'], labelpad=10)
551 # ax.tick_params(axis='both', which='both', bottom='off', top='off', labelbottom='off', right='off',
552 # left='on', labelleft='on', labelsize=params['xtick.labelsize'])
553 # if i == 3 and j == 0:
554 # ax.tick_params(axis='both', which='both', bottom='on', top='off', labelbottom='on', right='off',
555 # left='on', labelleft='on', labelsize=params['xtick.labelsize'])
556 # if i != 3 and j != 0:
557 # ax.tick_params(axis='both', which='both', bottom='off', top='off', labelbottom='off', right='off',
558 # left='off', labelleft='off')
559 #
560 # plt.tight_layout()
561 # fig.subplots_adjust(left=0.165, bottom=0.12, right=0.98, top=0.99, hspace=0.1, wspace=0.1)
562 # plt.savefig('macroMacroUQ_' + varsDir[-8:-1] + '_%i.pdf' % step, dpi=600)
563 # plt.show(block=False)
564 
565 
566 # def plot3DScatter(xName, yName, zName, x, y, z):
567 # # from mpl_toolkits.mplot3d import Axes3D
568 # fig = plt.figure()
569 # ax = fig.add_subplot(111, projection='3d')
570 # ax.scatter(x, y, z)
571 # ax.set_xlabel(xName)
572 # ax.set_ylabel(yName)
573 # ax.set_zlabel(zName)
574 # # ~ ax.set_zlim([-5-0.1,-5+0.1])
575 # # ~ ax.set_xlim([0.3,0.4])
576 # plt.show(block=False)
577 #
578 #
579 # def polySmooth(y):
580 # x = np.arange(len(y))
581 # yhat = savitzky_golay(y, 51, 10) # window size 51, polynomial order 3
582 # return yhat
583 
584 
585 # def savitzky_golay(y, window_size, order, deriv=0, rate=1):
586 # r"""Smooth (and optionally differentiate) data with a Savitzky-Golay filter.
587 # The Savitzky-Golay filter removes high frequency noise from data.
588 # It has the advantage of preserving the original shape and
589 # features of the signal better than other types of filtering
590 # approaches, such as moving averages techniques.
591 # Parameters
592 # ----------
593 # y : array_like, shape (N,)
594 # the values of the time history of the signal.
595 # window_size : int
596 # the length of the window. Must be an odd integer number.
597 # order : int
598 # the order of the polynomial used in the filtering.
599 # Must be less then `window_size` - 1.
600 # deriv: int
601 # the order of the derivative to compute (default = 0 means only smoothing)
602 # Returns
603 # -------
604 # ys : ndarray, shape (N)
605 # the smoothed signal (or it's n-th derivative).
606 # Notes
607 # -----
608 # The Savitzky-Golay is a type of low-pass filter, particularly
609 # suited for smoothing noisy data. The main idea behind this
610 # approach is to make for each point a least-square fit with a
611 # polynomial of high order over a odd-sized window centered at
612 # the point.
613 # Examples
614 # --------
615 # t = np.linspace(-4, 4, 500)
616 # y = np.exp( -t**2 ) + np.random.normal(0, 0.05, t.shape)
617 # ysg = savitzky_golay(y, window_size=31, order=4)
618 # import matplotlib.pyplot as plt
619 # plt.plot(t, y, label='Noisy signal')
620 # plt.plot(t, np.exp(-t**2), 'k', lw=1.5, label='Original signal')
621 # plt.plot(t, ysg, 'r', label='Filtered signal')
622 # plt.legend()
623 # plt.show()
624 # References
625 # ----------
626 # .. [1] A. Savitzky, M. J. E. Golay, Smoothing and Differentiation of
627 # Data by Simplified Least Squares Procedures. Analytical
628 # Chemistry, 1964, 36 (8), pp 1627-1639.
629 # .. [2] Numerical Recipes 3rd Edition: The Art of Scientific Computing
630 # W.H. Press, S.A. Teukolsky, W.T. Vetterling, B.P. Flannery
631 # Cambridge University Press ISBN-13: 9780521880688
632 # """
633 # import numpy as np
634 # from math import factorial
635 #
636 # try:
637 # window_size = np.abs(np.integer(window_size))
638 # order = np.abs(np.integer(order))
639 # except ValueError(msg):
640 # raise ValueError("window_size and order have to be of type int")
641 # if window_size % 2 != 1 or window_size < 1:
642 # raise TypeError("window_size size must be a positive odd number")
643 # if window_size < order + 2:
644 # raise TypeError("window_size is too small for the polynomials order")
645 # order_range = range(order + 1)
646 # half_window = (window_size - 1) // 2
647 # # precompute coefficients
648 # b = np.mat([[k ** i for i in order_range] for k in range(-half_window, half_window + 1)])
649 # m = np.linalg.pinv(b).A[deriv] * rate ** deriv * factorial(deriv)
650 # # pad the signal at the extremes with
651 # # values taken from the signal itself
652 # firstvals = y[0] - np.abs(y[1:half_window + 1][::-1] - y[0])
653 # lastvals = y[-1] + np.abs(y[-half_window - 1:-1][::-1] - y[-1])
654 # y = np.concatenate((firstvals, y, lastvals))
655 # return np.convolve(m[::-1], y, mode='valid')
656 
657 
658 # ------
659 # TW's plotting functions
660 

Member Data Documentation

◆ bestId

plotResults.Analysis.bestId

◆ obsNames

plotResults.Analysis.obsNames

◆ paramNames

plotResults.Analysis.paramNames

◆ posterior

plotResults.Analysis.posterior

◆ smcSamples

plotResults.Analysis.smcSamples

◆ smcTest

plotResults.Analysis.smcTest

The documentation for this class was generated from the following file: