iam-git / WellMet (public) (License: MIT) (since 2021-08-31) (hash sha1)
WellMet is a pure Python framework for spatial structural reliability analysis. Or, more specifically, for "failure probability estimation and detection of failure surfaces by adaptive sequential decomposition of the design domain".
List of commits:
Subject Hash Author Date (UTC)
simplex: implement separability-based sensibility analysis (new brand _Sense class) 9c5d58f2301893ceaec1b0e90bff76035cfa15b2 I am 2023-02-23 18:49:11
dicebox.circumtri.CirQTri: switch to GaussCubatureIntegration 5b52dd25cb7a997af4953230116deb9efc577d56 I am 2023-02-11 04:32:48
simplex: implement GaussCubatureIntegration in the most memory-friendly way 689d253ae7e2a22242258fd5bef0a069caf7cf75 I am 2023-02-11 04:31:11
convex_hull.QHullCubature: implement memory-friendly outside (chi) integration ad8210a04b1e0903de7435cad16b1304707d0e6e I am 2023-02-09 22:22:05
qt_gui.qt_plot: require box recommendation to show next point 6f726047f7f08e884359020eaa1eac6f6cc125d2 I am 2023-02-09 11:51:44
dicebox.circumtri.CirQTri.get_circum_node: limit circumcenter by explore radius, not by just R 136ec73bb06da16c1f2bce64b3c349be4c8ba975 I am 2023-02-09 03:09:51
dicebox.circumtri: implement new brand CirQTri box 5879b8ad6317c65aa9481b59f76b6159f19e247a I am 2023-02-09 01:29:10
simplex.FullCubatureIntegration: store simplex probabilities in sorted dicts c0da90731ff3ede47d9b4eec0ad9b28a29027167 I am 2023-02-09 01:23:14
dicebox.circumtri: exploratory: even better idea 811ab11cd7172ff4a3807992f4928be2e8068ec0 I am 2023-02-08 15:31:23
dicebox.circumtri: exploratory, new idea 526d3f6185887ff48a341b0705d74dde4d15ca87 I am 2023-02-08 03:03:41
dicebox.circumtri: exploratory 806063d2e223c812280dc4845153450dd47faed3 I am 2023-02-06 17:15:15
dicebox.circumtri: exploratory efed2589f642d502f30e80f0e9b45dfeecd1c7c7 I am 2023-02-06 13:40:24
dicebox.circumtri: exploratory - again 34d3f4e47420e1c1e26b09570fb44d3270194751 I am 2023-02-06 12:50:45
qt_gui.qt_dicebox: change default q of circumtri classes 9fd5855e5d7cacf80d27fb383dd18a92d60e138b I am 2023-02-06 12:30:27
dicebox.circumtri: tune up exploratory rule one more time bfaa8d65bd13956252a6a25382c621aca7a33e3f I am 2023-02-06 12:05:00
convex_hull.QHull.is_outside: mark everything as outside until convex hull is created beecd924a63603868807a8a603cbeb04857e5cde I am 2023-02-06 12:03:35
dicebox.circumtri: get_exploratory_radius: turn it up some more ecd638ca90bac88df515cf803a103e350b08041e I am 2023-02-03 10:58:37
mplot: ongoing ad-hoc changes fd71b93abee959231ffd76324cf2fbc0b3bbbdfe I am 2023-02-02 17:59:27
dicebox.circumtri: change default q setting 2a9bdc6604113462a935d409bca429b49ac52cbc I am 2023-02-02 11:55:41
dicebox.circumtri: minor change in explore radia 9aa06dc11be7c935dd47715c0aa53b401be951d4 I am 2023-02-01 23:37:13
Commit 9c5d58f2301893ceaec1b0e90bff76035cfa15b2 - simplex: implement separability-based sensibility analysis (new brand _Sense class)
Author: I am
Author date (UTC): 2023-02-23 18:49
Committer name: I am
Committer date (UTC): 2023-02-23 18:49
Parent(s): 5b52dd25cb7a997af4953230116deb9efc577d56
Signer:
Signing key:
Signing status: N
Tree: aab4be52bb2fb722f81b69d6bdd2da965dacf437
File Lines added Lines deleted
wellmet/simplex.py 299 2
File wellmet/simplex.py changed (mode: 100644) (index a272778..78b03d6)
... ... from . import f_models
9 9 from .candynodes import CandyNodes from .candynodes import CandyNodes
10 10 from scipy import spatial from scipy import spatial
11 11 from scipy import stats from scipy import stats
12 from scipy import optimize
12 13
13 14 import quadpy import quadpy
14 15
15 from collections import namedtuple
16 from collections import namedtuple, defaultdict
16 17 from sortedcollections import ValueSortedDict from sortedcollections import ValueSortedDict
17 18
18 19
 
... ... class DefaultDict(dict):
264 265 def __missing__(self, key): def __missing__(self, key):
265 266 return np.zeros(self.nvar, dtype=float) return np.zeros(self.nvar, dtype=float)
266 267
268
269
class _Sense:
    """Mixin for separability-based sensitivity analysis over a triangulation.

    Intended to be mixed into a Triangulation-like class (see
    GaussCubatureIntegration): it reads ``sx.tri``, ``sx.sample_box``,
    ``sx.is_mixed``, ``sx.get_events``, ``sx.get_simplex_normal`` and
    ``sx.get_simplex_probability`` from the host class.

    For every mixed (failure/safe) simplex it grows a neighborhood as deep
    as the red (failure) and green (safe) points remain linearly separable,
    re-fitting the separating hyperplane via linear programming when needed.
    The per-simplex normals, weighted by simplex probabilities, yield a
    global gradient estimate and per-variable sensitivities.
    """

    def _init(self):
        """Allocate reusable work buffers and the LP constraint matrix.

        Must be called after the triangulation exists; re-called on each
        perform_sensibility_analysis() run so sizes track the current tri.
        """
        # I'll try to use dual name convention here
        # self for this class, sensibility-related things
        # sx for general Triangulation class related attributes
        sx = self

        nsim, nvar = sx.tri.points.shape

        # reusable containers for process_simplex()'s breadth-first walk
        self._local_scope = dict()
        self._to_parse = set()
        self._parsed = set()

        self.points = sx.tri.points
        self.failsi = sx.sample_box.failsi
        self.non_failsi = ~self.failsi
        # preallocated scratch buffers — the procedure targets 5-6D,
        # where the goal is to avoid reallocating on every pass
        self._point_mask = np.empty_like(self.failsi)
        self._scalars = np.empty(len(self.failsi))
        self._boolmask = np.empty_like(self.failsi)



        self.c_zeros = np.zeros(nvar + 1) # prices in terms of LP
        self.b_ub = -np.ones(nsim)

        # A_ub encodes the separation constraints sign * (x.w - b) <= -1:
        # last column holds the bias term, rows are sign-flipped so that
        # failure points get the opposite inequality direction
        self.A_ub = np.empty((nsim, nvar + 1))
        self.A_ub[:, :-1] = sx.tri.points
        self.A_ub[:, -1] = self.b_ub
        self.A_ub *= (self.failsi * 2 - 1).reshape(-1, 1)


        # event id 2 marks mixed simplices; the extra trailing -1 entry
        # absorbs the -1 "no neighbor" sentinel in tri.neighbors indexing
        event_ids = np.empty(sx.tri.nsimplex + 1, dtype=np.int8)
        event_ids[:-1] = sx.get_events()
        event_ids[-1] = -1

        # True where a simplex's neighbor is itself a mixed simplex
        self.neighbors_mask = event_ids[sx.tri.neighbors] == 2




    # this could also be done via liblinear
    #from liblinear import liblinearutil
    #pr = liblinearutil.problem(box.failsi, box.G)
    #param = liblinearutil.parameter('-s 2 -c 10000 -B 1')
    #m = liblinearutil.train(pr, param)
    #m.get_decfun()
    def get_separation_axis(self, mask):
        """Solve the LP that separates failure from safe points under mask.

        Returns the scipy OptimizeResult; result.x[:-1] is the separating
        direction, result.x[-1] the bias. Feasibility == separability.
        """
        A_ub = self.A_ub[mask]
        # b_ub is constant -1, so a prefix slice of the right length suffices
        b_ub = self.b_ub[:len(A_ub)]
        return optimize.linprog(self.c_zeros, A_ub=A_ub, b_ub=b_ub,
                    options={'presolve':False}, method='highs-ds',
                    bounds=(None, None))


    def perform_sensibility_analysis(self):
        """Run the full separability-depth analysis over all mixed simplices.

        Returns (global_gradient, sensitivities, probabilities, depths,
        vectors): probability-weighted gradient and squared-normal
        sensitivities (normalized by total mixed probability), plus
        per-simplex probability, separation depth and separating vector.
        """
        # I'll try to use dual name convention here
        # self for this class, sensibility-related things
        # sx for general Triangulation class related attributes
        sx = self

        self._init()

        simplices = sx.tri.simplices
        mixed_mask = sx.is_mixed(simplices)
        #nmixed = np.count_nonzero(mixed_mask)


        mixed_probability = 0
        global_gradient = np.zeros(sx.sample_box.nvar)
        sensitivities = np.zeros(sx.sample_box.nvar)

        depths = defaultdict(int)
        vectors = dict()
        probabilities = dict()

        for simplex_id, is_mixed in zip(range(len(mixed_mask)), mixed_mask):
            if not is_mixed:
                continue

            indices = simplices[simplex_id]

            # the last item of vector is either nvar+1 coordinate
            # in case of get_simplex_normal()
            # or offset aka bias
            # in case of separation axis
            # i.e. we should ignore it anyway
            depth = depths[simplex_id]
            if depth:
                # reuse the vector a deeper neighbor already propagated here
                vector = vectors[simplex_id]
            else:
                vector = sx.get_simplex_normal(indices)[:-1]

            scope, normal = self.process_simplex(simplex_id, depth, vector)

            # scale to unit length
            length = np.sqrt(np.inner(normal, normal))
            np.divide(normal, length, out=normal)

            p_mixed = sx.get_simplex_probability(indices)
            mixed_probability += p_mixed
            probabilities[simplex_id] = p_mixed
            global_gradient += normal * p_mixed
            sensitivities += np.square(normal) * p_mixed

            vectors[simplex_id] = normal
            new_depth = scope[simplex_id]
            depths[simplex_id] = new_depth

            # propagate the (deeper) result to not-yet-visited simplices
            # in scope, so they can start from this vector and depth
            if new_depth > depth:
                for id, new_depth in scope.items():
                    if (id > simplex_id) and (new_depth > depths[id]):
                        vectors[id] = normal
                        depths[id] = new_depth


        #length = np.sqrt(np.inner(global_gradient, global_gradient))
        #global_gradient = global_gradient / length

        sensitivities /= mixed_probability

        return global_gradient, sensitivities, \
                probabilities, depths, vectors









    def is_separable(self, vector, point_mask):
        """Check whether vector linearly separates failure from safe points.

        Only points enabled by point_mask are considered. Uses the
        convention that red (failure) projections lie below green (safe).
        """
        # the procedure targets 5-6D,
        # where the goal is to avoid needless allocation
        # on every iteration
        scalars = self._scalars
        # vector already comes without its last (bias) component
        np.matmul(self.points, vector, out=scalars)
        mask = self._boolmask

        np.logical_and(point_mask, self.failsi, out=mask)
        # boolean indexing always allocates
        reds = scalars[mask]
        #min_red = np.min(reds)
        max_red = np.max(reds)

        np.logical_and(point_mask, self.non_failsi, out=mask)
        # boolean indexing always allocates
        greens = scalars[mask]
        min_green = np.min(greens)
        #max_green = np.max(greens)

        # ........... ,,,,,,,,,,,,,
        # min_i max_i min_j max_j
        #(max_green < min_red) != (max_red < min_green)

        # let's use vector's convention red < green
        return max_red < min_green


    def process_simplex(self, simplex_id, depth, vector):
        """Grow the separable neighborhood of one simplex, layer by layer.

        Starting from the known-separable scope of the given depth, extend
        by one neighbor ring at a time; whenever separability breaks, try
        to re-fit the axis via LP and stop at the last separable layer.
        Returns (local_scope, vector): per-simplex depths and the final
        separating direction (without the bias component).
        """
        # self for this class, sensibility-related things
        # sx for general Triangulation class related attributes
        sx = self


        local_scope = self._local_scope
        local_scope.clear()
        local_scope[simplex_id] = 0

        to_parse = self._to_parse
        to_parse.clear()

        parsed = self._parsed
        parsed.clear()

        simplices = sx.tri.simplices
        neighbors = sx.tri.neighbors
        neighbors_mask = self.neighbors_mask

        point_mask = self._point_mask
        point_mask[:] = False

        # seed the frontier with mixed neighbors and the seed's own points
        to_parse.update(neighbors[simplex_id][neighbors_mask[simplex_id]])
        point_mask[simplices[simplex_id]] = True

        # replay the already-known-separable depth without any LP checks
        for __i in range(depth):
            for key in local_scope.keys():
                local_scope[key] += 1

            to_parse -= local_scope.keys()
            to_parse, parsed = parsed, to_parse
            to_parse.clear()
            for id in parsed: # meant to be parsed in the cycle's end
                to_parse.update(neighbors[id][neighbors_mask[id]])
                point_mask[simplices[id]] = True
                local_scope[id] = 0


        assert local_scope[simplex_id] == depth

        npoints_now = np.count_nonzero(point_mask)
        assert self.is_separable(vector, point_mask)

        # the loop runs only while there is work to do
        to_parse -= local_scope.keys()
        while len(to_parse): # anything left to do?

            # process the batch of simplices, add them to the dict,
            # "switch on" the samples attached to them
            to_parse, parsed = parsed, to_parse
            to_parse.clear()
            for id in parsed: # meant to be parsed in the cycle's end
                to_parse.update(neighbors[id][neighbors_mask[id]])
                point_mask[simplices[id]] = True
                local_scope[id] = -1

            # check separability.
            # If it cannot be ensured, finish with depth and vector
            # left untouched
            npoints_before, npoints_now = npoints_now, np.count_nonzero(point_mask)
            #if npoints_now == npoints_before: print("Sense: no new points to separate")
            if npoints_now > npoints_before:
                if not self.is_separable(vector, point_mask):
                    result = self.get_separation_axis(point_mask)
                    if not result.success: # done, we stop here
                        if result.status != 2: # Problem appears to be infeasible.
                            print("Sense: linprog ended with status %s" % result.status)
                        return local_scope, vector

                    # immediately cut off the last component -
                    # it holds the shift b aka offset aka bias
                    vector = result.x[:-1]

            # if we got here, the samples were separable,
            # time to increase the depth
            for key in local_scope.keys():
                local_scope[key] += 1

            # the loop runs only while there is work to do
            to_parse -= local_scope.keys()


        return local_scope, vector
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
267 539
268 540
269 541
 
... ... class _Triangulation:
764 1036 return np.logical_xor(has_failure, all_failure) return np.logical_xor(has_failure, all_failure)
765 1037
766 1038
def get_simplex_normal(sx, indices):
    """Return the unit normal of a simplex's failure-indicator hyperplane.

    Builds the nvar edge vectors of the simplex augmented with the
    failure-indicator difference as an (nvar+1)-th coordinate, then takes
    the vector that completes the orthogonal basis (complete-mode QR).
    The normal is oriented upwards, i.e. towards the safe samples
    (non-negative last component).
    """
    nvar = sx.sample_box.nvar
    failsi = sx.sample_box.failsi
    points = sx.tri.points

    edges = np.empty((nvar, nvar + 1))
    edges[:, :-1] = points[indices[1:]] - points[indices[:1]]
    # numpy refuses boolean subtraction — assign first,
    # then subtract the reference vertex's indicator
    edges[:, -1] = failsi[indices[1:]]
    edges[:, -1] -= failsi[indices[0]]

    basis, __ = np.linalg.qr(edges.T, mode='complete')
    # the extra column of the complete basis is orthogonal to all edges
    normal = basis[:, -1]

    # flip so the normal points towards the safe samples
    return normal * (-1.0 if normal[-1] < 0 else 1.0)
1058
1059
767 1060 def get_averaged_mixed_normals(sx, depth=2): def get_averaged_mixed_normals(sx, depth=2):
768 1061 simplices = sx.tri.simplices simplices = sx.tri.simplices
769 1062
 
... ... class FastCubatureIntegration(FullCubatureIntegration):
2024 2317
2025 2318
2026 2319
2027 class GaussCubatureIntegration(_Triangulation):
2320 class GaussCubatureIntegration(_Triangulation, _Sense):
2028 2321 def __init__(sx, sample_box, tn_scheme, incremental=True, full=False): def __init__(sx, sample_box, tn_scheme, incremental=True, full=False):
2029 2322
2030 2323 sx.tri_setup(sample_box, tri_space='G', incremental=incremental) sx.tri_setup(sample_box, tri_space='G', incremental=incremental)
 
... ... class GaussCubatureIntegration(_Triangulation):
2080 2373 print("Negative measure %s has occured in simplex %s" % (mean, indices)) print("Negative measure %s has occured in simplex %s" % (mean, indices))
2081 2374 return vol, 0 return vol, 0
2082 2375
def get_simplex_probability(sx, indices):
    """Return the stored mixed-event probability of a simplex, normalized.

    Looks the simplex up in sx.mixed_simplices by the byte representation
    of its vertex indices and divides by the sx._norm_pdf_C constant.
    """
    record = sx.mixed_simplices[indices.tobytes()]
    p_mixed, __pfv, __pfw, __pf = record
    return p_mixed / sx._norm_pdf_C
2083 2380
2084 2381
2085 2382 def get_pf_estimation(sx): def get_pf_estimation(sx):
Hints:
Before your first commit, do not forget to set up your git environment:
git config --global user.name "your_name_here"
git config --global user.email "your@email_here"

Clone this repository using HTTP(S):
git clone https://rocketgit.com/user/iam-git/WellMet

Clone this repository using ssh (do not forget to upload a key first):
git clone ssh://rocketgit@ssh.rocketgit.com/user/iam-git/WellMet

Clone this repository using git:
git clone git://git.rocketgit.com/user/iam-git/WellMet

You are allowed to anonymously push to this repository.
This means that your pushed commits will automatically be transformed into a merge request:
... clone the repository ...
... make some changes and some commits ...
git push origin main