iam-git / WellMet (public) (License: MIT) (since 2021-08-31) (hash sha1)
WellMet is a pure Python framework for spatial structural reliability analysis. Or, more specifically, for "failure probability estimation and detection of failure surfaces by adaptive sequential decomposition of the design domain".
List of commits:
Subject Hash Author Date (UTC)
voronoi: WIP cbb61d4544367a67c9a5c3fe6d64a9dc284eb364 I am 2022-10-18 05:05:44
voronoi: WIP e9c247fef30cfb678bad9b955d6f6a94a0ff61e7 I am 2022-10-18 01:02:30
candynodex: hotfix to support numpy masking indexing f359123b72a3d998d5eed4b9e611a5e402a18c8d I am 2022-10-17 09:02:33
add new lightweight candynodes module to replace old heavy ugly candybox ca97509f47c100df90e1e06d82ed7c759627bfd0 I am 2022-10-17 06:31:33
qt_gui: move CandyBox creating to the individual DiceBox widgets 29253625129219b5e550f82cae3da0e53ef5bd47 I am 2022-10-16 03:17:55
voronoi: WIP 4855190869e06df740cc05c64e8b27b2c3c5a88d I am 2022-10-15 22:46:51
voronoi: WIP 0298b3a0563587ea4fc24fa99b51d2e01d517203 I am 2022-10-15 20:00:25
voronoi: WIP 4b80757fa26e3d40dceac888480ec3d814f6abc9 I am 2022-10-15 17:30:11
voronoi: WIP 7cadd1d01d7f4bc3f51e785fe6ebf8cae6ff839a I am 2022-10-15 03:12:54
voronoi: clean up and comment out ContactSolver 543f8482a925ff6cf8485b9be616b5bd3c714d13 I am 2022-10-14 04:24:40
qt_gui.qt_gui: use ConvexSolver instead of ContactSolver (which did not work correctly) c1e49a9b2c3cc13886d317b460d7598f661cf216 I am 2022-10-14 04:01:06
voronoi: add fantastic ConvexSolver 2853826e5ef1abc79d3ac2fb8289b13c45211a31 I am 2022-10-14 04:00:14
qt_gui.qt_plot: add (finally!) Numbers class 4cdc658c0fcc857c23c0e39e91e7a1ce5e1b30a1 I am 2022-10-13 06:23:46
qt_gui.qt_gui: show contacts in distance matrix. Based on ContactSolver for now 21bd6101888d9f06d7d6a7c6ba2732ff30fdd68d I am 2022-10-13 04:16:41
voronoi: ContactSolver is ready. Ale je to na nic. Pomalá, potvora 28e2442b0101eac2407ed4a69c6f757ffd579cf1 I am 2022-10-13 04:14:59
voronoi: add some preliminary lines of ContactSolver. WIP 6a203b278c9fa6de7da75c402c80f78d40164fdf I am 2022-10-12 23:38:59
voronoi: přídat pár těžkých smyček. WIP 7b0ffb602ae00ab2f12dc98d34c35ec20afa3cc4 I am 2022-10-12 00:13:36
add to repo voronoi module. WIP f43768c22d88fac7c7095be02d8c9b5620153976 I am 2022-10-11 03:29:49
convex_hull.Grick: malý puntičkářský fix v get_r() cfa07257b9ebadc21f2bd295d35978072fac19e9 I am 2022-10-10 07:14:26
qt_gui.qt_plot: add Grick to Ghull widget d90b9a87c3ef0989951a45b2a22346381ce16f02 I am 2022-10-10 03:17:24
Commit cbb61d4544367a67c9a5c3fe6d64a9dc284eb364 - voronoi: WIP
Author: I am
Author date (UTC): 2022-10-18 05:05
Committer name: I am
Committer date (UTC): 2022-10-18 05:05
Parent(s): e9c247fef30cfb678bad9b955d6f6a94a0ff61e7
Signer:
Signing key:
Signing status: N
Tree: 0301d4d8fc5e94c3bfcf2324112c8411fd5a152a
File Lines added Lines deleted
wellmet/voronoi.py 295 18
File wellmet/voronoi.py changed (mode: 100644) (index 35fd8bb..5f798fd)
... ... class ContactVoronoi:
311 311 self.red_couples = {} ## {couple_indices: node_idx} self.red_couples = {} ## {couple_indices: node_idx}
312 312 self._add_indices_to_update(j) self._add_indices_to_update(j)
313 313 self._nodes_to_check_outsides = [] #č seznam indexů, projedu jednoduše smyčkou self._nodes_to_check_outsides = [] #č seznam indexů, projedu jednoduše smyčkou
314
315
316 Nodes pipeline:
317 0. Sampling: generate coordinates
318 1. _store_masked(): Filter out, create CandyNodes
319 and (optionally) assign dd1, dd2, ii1, ii2
320 2. assign weights (w) such as sum(w)=1 over entire domain
321 3. score(): assign eig, barycenter and trace-based so called "score"
314 322 """ """
315 323
316 324 def _update(self): def _update(self):
 
... ... class ContactVoronoi:
435 443 # #č a na ně vykašle # #č a na ně vykašle
436 444 # self._indices_to_update.add((max(j,k), min(j,k))) # self._indices_to_update.add((max(j,k), min(j,k)))
437 445
446 def _recommend_to_integrate(self, nodes_idx):
447 #č Má cenu integrovat pouze nejreprezentativnější sadu bodů.
448 #č Špatné sady je velmi obtižné správně započítavat
449 #č aby nezhoršovaly výsledek.
450 #č Možná TrueIS by si mohl s tím poradit, ale
451 #č zde je to totálná zbytečnost
452 # -1 = 'outside', 0=success, 1=failure, 2=mix
453 couple = self.nodes[nodes_idx].couple
454 if self.nodes[nodes_idx].event_id == 2:
455 self.mixed_couples[couple] = nodes_idx
456 else:
457 self.red_couples[couple] = nodes_idx
458
438 459
439 460 def update_couple(self, couple_indices, event_id): def update_couple(self, couple_indices, event_id):
440 461 pass pass
 
... ... class ContactVoronoi:
494 515
495 516 #č uspěch. Dodáme bodíkům vahy #č uspěch. Dodáme bodíkům vahy
496 517 nodes = self.nodes[idx] nodes = self.nodes[idx]
497 w = nodes.pdf(self.model_space) / pdf[mask]
518 w = nodes.pdf(self.model_space) / pdf[mask] / self.ns
498 519 nodes.w = w nodes.w = w
520 #č a doporučíme (dočasně) k integraci
521 self._recommend_to_integrate(idx)
499 522
500 #č a spustíme adaptivní IS vzorkování
501 self.init_sampling()
523 if len(nodes) > ndim:
524 #č a spustíme adaptivní IS vzorkování
525 self.init_sampling(nodes, couple_indices, event_id)
502 526
503 527
504 528
529 def init_sampling(self, nodes, couple_indices, event_id):
530 """
531 č zde jenom vzorkujeme;
532 č integrování, vyhodnocování je jinde a později!
533 """
505 534
506 535
536 # nechám s velkým písmenkem
537 Vor_mask = ii==i
538 h_plan_model_ma = h_plan_model[Vor_mask]
539 weights_sim = h_plan.w[Vor_mask]
540 # dd1 jsou vzdalenosti tečiček do centra Voroneho buňky
541 dd1 = dd[Vor_mask]
507 542
508 def IS_norm(f, mean=0, std=1, sampling_space='G', nis=1000, design=None):
509 """
510 mean: [0.05, 2, 100500]
511 std: [0.05, 2, 100500]
512 543
513 design(nis, nvar) should return sampling plan in hypercube (U space)!
514 """
544 nis_ma = len(weights_sim)
545
546 h_plan_sing_ma = getattr(h_plan, sampling_space)[Vor_mask]
547
548 nodes_model = getattr(nodes, self.model_space)
549 nodes_pdf = nodes.pdf(self.model_space)
550 S_bc = np.cov(nodes_model, rowvar=False, bias=True, aweights=nodes_pdf)
551 barycenter = np.average(nodes_model, axis=0, weights=nodes_pdf)
552 trace = np.trace(S_bc)
553 nodes.set_attrs(S_bc=S_bc, barycenter=barycenter, trace=trace)
554
555 # matika
556 w, v = np.linalg.eig(S_bc)
557
558 if np.any(w <= 0):
559 return None
560
561 goal = trace*len(nodes)
562
563 # effective nis
564 nis_eff = nis
565 #print("start", i)
566
567 max_trace = 0
568 while max_trace < trace:
569 trace = np.trace(S_bc)
570
571 # matika
572 w, v = np.linalg.eig(S_bc)
573
574 # r_ball/r_base = sigma
575
576 # use IS sampling density with center equal to the cell's barycenter
577 # set the minimum distance as the standard deviation of the IS densisty
578 # u stats.norm zadáváme směrodatnou odchylku, to je asi správné
579 sigmas = np.sqrt(w) / base_r #(sample_box.nvar+2) #! dosadit standard deviation podle chutí
580 h_i = [stats.norm(0, sigmas[j]) for j in range(sample_box.nvar)]
581 # rozdělení ve vlastním prostoru
582 # select nis = 100 points from IS density
583 h_L = f_models.UnCorD(h_i)(nis)
584
585 # здесь уже так легко не отделаемся. Трансформовать кароно.
586 h_plan_bc = (v @ h_L.R.T).T
587 h_plan_sing = h_plan_bc + barycenter
588
589 # sice to má nazev h_plan, ale nese rozdělení a hustoty v f-ku
590 h_plan_part = f.new_sample(h_plan_sing, sampling_space)
591
592
593 # jdeme na ty hustoty
594 # mně příjde, že je to legalní
595 # sice samply podporujou maskovaní, to je ale drahé
596 weights_sim_part = h_plan_part.pdf(sampling_space) / h_L.pdf('R') # snad je to správně
597 h_plan.add_sample(CandyBox(h_plan_part, w=weights_sim_part))
598
599 # vyfiltrujeme vzorky
600 h_plan_model_part = getattr(h_plan_part, model_space)
601 dd, ii = tree.query(h_plan_model_part, k=1, p=p_norm)
602
603 # parta
604 Vor_mask_part = ii==i
605 weights_sim_part = weights_sim_part[Vor_mask_part]
606
607 nis_ma = len(weights_sim_part)
608
609 # zajišťovat Vor_mask je docela zbytečně, je to jen pro out_nodes,
610 # které se zatím nikdě nepouživá
611 Vor_mask = np.append(Vor_mask, Vor_mask_part)
612
613
614
615
616 h_plan_model_ma = np.vstack((h_plan_model_ma, h_plan_model_part[Vor_mask_part]))
617 weights_sim = np.append(weights_sim, weights_sim_part)
618 # dd1 jsou vzdalenosti tečiček do centra Voroneho buňky
619 dd1 = np.append(dd1, dd[Vor_mask_part])
620
621 # zkusím těžiště počitat jen pro partu - možná tak algoritmus bude agresivnější?
622 #barycenter = np.average(h_plan_sing[Vor_mask_part], axis=0, weights=weights_sim_part)
623 h_plan_sing_ma = np.vstack((h_plan_sing_ma, h_plan_sing[Vor_mask_part]))
624 #S_bc = np.cov(h_plan_sing[Vor_mask_part], rowvar=False, bias=True, aweights=weights_sim_part)
625 S_bc = np.cov(h_plan_sing_ma, rowvar=False, bias=True, aweights=weights_sim)
626 barycenter = np.average(h_plan_sing_ma, axis=0, weights=weights_sim)
627 nis_eff += nis
628
629
630
631 #print(S_bc)
632 #print(nis_ma)
633
634
635
636
637 cell_stats = dict()
638 # musí sa (na konci) rovnat jedne
639 # opravdu dělíme nis'em, jako v normálním IS
640 # nikoliv počtem příjatých bodíků,
641 # protože průměrná vaha je o hodně mene významná metrika
642 cell_stats['cell_probability'] = np.sum(weights_sim) / nis_eff
643
644
645 # tu bacha!
646 # takhle se počíta, pokud se netrapíme gradijentem
647 # a je to trošiňku optimizovaný, takže čert se nevyzná
648 if gradient is None:
649 # indexy ii nás moc nezajimajou
650 # vzdalenosti snad byjsme zvladli použit?
651
652
653 dd2, ii2 = tree.query(h_plan_model_ma, k=[2], p=p_norm)
654 dd2 = dd2.flatten()
655 ii2 = ii2.flatten()
656
657 # tahle hračka s indexy je pro numpy poměrně drahá
658 failsii_2 = failsi[ii2]
659 # jeden vzorek (včetně hustoty PDF[i]) je nám vždy znám
660 # porucha
661 if failsi[i]:
662 points_1 = PDF[i] * dd2
663 node_pf_estimations = points_1 / (points_1 + PDF[ii2] * dd1)
664 node_pf_estimations = np.where(failsii_2,1, node_pf_estimations)
665 node_pf_pure_estimations = dd2 / (dd1 + dd2)
666 node_pf_pure_estimations = np.where(failsii_2,1, node_pf_pure_estimations)
667
668 cell_stats['Voronoi_2_point_upper_bound'] = cell_stats['cell_probability']
669 cell_stats['Voronoi_2_point_failure_rate'] = np.sum(weights_sim * node_pf_estimations) / nis_eff
670 cell_stats['Voronoi_2_point_pure_failure_rate'] = np.sum(weights_sim * node_pf_pure_estimations) / nis_eff
671 cell_stats['Voronoi_2_point_lower_bound'] = np.sum(weights_sim[failsii_2]) / nis_eff
672 cell_stats['Voronoi_failure_rate'] = cell_stats['cell_probability']
673 nodes=CandyBox(h_plan.sampling_plan[Vor_mask], w=weights_sim, node_pf_estimations=node_pf_estimations,\
674 node_pf_pure_estimations=node_pf_pure_estimations, dd1=dd1, dd2=dd2, ii2=ii2)
675
676 # neporucha
677 else:
678 dd1 = dd1[failsii_2]
679 dd2 = dd2[failsii_2]
680 points_1 = PDF[i] * dd2
681 points_2 = PDF[ii2[failsii_2]] * dd1
682
683 node_pf_estimations = points_2 / (points_1 + points_2)
684 node_pf_pure_estimations = dd1 / (dd1 + dd2)
685
686 cell_stats['Voronoi_2_point_upper_bound'] = np.sum(weights_sim[failsii_2]) / nis_eff
687 cell_stats['Voronoi_2_point_failure_rate'] = np.sum(weights_sim[failsii_2]*node_pf_estimations) / nis_eff
688 cell_stats['Voronoi_2_point_pure_failure_rate'] = np.sum(weights_sim[failsii_2] * node_pf_pure_estimations) / nis_eff
689 cell_stats['Voronoi_2_point_lower_bound'] = 0
690 cell_stats['Voronoi_failure_rate'] = 0
691 nodes=CandyBox(h_plan.sampling_plan[Vor_mask][failsii_2], w=weights_sim[failsii_2], node_pf_estimations=node_pf_estimations,\
692 node_pf_pure_estimations=node_pf_pure_estimations, dd1=dd1, dd2=dd2, ii2=ii2[failsii_2])
693
694 # take something with corresponding length
695 zeros = np.zeros(len(weights_sim) - len(dd2))
696 # add remaining nodes
697 nodes.add_sample(CandyBox(h_plan.sampling_plan[Vor_mask][~failsii_2], w=weights_sim[~failsii_2], node_pf_estimations=zeros,\
698 node_pf_pure_estimations=zeros, ii2=ii2[~failsii_2]))
515 699
516 sampling_plan_N, pdf = IS_stat.get_norm_plan(nis, f.nvar, mean, std, design)
517 700
518 #č tady musíme provést jeden trik
519 #č totež jako v IS_like - ve výsledku dycky dostaneme f_model
520 to_sample = f.new_sample(sampling_plan_N, sampling_space) #č naše N-ko smerdžíme ako G-čko
521 w = to_sample.pdf(sampling_space) / pdf #č snad je to správně
701 # takhle - pokud chceme gradient použit
702 # je třeba eště zoptimalizovať
703 else:
704 # kolik bodíků jsou nejbližší k mému vzorečkovi
705 # np.empty() implicitně má dtype=float
706 # tyhle blbosti ponechám jen kvůli callbackovi
707 node_pf_estimations = np.empty(len(h_plan_model_ma))
708 node_pf_pure_estimations = np.empty(len(h_plan_model_ma))# pure distance estimation
709 node_failsi = np.empty(len(h_plan_model_ma), dtype=np.bool) # for L1 Voronoi # co to je za L1 Voronoi?
710
711 # projdeme přes každej bodíček
712 for node_idx in range(len(h_plan_model_ma)):
713 # KDTree byl použit jen k rozdělení na disjunktní úseky, veškerej děj se odehravá tu
714 # a to je všechno kvůli gradientu
715 node = h_plan_model_ma[node_idx]
716 # axis=1 - sčítá všechy směry dohromady, vysledkem je 1D pole rozměru nsim
717 inode2points_model_matrix = np.sum(np.abs(((sampled_plan_model - node) * gradient(node))**p_norm), axis=1)
718 #print(inode2points_Rd_matrix)
719
720 """
721 partition -
722 Creates a copy of the array with its elements rearranged in such a way that
723 the value of the element in k-th position is in the position it would be in a sorted array.
724 All elements smaller than the k-th element are moved before this element
725 and all equal or greater are moved behind it. The ordering of the elements in the two partitions is undefined.
726 """
727 idx = np.argpartition(inode2points_model_matrix, 1) # musí tu bejt 1, počítá sa od nuly
728 # je to správný, neboť numpy zaručuje, že druhej prvek (s indexem 1) bude na druhem místě
729 node_failsi[node_idx] = failsi[idx[0]]
730
731
732 points_weight = PDF[idx[0]] / inode2points_model_matrix[idx[0]] + PDF[idx[1]] / inode2points_model_matrix[idx[1]]
733 points_distances = 1 / inode2points_model_matrix[idx[0]] + 1 / inode2points_model_matrix[idx[1]]
734
735 failure_weight = int(failsi[idx[0]]) * PDF[idx[0]] / inode2points_model_matrix[idx[0]]
736 failure_weight += int(failsi[idx[1]]) * PDF[idx[1]] / inode2points_model_matrix[idx[1]]
737
738 failure_distance = int(failsi[idx[0]]) / inode2points_model_matrix[idx[0]] + int(failsi[idx[1]]) / inode2points_model_matrix[idx[1]]
739
740
741 node_pf_estimations[node_idx] = failure_weight/points_weight
742 node_pf_pure_estimations[node_idx] = failure_distance/points_distances
743
744
745 cell_stats['Voronoi_2_point_upper_bound'] = np.sum(h_plan.w[Vor_mask]*np.ceil(node_pf_estimations)) / nis_eff
746 cell_stats['Voronoi_2_point_failure_rate'] = np.sum(h_plan.w[Vor_mask]*node_pf_estimations) / nis_eff
747 cell_stats['Voronoi_2_point_pure_failure_rate'] = np.sum(h_plan.w[Vor_mask]*node_pf_pure_estimations) / nis_eff
748 cell_stats['Voronoi_2_point_lower_bound'] = np.sum(h_plan.w[Vor_mask]*np.floor(node_pf_estimations)) / nis_eff
749 cell_stats['Voronoi_failure_rate'] = np.sum(h_plan.w[Vor_mask]*node_failsi) / nis_eff
750
751 nodes=CandyBox(h_plan.sampling_plan[Vor_mask], w=h_plan.w[Vor_mask], node_pf_estimations=node_pf_estimations,\
752 node_pf_pure_estimations=node_pf_pure_estimations, node_failsi=node_failsi)
753
754
755
756 for key, value in cell_stats.items():
757 global_stats[key] += value
758
759 # kolbek ↓
760 if failsi[i]:
761 cell_stats['event'] = 'failure'
762 else:
763 cell_stats['event'] = 'success'
764 callback(estimation=estimation, nodes=nodes, cell_stats=cell_stats, out_nodes=h_plan[~Vor_mask])
765
766 # -1 = 'outside', 0=success, 1=failure, 2=mix
767 if event_id == 2:
768 self.mixed_couples.add(couple_indices)
769 if self.on_add_mixed is not None:
770 self.on_add_mixed(idx)
771 else:
772 self.red_couples.add(couple_indices)
773
774
775
776 # find distance to the nearest sampling point (from all points)
777 dd2, ii2 = tree_sampling.query(sampled_plan_sing, k=[2], p=2)
778 mindist_sing = dd2.flatten()
779
780
781
782
783 def score(self, nodes):
784
785 if len(nodes) < nodes.nvar + 1:
786 # in case of update reset the final score only
787 nodes.score = -2
788 return None
789
790 nodes_model = getattr(nodes, self.model_space)
791 nodes_pdf = nodes.pdf(self.model_space)
792 S_bc = np.cov(nodes_model, rowvar=False, bias=True, aweights=nodes_pdf)
793
794 #č matika
795 w, v = eig = np.linalg.eig(S_bc)
796
797 if not np.all(w > 0):
798 nodes.score = -1
799 return None
522 800
523 #č vahy máme
524 #č zabalme do boxu
525 #č zbytek už nejsou naši starosti
526 return CandyBox(to_sample, w=w)
801 barycenter = np.average(nodes_model, axis=0, weights=nodes_pdf)
802 nodes.set_attrs(eig=eig, barycenter=barycenter)
803 nodes.score = np.trace(S_bc) * np.sqrt(len(nodes))
527 804
528 805
529 806
Hints:
Before first commit, do not forget to setup your git environment:
git config --global user.name "your_name_here"
git config --global user.email "your@email_here"

Clone this repository using HTTP(S):
git clone https://rocketgit.com/user/iam-git/WellMet

Clone this repository using ssh (do not forget to upload a key first):
git clone ssh://rocketgit@ssh.rocketgit.com/user/iam-git/WellMet

Clone this repository using git:
git clone git://git.rocketgit.com/user/iam-git/WellMet

You are allowed to anonymously push to this repository.
This means that your pushed commits will automatically be transformed into a merge request:
... clone the repository ...
... make some changes and some commits ...
git push origin main