From 4d3fa2dd5b2e8c67f1625d752b1cf1a2894d8b18 Mon Sep 17 00:00:00 2001
From: Ryan <ryan@canvas.build>
Date: Sun, 10 Apr 2022 15:57:20 -0700
Subject: [PATCH] added unscented_GT evaluate_from_sigma_points and test cases

---
 .../evaluate_from_sigma_points.cpython-38.pyc | Bin 0 -> 715 bytes
 ...ralized_unscented_transform.cpython-38.pyc | Bin 0 -> 1583 bytes
 .../evaluate_from_sigma_points.py             |  38 +++++
 .../generalized_unscented_transform.py        | 134 ++++++++++++++++++
 .../random_variable_test.py                   |  42 ++++++
 5 files changed, 214 insertions(+)
 create mode 100644 src/generalized_unsented_transform/__pycache__/evaluate_from_sigma_points.cpython-38.pyc
 create mode 100644 src/generalized_unsented_transform/__pycache__/generalized_unscented_transform.cpython-38.pyc
 create mode 100644 src/generalized_unsented_transform/evaluate_from_sigma_points.py
 create mode 100644 src/generalized_unsented_transform/generalized_unscented_transform.py
 create mode 100644 src/generalized_unsented_transform/random_variable_test.py

diff --git a/src/generalized_unsented_transform/__pycache__/evaluate_from_sigma_points.cpython-38.pyc b/src/generalized_unsented_transform/__pycache__/evaluate_from_sigma_points.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..f46aad2a26990e15db0104cc86dcb03e11cf0bfe
Binary files /dev/null and b/src/generalized_unsented_transform/__pycache__/evaluate_from_sigma_points.cpython-38.pyc differ
diff --git a/src/generalized_unsented_transform/__pycache__/generalized_unscented_transform.cpython-38.pyc b/src/generalized_unsented_transform/__pycache__/generalized_unscented_transform.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..3349b0f03ac31167b4a9925486777134f77206ea
Binary files /dev/null and b/src/generalized_unsented_transform/__pycache__/generalized_unscented_transform.cpython-38.pyc differ
diff --git a/src/generalized_unsented_transform/evaluate_from_sigma_points.py b/src/generalized_unsented_transform/evaluate_from_sigma_points.py
new file mode 100644
index 0000000..1a82ac3
--- /dev/null
+++ b/src/generalized_unsented_transform/evaluate_from_sigma_points.py
@@ -0,0 +1,38 @@
+from numpy.linalg import matrix_power
+import numpy as np
+
+
+'''
+    Evaluate the sample statistics of the sigma points of an unscented
+    transform.
+
+    INPUTS
+
+    sigma_points - Matrix of sigma points
+    weights      - Vector of weights corresponding to each sigma point
+
+    OUTPUTS
+
+    sigma_mean - Sample mean of the sigma points
+    sigma_cov  - Sample covariance matrix of the sigma points
+    sigma_skew - Sample diagonal component of the skewness tensor
+    sigma_kurt - Sample diagonal component of the kurtosis tensor
+'''
+
+def Evaluate_sample_statistics(sigma_points, weights):
+
+    n = sigma_points.shape[0]
+    # weights = weights.reshape(1, -1)  # Convert to a row vector (needs testing)
+    # Weighted sample mean
+    sigma_mean = np.sum(np.matmul(sigma_points, np.tile(weights, (n, 1))))
+    # Weighted sample covariance
+    Z = sigma_points - sigma_mean
+    sigma_cov = np.sum(Z * np.diag(np.array(np.transpose(weights))[0]) * np.transpose(Z))
+    # Diagonal component of the skewness tensor
+    sigma_skew = np.sum(np.matmul(np.power(Z, 3), np.tile(weights, (n, 1))))
+    # Diagonal component of the kurtosis tensor
+    sigma_kurt = np.sum(np.matmul(np.power(Z, 4), np.tile(weights, (n, 1))))
+    return sigma_mean, sigma_cov, sigma_skew, sigma_kurt
diff --git a/src/generalized_unsented_transform/generalized_unscented_transform.py b/src/generalized_unsented_transform/generalized_unscented_transform.py
new file mode 100644
index 0000000..3800af8
--- /dev/null
+++ b/src/generalized_unsented_transform/generalized_unscented_transform.py
@@ -0,0 +1,134 @@
+import numpy as np
+from scipy.linalg import sqrtm
+from numpy.linalg import matrix_power, solve
+import warnings
+
+
+'''
+INPUTS
+
+mu     - Mean of the random vector
+P      - Covariance matrix
+x_skew - Vector of diagonal components of the skewness tensor
+x_kurt - Vector of diagonal components of the kurtosis tensor
+lb     - Vector of lower bounds on the state
+ub     - Vector of upper bounds on the state
+
+OUTPUTS
+
+x       - Matrix of sigma points
+weights - Vector of weights corresponding to each sigma point
+s       - Vector of proportionality constants used to generate
+          the sigma points
+'''
+def generalized_ut(mu, P, x_skew=None, x_kurt=None, lb=None, ub=None):
+
+    # Get the number of states
+    n = len(mu)
+    # Evaluate the matrix square root
+    # (a singular value decomposition could be used instead:
+    #  U1, S, Vh = np.linalg.svd(P); C = U1 @ np.diag(np.sqrt(S)) @ U1.T)
+    C = sqrtm(P)
+    # Handle the arguments for skewness and kurtosis
+    if x_skew is None:  # No diagonal component of skewness specified
+        warnings.warn('No skewness specified: Gaussian skewness and kurtosis are assumed')
+        x_skew = 0 * mu          # Assume Gaussian (zero) diagonal skewness
+        x_kurt = 3 * np.ones(n)  # Assume Gaussian diagonal kurtosis in turn
+
+    if x_kurt is None:  # No diagonal component of kurtosis specified
+        warnings.warn('No kurtosis specified: kurtosis is selected to satisfy the skewness-kurtosis relationship')
+        # Choose a kurtosis consistent with the skewness-kurtosis relationship
+        x_kurt = matrix_power(C, 4) * matrix_power(solve(matrix_power(C, 3), x_skew), 2)
+        x_kurt = 1.1 * x_kurt  # inflate slightly so the bound holds strictly
+
+    # Handle the case where the specified kurtosis violates the
+    # skewness-kurtosis relationship
+    minkurt = matrix_power(C, 4) * matrix_power(solve(matrix_power(C, 3), x_skew), 2)
+
+    if np.sum(x_kurt < minkurt):
+        warnings.warn('Specified kurtosis does not correspond to a valid distribution; '
+                      'raising it to satisfy the skewness-kurtosis relationship')
+        for i in range(len(x_kurt)):
+            if x_kurt[i] < minkurt[i]:
+                x_kurt[i] = 1.001 * minkurt[i]
+
+    # Handle the arguments for the lower and upper bounds
+    if lb is None:  # If no lower bound is specified, set it to -inf
+        lb = -np.inf * np.ones(n)
+
+    if ub is None:  # If no upper bound is specified, set it to +inf
+        ub = np.inf * np.ones(n)
+
+    # Calculate the parameters u and v
+    u = 0.5 * (-solve(matrix_power(C, 3), x_skew)
+               + np.sqrt(4 * solve(matrix_power(C, 4), x_kurt)
+                         - 3 * matrix_power(solve(matrix_power(C, 3), x_skew), 2)))
+    v = u + solve(matrix_power(C, 3), x_skew)
+    # Generate the sigma points
+    x0 = mu
+    x1 = mu - C * np.diag(u)
+    x2 = mu + C * np.diag(v)
+
+    # # --------------- This section handles the constraints ---------------
+    # Flag_constrain = 0  # Default flag to enforce the constraints
+    # # Check if the mean violates the constraints
+    # if np.subtract(mu, lb).min() < 0 or np.subtract(ub, mu).min() < 0:
+    #     Flag_constrain = 1  # Set flag to avoid enforcing state constraints
+    #     warnings.warn('Unable to enforce constraints: the mean does not satisfy lb < mean < ub')
+
+    # if Flag_constrain == 0:
+    #     theta = 0.9  # Default value of the user-defined slack parameter
+
+    #     # Ensure the lower bound 'lb' is not violated
+    #     Temp1 = np.subtract(np.hstack((x1, x2)), lb)
+    #     L1 = np.nonzero(Temp1.min() < 0)  # Locations of sigma points that violate the lower bound
+    #     Flag_calc = 0  # Flag that determines whether the skewness can still be matched
+    #     for i in range(len(L1)):
+    #         if L1[i] <= n:
+    #             # Recalculate 'u' to satisfy the lower bound 'lb'
+    #             u[L1[i]] = theta * np.min(np.abs(np.subtract(mu, lb) / C[:, L1[i]]))
+    #         else:
+    #             # Recalculate 'v' to satisfy the lower bound 'lb'
+    #             v[L1[i] - n] = theta * np.min(np.abs(np.subtract(mu, lb) / C[:, L1[i] - n]))
+    #             Flag_calc = 1  # Set flag
+
+    #     # Regenerate the sigma points
+    #     x1 = mu - C * np.diag(u)
+    #     x2 = mu + C * np.diag(v)
+
+    #     # Ensure the upper bound 'ub' is not violated
+    #     Temp2 = ub - np.hstack((x1, x2))
+    #     L2 = np.nonzero(Temp2.min() < 0)  # Locations of sigma points that violate the upper bound
+    #     for i in range(len(L2)):
+    #         if L2[i] <= n:
+    #             # Recalculate 'u' to satisfy the upper bound 'ub'
+    #             u[L2[i]] = theta * np.min(np.abs(np.subtract(ub, mu) / C[:, L2[i]]))
+    #         else:
+    #             # Recalculate 'v' to satisfy the upper bound 'ub'
+    #             v[L2[i] - n] = theta * np.min(np.abs(np.subtract(ub, mu) / C[:, L2[i] - n]))
+    #             Flag_calc = 1  # Set flag
+
+    #     if Flag_calc == 0:
+    #         # Recalculate the parameter 'v' to match the diagonal component of the
+    #         # skewness tensor (only done if 'v' was not redefined above)
+    #         v = u + solve(matrix_power(C, 3), x_skew)
+
+    #     # Regenerate the sigma points to reflect any change in 'u' or 'v'
+    #     x1 = mu - C * np.diag(u)
+    #     x2 = mu + C * np.diag(v)
+
+    # Calculate the weights (reflecting any change in 'u' or 'v')
+    w2 = np.ones(n) / v / np.add(u, v)
+    w1 = w2 * v / u
+    # Assemble the sigma points and weights
+    x = np.hstack((x0, x1, x2))
+    w0 = 1 - np.sum(np.vstack((w1, w2)), axis=0)
+    weights = np.transpose(np.hstack((w0, np.transpose(w1), np.transpose(w2))))
+    # s = np.vstack((u, v))
+    # print(x)
+    # print(weights)
+    # print(s)
+    return x, weights
\ No newline at end of file
diff --git a/src/generalized_unsented_transform/random_variable_test.py b/src/generalized_unsented_transform/random_variable_test.py
new file mode 100644
index 0000000..eda6df0
--- /dev/null
+++ b/src/generalized_unsented_transform/random_variable_test.py
@@ -0,0 +1,42 @@
+import numpy as np
+from generalized_unscented_transform import generalized_ut
+from evaluate_from_sigma_points import Evaluate_sample_statistics
+
+
+def quadratic_transform(sigma_points):
+    # Propagate each sigma point through the nonlinearity y = 3x + 2x^2
+    sigma_points = np.array(sigma_points)[0]
+    y = []
+    for x in sigma_points:
+        y.append(3*x + 2*(x**2))
+    y = np.matrix(y)
+    return y
+
+
+# Gaussian case: moments of N(mean=1, variance=4), so diagonal skewness 0
+# and diagonal kurtosis 3*sigma^4 = 48
+def test_Gaussian():
+    m1 = 1
+    m2 = 4
+    m3 = 0
+    m4 = 48
+    sigma_points, weights = generalized_ut(np.matrix([m1]), np.matrix([m2]), np.matrix([m3]), np.matrix([m4]))
+    sigma_mean, sigma_cov, sigma_skew, sigma_kurt = Evaluate_sample_statistics(quadratic_transform(sigma_points), weights)
+    print(sigma_mean)
+    print(sigma_cov)
+    print(sigma_skew)
+    print(sigma_kurt)
+
+
+# Exponential case: central moments of Exp(rate=2), i.e. mean 0.5, variance 0.25,
+# third central moment 0.25, fourth central moment 0.5625
+def test_Exp():
+    m1 = 0.5
+    m2 = 0.25
+    m3 = 0.25
+    m4 = 0.5625
+    sigma_points, weights = generalized_ut(np.matrix([m1]), np.matrix([m2]), np.matrix([m3]), np.matrix([m4]))
+    sigma_mean, sigma_cov, sigma_skew, sigma_kurt = Evaluate_sample_statistics(quadratic_transform(sigma_points), weights)
+    print(sigma_mean)
+    print(sigma_cov)
+    print(sigma_skew)
+    print(sigma_kurt)
+
+
+if __name__ == "__main__":
+    test_Gaussian()
+    test_Exp()
--
GitLab
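
Reviewer note (not part of the patch): as a quick sanity check on the construction that generalized_ut implements, the scalar Gaussian case from test_Gaussian can be worked by hand. With mean 1, variance 4, zero diagonal skewness and diagonal kurtosis 48, the formulas give u = v = sqrt(3), weights (2/3, 1/6, 1/6), and sigma points 1 and 1 -/+ 2*sqrt(3), which reproduce the first four moments exactly. The minimal sketch below redoes this with plain NumPy; it does not import the patched modules and its variable names are illustrative only.

import numpy as np

# Illustrative scalar check of the generalized UT construction for the
# Gaussian case used in test_Gaussian: mean 1, variance 4, skewness 0, kurtosis 48.
mu, P, skew, kurt = 1.0, 4.0, 0.0, 48.0
C = np.sqrt(P)  # the matrix square root reduces to a scalar square root

# Closed-form u and v, mirroring the expressions in generalized_ut
u = 0.5 * (-skew / C**3 + np.sqrt(4.0 * kurt / C**4 - 3.0 * (skew / C**3) ** 2))
v = u + skew / C**3

x = np.array([mu, mu - C * u, mu + C * v])  # the three sigma points
w2 = 1.0 / (v * (u + v))
w1 = w2 * v / u
w = np.array([1.0 - w1 - w2, w1, w2])       # weights sum to 1

# The sigma points reproduce the first four central moments of the input
d = x - np.dot(w, x)
print(np.dot(w, x))     # ~1.0  (mean)
print(np.dot(w, d**2))  # ~4.0  (variance)
print(np.dot(w, d**3))  # ~0.0  (diagonal skewness)
print(np.dot(w, d**4))  # ~48.0 (diagonal kurtosis)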