
## Solutions to Two Earlier Matrix Positive-Definiteness Problems

**Problem 1.** Let $A$ be an $n\times n$ positive definite matrix and let $X$ be an $n\times m$ matrix with $X^TX=I_m$; the block matrix below is then positive semidefinite. Indeed, since $X^TX=I_m$,

$$\left( {\begin{array}{*{20}{c}}{{X^T}AX}&{{I_m}}\\{{I_m}}&{{X^T}{A^{ - 1}}X}\end{array}} \right) = \left( {\begin{array}{*{20}{c}}{{X^T}AX}&{{X^T}X}\\{{X^T}X}&{{X^T}{A^{ - 1}}X}\end{array}} \right) = \left( {\begin{array}{*{20}{c}}{{X^T}}&0\\0&{{X^T}}\end{array}} \right)\left( {\begin{array}{*{20}{c}}A&{{I_n}}\\{{I_n}}&{{A^{ - 1}}}\end{array}} \right)\left( {\begin{array}{*{20}{c}}X&0\\0&X\end{array}} \right).$$

The middle factor is positive semidefinite, being congruent to $\operatorname{diag}(A,0)$:

$$\left( {\begin{array}{*{20}{c}}{{I_n}}&0\\{ - {A^{ - 1}}}&{{I_n}}\end{array}} \right)\left( {\begin{array}{*{20}{c}}A&{{I_n}}\\{{I_n}}&{{A^{ - 1}}}\end{array}} \right)\left( {\begin{array}{*{20}{c}}{{I_n}}&{ - {A^{ - 1}}}\\0&{{I_n}}\end{array}} \right) = \left( {\begin{array}{*{20}{c}}A&0\\0&0\end{array}} \right).$$

The block matrix of Problem 1 is a congruence of this positive semidefinite factor, so it is positive semidefinite as well.
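A quick numerical sanity check of this conclusion (not part of the original solution; it assumes `numpy` is available, and the random instance is purely illustrative):

```python
import numpy as np

rng = np.random.default_rng(0)
n, m = 6, 3
M = rng.standard_normal((n, n))
A = M @ M.T + n * np.eye(n)                        # a random positive definite A
X, _ = np.linalg.qr(rng.standard_normal((n, m)))   # columns orthonormal, so X^T X = I_m

top = np.hstack([X.T @ A @ X, np.eye(m)])
bot = np.hstack([np.eye(m), X.T @ np.linalg.inv(A) @ X])
B = np.vstack([top, bot])                          # the block matrix of Problem 1

print(np.linalg.eigvalsh(B).min())                 # expected: >= 0 up to roundoff
```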

**Problem 2.** The second problem is handled through a chain of kernels, starting from the lemma that $\left(\min\{x_i,x_j\}\right)_{n\times n}$ is positive semidefinite whenever all $x_i\ge 0$. Assume first that $0\le x_1\le x_2\le\cdots\le x_n$; then

$$\left(\min\{x_i,x_j\}\right)_{n\times n}= \begin{pmatrix} x_1&x_1&x_1&\cdots&x_1\\ x_1&x_2&x_2&\cdots &x_2\\ x_1&x_2&x_3&\cdots&x_3\\ \cdots&\cdots&\cdots&\cdots\\ x_1&x_2&x_3&\cdots&x_n \end{pmatrix}.$$

For any real $a_1,\dots,a_n$,

\begin{align*}&\begin{pmatrix} a_1&\\ a_1&a_2\\ a_1&a_2&a_3\\ \cdots&\cdots&\cdots&\cdots\\ a_1&a_2&a_3&\cdots&a_n \end{pmatrix} \begin{pmatrix} a_1&a_1&a_1&\cdots&a_1\\ 0&a_2&a_2&\cdots &a_2\\ 0&0&a_3&\cdots&a_3\\ \cdots&\cdots&\cdots&\cdots\\ 0&0&0&\cdots&a_n \end{pmatrix}\\=& \begin{pmatrix} a_1^2&a_1^2&a_1^2&\cdots&a_1^2\\ a_1^2&a_1^2+a_2^2&a_1^2+a_2^2&\cdots &a_1^2+a_2^2\\ a_1^2&a_1^2+a_2^2&a_1^2+a_2^2+a_3^2&\cdots&a_1^2+a_2^2+a_3^2\\ \cdots&\cdots&\cdots&\cdots\\ a_1^2&a_1^2+a_2^2&a_1^2+a_2^2+a_3^2&\cdots&a_1^2+a_2^2+a_3^2+\cdots+a_n^2 \end{pmatrix}.\end{align*}

Taking $a_k=\sqrt{x_k-x_{k-1}}$ (with $x_0=0$), the partial sums become $a_1^2+\cdots+a_k^2=x_k$, so the min-matrix above equals $LL^T$ for a lower triangular $L$ and is therefore positive semidefinite.
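A minimal check of this factorization (again assuming `numpy`; the sorted sample values are illustrative):

```python
import numpy as np

x = np.array([0.3, 0.7, 1.1, 2.5, 4.0])             # nonnegative and already sorted
a = np.sqrt(np.diff(np.concatenate([[0.0], x])))     # a_k = sqrt(x_k - x_{k-1}), with x_0 = 0
L = np.tril(np.tile(a, (len(x), 1)))                 # row i = (a_1, ..., a_i, 0, ..., 0)

print(np.allclose(L @ L.T, np.minimum.outer(x, x)))  # expected: True
```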


In general, still with $0\le x_1\le\cdots\le x_n$, let $\sigma$ be any permutation of $\{1,\dots,n\}$. For any real $a_1,\dots,a_n$,

\begin{align*} &\sum_{i,j=1}^na_{i}a_{j}\min\{x_{\sigma(i)},x_{\sigma(j)}\}=\sum_{i=1}^n\left(\sum_{j=1}^na_{i}a_{j}\min\{x_{\sigma(i)},x_{\sigma(j)}\}\right)\\=&\sum_{i=1}^n\left(\sum_{j=1}^na_{i}a_{\sigma^{-1}(j)}\min\{x_{\sigma(i)},x_{j}\}\right)=\sum_{j=1}^n\left(\sum_{i=1}^na_{i}a_{\sigma^{-1}(j)}\min\{x_{\sigma(i)},x_{j}\}\right)\\=&\sum_{j=1}^n\left(\sum_{i=1}^na_{\sigma^{-1}(i)}a_{\sigma^{-1}(j)}\min\{x_{i},x_{j}\}\right)=\sum_{i,j=1}^na_{\sigma^{-1}(i)}a_{\sigma^{-1}(j)}\min\{x_{i},x_{j}\}. \end{align*}

The last quadratic form is nonnegative by the ordered case, so $\left(\min\{x_{\sigma(i)},x_{\sigma(j)}\}\right)_{n\times n}$ is positive semidefinite as well; the ordering assumption therefore costs no generality.
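A short check of this relabelling identity (assuming `numpy`; the random data and seed are illustrative):

```python
import numpy as np

rng = np.random.default_rng(1)
x = np.sort(rng.uniform(0, 5, size=6))              # ordered nonnegative x
a = rng.standard_normal(6)
sigma = rng.permutation(6)
inv = np.argsort(sigma)                             # the inverse permutation sigma^{-1}

lhs = a @ np.minimum.outer(x[sigma], x[sigma]) @ a  # quadratic form of the permuted kernel
rhs = a[inv] @ np.minimum.outer(x, x) @ a[inv]      # ordered kernel, permuted coefficients
print(np.isclose(lhs, rhs))                         # expected: True
```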

The same lemma covers exponential kernels: since $t\mapsto e^{2t}$ is increasing and positive, $e^{x_i+x_j-|x_i-x_j|}=e^{2\min\{x_i,x_j\}}=\min\{e^{2x_i},e^{2x_j}\}$, so for ordered $x_i$

$$\left(e^{x_i+x_j-|x_i-x_j|}\right)_{n\times n}= \begin{pmatrix} e^{2x_1}&e^{2x_1}&e^{2x_1}&\cdots&e^{2x_1}\\ e^{2x_1}&e^{2x_2}&e^{2x_2}&\cdots &e^{2x_2}\\ e^{2x_1}&e^{2x_2}&e^{2x_3}&\cdots&e^{2x_3}\\ \cdots&\cdots&\cdots&\cdots\\ e^{2x_1}&e^{2x_2}&e^{2x_3}&\cdots&e^{2x_n} \end{pmatrix}$$

is again a min-type matrix with positive entries, hence positive semidefinite.
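A numerical illustration of this reduction (assuming `numpy`; values are arbitrary):

```python
import numpy as np

rng = np.random.default_rng(2)
x = rng.uniform(0, 3, size=7)
S = np.add.outer(x, x)
D = np.abs(np.subtract.outer(x, x))
E = np.exp(S - D)                                   # entries e^{x_i + x_j - |x_i - x_j|}

print(np.allclose(E, np.minimum.outer(np.exp(2*x), np.exp(2*x))))  # expected: True
print(np.linalg.eigvalsh(E).min() >= -1e-10)                       # PSD up to roundoff
```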

Next, for $\theta\in[0,1]$ and $x_i\ge 0$, consider

$$\left(\frac{1}{1+\theta(x_i+x_j)+(1-\theta)|x_i-x_j|}\right)_{n\times n}.$$

Using $\frac{1}{s}=\int_0^\infty e^{-ts}\,dt$ for $s>0$, for any real $a_1,\dots,a_n$,

\begin{align*} &\sum_{i,j=1}^n\frac{a_ia_j}{1+\theta(x_i+x_j)+(1-\theta)|x_i-x_j|}\\=&\sum_{i,j=1}^na_ia_j\int_0^\infty e^{-t(1+\theta(x_i+x_j)+(1-\theta)|x_i-x_j|)}\,dt\\ =&\int_0^\infty e^{-t}\left(\sum_{i,j=1}^n(a_ie^{-tx_i})(a_je^{-tx_j})e^{t(1-\theta)(x_i+x_j-|x_i-x_j|)}\right)dt. \end{align*}

For every $t\ge 0$ the matrix $\left(e^{t(1-\theta)(x_i+x_j-|x_i-x_j|)}\right)_{n\times n}$ is positive semidefinite by the previous step, so the integrand is nonnegative and this kernel matrix is positive semidefinite too.
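A quick eigenvalue check of this kernel (assuming `numpy`; the value of $\theta$ and the random $x_i$ are illustrative):

```python
import numpy as np

rng = np.random.default_rng(3)
x = rng.uniform(0, 4, size=8)
theta = 0.4
S = np.add.outer(x, x)
D = np.abs(np.subtract.outer(x, x))
K = 1.0 / (1.0 + theta * S + (1.0 - theta) * D)

print(np.linalg.eigvalsh(K).min() >= -1e-10)        # expected: True (PSD up to roundoff)
```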

Finally, for $x_i>0$ the matrix

$$\left( {\frac{{\ln \left( {1 + {x_i} + {x_j}} \right) - \ln \left( {1 + \left| {{x_i} - {x_j}} \right|} \right)}}{{{x_i} + {x_j} - \left| {{x_i} - {x_j}} \right|}}} \right)_{n \times n}$$

is positive semidefinite, because

$${\frac{{\ln \left( {1 + {x_i} + {x_j}} \right) - \ln \left( {1 + \left| {{x_i} - {x_j}} \right|} \right)}}{{{x_i} + {x_j} - \left| {{x_i} - {x_j}}\right|}}}=\int_0^1 \frac{1}{1+\theta(x_i+x_j)+(1-\theta)|x_i-x_j|} d\theta,$$

so its quadratic form is the average over $\theta\in[0,1]$ of quadratic forms already shown to be nonnegative.
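To close, a small numerical illustration of this representation and the resulting positive semidefiniteness (assuming `numpy` and `scipy` are available; the random instance is illustrative):

```python
import numpy as np
from scipy.integrate import quad

rng = np.random.default_rng(4)
x = rng.uniform(0.1, 4, size=6)                     # strictly positive x_i
S = np.add.outer(x, x)
D = np.abs(np.subtract.outer(x, x))
G = (np.log(1 + S) - np.log(1 + D)) / (S - D)       # the log-ratio kernel

# compare one entry against the integral over theta
i, j = 1, 3
val, _ = quad(lambda th: 1.0 / (1 + th * S[i, j] + (1 - th) * D[i, j]), 0, 1)
print(np.isclose(G[i, j], val))                     # expected: True
print(np.linalg.eigvalsh(G).min() >= -1e-10)        # expected: True
```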