Fork of the official GitHub repository of the Leaky-LWE-Estimator framework, a Sage toolkit to attack and estimate the hardness of LWE with side information. https://github.com/lducas/leaky-LWE-Estimator

geometry.sage 10KB

from numpy.linalg import inv as np_inv
from numpy.linalg import slogdet  # used by logdet() below
from numpy import array
import numpy as np

# Note: vec(), scal(), cholesky() and round_matrix_to_rational() are helpers
# defined elsewhere in the toolkit and assumed to be loaded in the same Sage session.

def dual_basis(B):
    """
    Compute the dual basis of B
    """
    return B.pseudoinverse().transpose()

def projection_matrix(A):
    """
    Construct the matrix of the orthogonal projection onto Span(A)
    (acting on row vectors by right multiplication)
    """
    S = A * A.T
    return A.T * S.inverse() * A

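# Editor's note: the _demo_* helpers below are illustrative sketches added for
# this fork's documentation; they are not part of the original toolkit and are
# never called by it.  This one checks the row-span convention used above:
# x * P is the orthogonal projection of the row vector x onto Span(A).
def _demo_projection_matrix():
    A = matrix(QQ, [[1, 0, 0], [0, 1, 1]])
    P = projection_matrix(A)
    assert P * P == P                               # a projection is idempotent
    assert (vector(QQ, [0, 1, -1]) * P).is_zero()   # orthogonal vectors are sent to 0
    return P
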
def project_against(v, X):
    """ Project matrix X orthogonally to vector v """
    # Pv = projection_matrix(v)
    # return X - X * Pv
    Z = (X * v.T) * v / scal(v * v.T)
    return X - Z

# def make_primitive(B, v):
#     assert False
#     # Project and scale v's in V so that each v
#     # is in the lattice, and primitive in it.
#     # Note: does not make V primitive as a set of vectors!
#     # (e.g. linear dependencies are not eliminated)
#     PB = projection_matrix(B)
#     DT = dual_basis(B).T
#     v = vec(v) * PB
#     w = v * DT
#     den = lcm([x.denominator() for x in w[0]])
#     num = gcd([x for x in w[0] * den])
#     if num == 0:
#         return None
#     v *= den / num
#     return v

def vol(B):
    return sqrt(det(B * B.T))

def project_and_eliminate_dep(B, W):
    # Project W on Span(B)
    PB = projection_matrix(B)
    V = W * PB
    rank_loss = V.nrows() - V.rank()
    if rank_loss > 0:
        print("WARNING: there were %d linear dependencies out of %d " %
              (rank_loss, V.nrows()))
        V = V.LLL()
        V = V[rank_loss:]
    return V

def is_cannonical_direction(v):
    v = vec(v)
    return sum([x != 0 for x in v[0]]) == 1

def cannonical_param(v):
    v = vec(v)
    assert is_cannonical_direction(v)
    i = [x != 0 for x in v[0]].index(True)
    return i, v[0, i]

def xgcd_of_list(a):
    (g, s, t) = xgcd(a[0], a[1])
    bezouts = [s, t]
    for v in a[2:]:
        (g, s_, t_) = xgcd(g, v)
        bezouts = [b * s_ for b in bezouts]
        bezouts.append(t_)
    return (g, bezouts)

def remove_linear_dependencies(B, dim=None):
    nrows = B.nrows()
    if dim is None or nrows > dim:
        # Determine the number of dependencies
        K, r = None, None
        if dim is None:
            # The cost of "left_kernel" is assumed negligible compared to "LLL"
            K = B.left_kernel().basis_matrix()
            r = K.dimensions()[0]
        else:
            r = nrows - dim

        if r == 1 and False:  # Specialized single-dependency routine (currently disabled)
            # Find a linear dependency
            if K is None:
                K = B.left_kernel().basis_matrix()
            assert K.dimensions()[0] == 1
            combinaison = K[0]
            combinaison *= lcm([v.denominator() for v in combinaison])
            print(combinaison)
            pivot, pivot_value = None, None
            for ind, value in enumerate(combinaison):
                if abs(value) == 1:
                    pivot, pivot_value = ind, value
                    break
            if pivot_value is None:
                print('Complex case')
                for ind, value in enumerate(combinaison):
                    if abs(value) > 0 and gcd([v for i, v in enumerate(combinaison) if i != ind]) == 1:
                        if pivot is None or abs(value) < abs(pivot_value):
                            pivot, pivot_value = ind, value
                if pivot_value < 0:
                    combinaison = vector(ZZ, [-v for v in combinaison])
                _, bezouts = xgcd_of_list([v for i, v in enumerate(combinaison) if i != pivot])
                factor = combinaison[pivot] - 1
                for i in range(len(combinaison)):
                    ind = i if i < pivot else i - 1
                    if i != pivot:
                        B[i] += factor * bezouts[ind] * B[pivot]
                combinaison[pivot] += -factor
                assert (combinaison * B).is_zero(), 'It is not a linear dependency anymore!'
                assert abs(combinaison[pivot]) == 1, f'Error abs(combinaison[pivot]) == {abs(combinaison[pivot])} != 1'
            B = B[[i for i in range(B.dimensions()[0]) if i != pivot]]
        else:
            B = B.LLL()
            B = B[r:]
    return B

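# Editor's illustrative sketch (not part of the original toolkit): redundant
# generators are removed while the generated lattice is preserved.
def _demo_remove_linear_dependencies():
    B = matrix(ZZ, [[1, 0, 0], [0, 1, 0], [1, 1, 0]])   # 3 generators, rank 2
    B2 = remove_linear_dependencies(B)
    assert B2.nrows() == 2
    assert span(B.rows(), ZZ) == span(B2.rows(), ZZ)    # same lattice
    return B2
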
def lattice_orthogonal_section(D, V, maintains_basis=True):
    """
    Compute the intersection of the lattice L(B)
    with the hyperplane orthogonal to Span(V).
    (V can be either a vector or a matrix)
    INPUT AND OUTPUT DUAL BASIS
    Algorithm:
    - project V onto Span(B)
    - project the dual basis onto orth(V)
    - eliminate linear dependencies (LLL)
    - go back to the primal.
    """
    # V = project_and_eliminate_dep(D, V)  # No need because D is full-rank
    # r = V.nrows()
    # Project the dual basis orthogonally to v
    PV = projection_matrix(V)
    D = D - D * PV
    # Eliminate linear dependencies
    if maintains_basis:
        D = remove_linear_dependencies(D)
    # Go back to the primal
    return D

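# Editor's illustrative sketch (not part of the original toolkit): the function
# acts on a *dual* basis, so we dualize ZZ^3, take the section orthogonal to v,
# and dualize back to obtain a primal basis of the section.
def _demo_lattice_orthogonal_section():
    B = identity_matrix(QQ, 3)                  # L(B) = ZZ^3
    v = matrix(QQ, [[1, 1, 0]])
    D = lattice_orthogonal_section(dual_basis(B), v)
    B_section = dual_basis(D)                   # primal basis of the section
    assert (B_section * v.T).is_zero()          # every basis vector is orthogonal to v
    return B_section
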
def lattice_project_against(B, V, maintains_basis=True):
    """
    Compute the projection of the lattice L(B) orthogonally to Span(V). All vectors of V
    (or at least their projections on Span(B)) must belong to L(B).
    Algorithm:
    - project V onto Span(B)
    - project the basis onto orth(V)
    - eliminate linear dependencies (LLL)
    """
    # Project v on Span(B)
    # V = project_and_eliminate_dep(B, V)  # No need because D is full-rank
    # r = V.nrows()  # Useless
    # Check that V belongs to L(B)
    D = dual_basis(B)
    M = D * V.T
    if not lcm([x.denominator() for x in M.list()]) == 1:
        raise ValueError("Not in the lattice")
    # Project the basis orthogonally to v
    PV = projection_matrix(V)
    B = B - B * PV
    # Eliminate linear dependencies
    if maintains_basis:
        B = remove_linear_dependencies(B)
    # Go back to the primal
    return B

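# Editor's illustrative sketch (not part of the original toolkit): projecting
# ZZ^3 against a lattice vector v yields a rank-2 lattice orthogonal to v.
def _demo_lattice_project_against():
    B = identity_matrix(QQ, 3)              # L(B) = ZZ^3
    v = matrix(QQ, [[1, 1, 0]])             # v belongs to L(B)
    Bp = lattice_project_against(B, v)
    assert Bp.nrows() == 2
    assert (Bp * v.T).is_zero()             # the projected basis is orthogonal to v
    return Bp
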
def lattice_modular_intersection(D, V, k, maintains_basis=True):
    """
    Compute the intersection of the lattice L(B) with
    the lattice {x | x*V = 0 mod k}
    (V can be either a vector or a matrix)
    Algorithm:
    - project V onto Span(B)
    - append the equations in the dual
    - eliminate linear dependencies (LLL)
    - go back to the primal.
    """
    # Project v on Span(B)
    # V = project_and_eliminate_dep(D, V)  # No need because D is full-rank
    # r = V.nrows()  # Useless
    # Append the equation in the dual
    V /= k
    D = D.stack(V)
    # Eliminate linear dependencies
    if maintains_basis:
        D = remove_linear_dependencies(D)
    # Go back to the primal
    return D

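# Editor's illustrative sketch (not part of the original toolkit): intersect
# ZZ^3 with {x | x*(1,2,3) = 0 mod 5}, working on the dual basis as above.
def _demo_lattice_modular_intersection():
    B = identity_matrix(QQ, 3)                # L(B) = ZZ^3
    v = matrix(QQ, [[1, 2, 3]])
    k = 5
    D = lattice_modular_intersection(dual_basis(B), v, k)
    B_inter = dual_basis(D)                   # primal basis of the intersection
    for x in B_inter.rows():
        assert (x * v[0]) / k in ZZ           # x*v = 0 mod k for every basis vector
    return B_inter
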
def is_diagonal(M):
    if M.nrows() != M.ncols():
        return False
    A = M.numpy()
    return np.all(A == np.diag(np.diagonal(A)))

def logdet(M, exact=False):
    """
    Compute the log of the determinant of a large rational matrix,
    trying to avoid overflows.
    """
    if not exact:
        MM = array(M, dtype=float)
        _, l = slogdet(MM)
        return l

    a = abs(M.det())
    l = 0
    while a > 2**32:
        l += RR(32 * ln(2))
        a /= 2**32
    l += ln(RR(a))
    return l

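# Editor's illustrative sketch (not part of the original toolkit): both the
# floating-point path (numpy's slogdet) and the exact path agree on log(det).
def _demo_logdet():
    M = diagonal_matrix(QQ, [2, 4, 8])                            # det = 64
    assert abs(float(logdet(M)) - 6 * float(ln(2))) < 1e-9
    assert abs(float(logdet(M, exact=True)) - 6 * float(ln(2))) < 1e-9
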
def degen_inverse(S, B=None):
    """ Compute the inverse of a symmetric matrix restricted
    to its span
    """
    # Get an orthogonal basis for the Span of B
    if B is None:
        V = S.echelon_form()
        V = V[:V.rank()]
        P = projection_matrix(V)
    else:
        P = projection_matrix(B)

    # Make S non-degenerate by adding the complement of span(B)
    C = identity_matrix(S.ncols()) - P
    Sinv = (S + C).inverse() - C

    assert S * Sinv == P, "Consistency failed (probably not your fault)."
    assert P * Sinv == Sinv, "Consistency failed (probably not your fault)."
    return Sinv

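# Editor's illustrative sketch (not part of the original toolkit): a rank-2
# symmetric matrix supported on the plane z = 0 is inverted on its span only.
def _demo_degen_inverse():
    S = matrix(QQ, [[2, 1, 0], [1, 2, 0], [0, 0, 0]])
    Sinv = degen_inverse(S)
    P = projection_matrix(matrix(QQ, [[1, 0, 0], [0, 1, 0]]))
    assert S * Sinv == P                    # Sinv inverts S on its span
    return Sinv
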
def degen_logdet(S, B=None):
    """ Compute the log-determinant of a symmetric matrix
    S (m x m) restricted to the span of the full-rank
    rectangular (k x m, k <= m) matrix B
    """
    # Get an orthogonal basis for the Span of B
    if B is None:
        V = S.echelon_form()
        V = V[:V.rank()]
        P = projection_matrix(V)
    else:
        P = projection_matrix(B)

    # Check that S is indeed supported by span(B)
    # assert S == P.T * S * P
    assert (S - P.T * S * P).norm() <= 1e-10

    # Make S non-degenerate by adding the complement of span(B)
    C = identity_matrix(S.ncols()) - P
    l3 = logdet(S + C)
    return l3

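# Editor's illustrative sketch (not part of the original toolkit): for the same
# rank-2 matrix, the restricted determinant is det([[2, 1], [1, 2]]) = 3.
def _demo_degen_logdet():
    S = matrix(QQ, [[2, 1, 0], [1, 2, 0], [0, 0, 0]])
    assert abs(float(degen_logdet(S)) - float(ln(3))) < 1e-6
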
def square_root_inverse_degen(S, B=None):
    """ Compute a square root L of S on the span of the full-rank
    rectangular (k x m, k <= m) matrix B, together with its inverse L_inv:
    L_inv is a Cholesky factor of S^-1 (computed after making S
    non-degenerate outside span(B), in floating point, then rounded back
    to rationals), and L = L_inv^-1.
    """
    if B is None:
        # Get an orthogonal basis for the Span of B
        V = S.echelon_form()
        V = V[:V.rank()]
        P = projection_matrix(V)
    else:
        P = projection_matrix(B)

    # Make S non-degenerate by adding the complement of span(B)
    C = identity_matrix(S.ncols()) - P
    S_inv = np_inv(array((S + C), dtype=float))
    S_inv = array(S_inv, dtype=float)
    L_inv = cholesky(S_inv)
    L_inv = round_matrix_to_rational(L_inv)
    L = L_inv.inverse()
    return L, L_inv

def build_standard_substitution_matrix(V, pivot=None, data=None, output_data=False):
    _, pivot = V.nonzero_positions()[0] if (pivot is None) else (None, pivot)
    assert V[0, pivot] != 0, 'The value of the pivot must be non-zero.'

    dim = V.ncols()
    V1 = - V[0, :pivot] / V[0, pivot]
    V2 = - V[0, pivot+1:] / V[0, pivot]

    Gamma = zero_matrix(QQ, dim, dim-1)
    Gamma[:pivot, :pivot] = identity_matrix(pivot)
    Gamma[pivot, :pivot] = V1
    Gamma[pivot, pivot:] = V2
    Gamma[pivot+1:, pivot:] = identity_matrix(dim-pivot-1)

    data = data if not output_data else {}
    if data is not None:
        norm2 = 1 + scal(V1*V1.T) + scal(V2*V2.T)
        normalization_matrix = zero_matrix(QQ, dim-1, dim-1)
        normalization_matrix[:pivot, :pivot] = identity_matrix(pivot) - V1.T*V1 / norm2
        normalization_matrix[pivot:, pivot:] = identity_matrix(dim-pivot-1) - V2.T*V2 / norm2
        normalization_matrix[:pivot, pivot:] = - V1.T*V2 / norm2
        normalization_matrix[pivot:, :pivot] = - V2.T*V1 / norm2
        data['det'] = norm2
        data['normalization_matrix'] = normalization_matrix

    if output_data:
        return Gamma, data
    else:
        return Gamma