1# -*- coding: utf-8 -*-
2"""
3Pruner
4
5EXAMPLE::
6
7    >>> from fpylll import *
8    >>> FPLLL.set_random_seed(1337)
9    >>> A = [IntegerMatrix.random(10, "qary", bits=10, k=5) for _ in range(20)]
10    >>> M = [GSO.Mat(a) for a in A]
11    >>> _ = [LLL.Reduction(m)() for m in M]
12    >>> radius = sum([m.get_r(0, 0) for m in M])/len(M)
13    >>> pr = Pruning.run(radius, 10000, [m.r() for m in M], 0.4)
14    >>> print(pr)  # doctest: +ELLIPSIS
15    PruningParams<7.797437, (1.00,...,0.80), 0.6594>
16
17    >>> print(Pruning.run(M[0].get_r(0, 0), 2**20, [m.r() for m in M], 0.9, pruning=pr))
18    PruningParams<1.001130, (1.00,...,0.98), 0.9410>
19
20
21..  moduleauthor:: Martin R.  Albrecht <martinralbrecht+fpylll@googlemail.com>
22"""
23
24include "fpylll/config.pxi"
25
26from libcpp cimport bool
27from libcpp.vector cimport vector
28from math import log, exp
29from cysignals.signals cimport sig_on, sig_off
30from cython.operator cimport dereference as deref, preincrement as inc
31
32from .decl cimport fp_nr_t, mpz_t, dpe_t, mpfr_t
33from .decl cimport nr_d, nr_dpe, nr_mpfr, pruner_core_t, d_t
34from .fplll cimport FT_DOUBLE, FT_LONG_DOUBLE, FT_DPE, FT_MPFR, FloatType
35from .fplll cimport PRUNER_METRIC_PROBABILITY_OF_SHORTEST, PRUNER_METRIC_EXPECTED_SOLUTIONS
36from .fplll cimport FP_NR, Z_NR
37from .fplll cimport prune as prune_c
38from .fplll cimport PruningParams as PruningParams_c
39from .fplll cimport Pruner as Pruner_c
40from .fplll cimport PrunerMetric
41from .fplll cimport svp_probability as svp_probability_c
42from .fplll cimport PRUNER_CVP, PRUNER_START_FROM_INPUT, PRUNER_GRADIENT, PRUNER_NELDER_MEAD, PRUNER_VERBOSE
43from .fplll cimport PRUNER_SINGLE, PRUNER_HALF
44
45
46from fpylll.util import adjust_radius_to_gh_bound, precision, FPLLL
47from fpylll.util cimport check_float_type, check_precision, check_pruner_metric
48
49IF HAVE_LONG_DOUBLE:
50    from .decl cimport nr_ld, ld_t
51
52IF HAVE_QD:
53    from .decl cimport nr_dd, nr_qd, dd_t, qd_t
54    from .fplll cimport FT_DD, FT_QD
55
56
cdef class PruningParams:
    """
    Pruning parameters: a radius factor relative to the Gaussian heuristic,
    a vector of per-index pruning coefficients and the expected success
    measure (a probability or an expected number of solutions).
    """
    def __init__(self, gh_factor, coefficients, expectation=1.0,
                 metric="probability", detailed_cost=tuple()):
        """Create new pruning parameters object.

        :param gh_factor: ratio of radius to Gaussian heuristic
        :param coefficients:  a list of pruning coefficients
        :param expectation:   success probability or number of solutions
        :param metric:        either "probability" or "solutions"
        :param detailed_cost: optional per-index cost breakdown

        """
        if gh_factor <= 0:
            raise ValueError("Radius factor must be > 0")

        cdef PrunerMetric met = <PrunerMetric>check_pruner_metric(metric)

        # A success probability must lie in (0, 1]; the "solutions" metric
        # has no upper bound and is therefore not range-checked here.
        if met == PRUNER_METRIC_PROBABILITY_OF_SHORTEST:
            if expectation <= 0 or expectation > 1:
                raise ValueError("Probability must be between 0 and 1")

        self._core.gh_factor = gh_factor
        self._core.expectation = expectation
        self._core.metric = met

        # Copy the Python sequences element-wise into the C++ vectors of
        # the wrapped PruningParams struct.
        for c in coefficients:
            self._core.coefficients.push_back(c)

        for c in detailed_cost:
            self._core.detailed_cost.push_back(c)

    @staticmethod
    cdef PruningParams from_cxx(PruningParams_c& p):
        """
        Load PruningParams object from C++ PruningParams object.

        .. note::

           All data is copied, i.e. `p` can be safely deleted after this function returned.
        """

        # Construct a placeholder Python object, then overwrite its core
        # with a copy of the C++ struct.
        cdef PruningParams self = PruningParams(1.0, ())
        self._core = p
        return self

    @staticmethod
    cdef to_cxx(PruningParams_c& self, PruningParams p):
        """
        Store pruning object in C++ pruning object.

        .. note::

           All data is copied, i.e. `p` can be safely deleted after this function returned.
        """
        self.gh_factor = p._core.gh_factor
        self.expectation = p._core.expectation
        self.metric = p._core.metric
        for c in p._core.coefficients:
            self.coefficients.push_back(c)
        for c in p._core.detailed_cost:
            self.detailed_cost.push_back(c)

    @staticmethod
    def LinearPruningParams(block_size, level):
        """
        Set all pruning coefficients to 1, except the last ``level``
        coefficients, which decrease linearly with slope `-1 / block_size`.

        :param block_size: block size
        :param level: level
        """
        sig_on()
        cdef PruningParams_c p = PruningParams_c.LinearPruningParams(block_size, level)
        sig_off()
        return PruningParams.from_cxx(p)

    def __reduce__(self):
        """
        Support pickling by reconstructing from the constructor arguments::

            >>> from fpylll.fplll.pruner import PruningParams
            >>> import pickle
            >>> print(pickle.loads(pickle.dumps(PruningParams(1.0, [1.0, 0.6, 0.3], 1.0))))
            PruningParams<1.000000, (1.00,...,0.30), 1.0000>

        """
        return PruningParams, (self.gh_factor, self.coefficients, self.expectation, self.metric, self.detailed_cost)

    def __str__(self):
        # NOTE(review): assumes at least one coefficient is present; an
        # empty coefficient vector would raise an IndexError here.
        return "PruningParams<%f, (%.2f,...,%.2f), %.4f>"%(self.gh_factor, self.coefficients[0], self.coefficients[-1], self.expectation)

    @property
    def gh_factor(self):
        """
        Ratio of the enumeration radius to the Gaussian heuristic::

            >>> from fpylll.fplll.pruner import PruningParams
            >>> pr = PruningParams(1.0, [1.0, 0.6, 0.3], 0.9)
            >>> pr.gh_factor
            1.0

        """
        return self._core.gh_factor

    @property
    def expectation(self):
        """
        Success probability or expected number of solutions::

            >>> from fpylll.fplll.pruner import PruningParams
            >>> pr = PruningParams(1.0, [1.0, 0.6, 0.3], 0.9)
            >>> pr.expectation
            0.9

        """
        return self._core.expectation

    @property
    def metric(self):
        """
        The metric the expectation refers to::

            >>> from fpylll.fplll.pruner import PruningParams
            >>> pr = PruningParams(1.0, [1.0, 0.6, 0.3], 0.9)
            >>> pr.metric
            'probability'

        """
        if self._core.metric == PRUNER_METRIC_PROBABILITY_OF_SHORTEST:
            return "probability"
        elif self._core.metric == PRUNER_METRIC_EXPECTED_SOLUTIONS:
            return "solutions"
        else:
            raise NotImplementedError("Metric %d not understood"%self._core.metric)

    @property
    def coefficients(self):
        """
        Pruning coefficients as a Python tuple::

            >>> from fpylll.fplll.pruner import PruningParams
            >>> pr = PruningParams(1.0, [1.0, 0.6, 0.3], 0.9)
            >>> pr.coefficients
            (1.0, 0.6, 0.3)

        """
        # Walk the C++ vector with an explicit iterator and copy into a
        # Python list, then freeze into a tuple.
        cdef list coefficients = []
        cdef vector[double].iterator it = self._core.coefficients.begin()
        while it != self._core.coefficients.end():
            coefficients.append(deref(it))
            inc(it)
        return tuple(coefficients)

    @property
    def detailed_cost(self):
        """
        Per-index cost breakdown as a Python tuple (may be empty)::

            >>> from fpylll.fplll.pruner import PruningParams
            >>> pr = PruningParams(1.0, [1.0, 0.6, 0.3], 0.9)
            >>> pr.detailed_cost
            ()

        """
        # Same element-wise copy as ``coefficients`` above.
        cdef list detailed_cost = []
        cdef vector[double].iterator it = self._core.detailed_cost.begin()
        while it != self._core.detailed_cost.end():
            detailed_cost.append(deref(it))
            inc(it)
        return tuple(detailed_cost)
229
230cdef class Pruner:
231    def __init__(self, double enumeration_radius, double preproc_cost,
232                 gso_r, double target,
233                 metric="probability", int flags=PRUNER_GRADIENT,
234                 float_type="double"):
235        """
236        :param enumeration_radius: target squared enumeration radius
237        :param preproc_cost: cost of preprocessing
238        :param gso_r: r vector of GSO
239        :param target: overall targeted success probability or number of solutions
240        :param metric: "probability" or "solutions"
241        :param flags: flags
242        :param float_type: floating point type to use
243
244        EXAMPLE::
245
246            >>> from fpylll import IntegerMatrix, GSO, LLL, Pruning, FPLLL
247            >>> FPLLL.set_random_seed(1337)
248            >>> A = IntegerMatrix.random(40, "qary", bits=20, k=20)
249            >>> _ = LLL.reduction(A)
250            >>> M = GSO.Mat(A)
251            >>> _ = M.update_gso()
252            >>> pr = Pruning.Pruner(M.get_r(0,0), 2**20, [M.r()], 0.51)
253            >>> pr = Pruning.Pruner(M.get_r(0,0), 2**20, [M.r()], 1, metric=Pruning.EXPECTED_SOLUTIONS)
254
255        ..  note :: Preprocessing cost should be expressed in terms of nodes in an enumeration (~100
256        CPU cycles per node)
257
258        """
259        cdef FloatType float_type_ = check_float_type(float_type)
260        cdef PrunerMetric metric_ = check_pruner_metric(metric)
261        cdef fp_nr_t enumeration_radius_
262        cdef fp_nr_t preproc_cost_
263        cdef fp_nr_t target_
264
265        if preproc_cost < 1:
266            raise ValueError("Preprocessing cost must be at least 1 but got %f"%preproc_cost)
267        if metric == PRUNER_METRIC_PROBABILITY_OF_SHORTEST:
268            if target <= 0 or target >= 1.0:
269                raise ValueError("Probability must be between 0 and 1 (exclusive) but got %f"%target)
270        if metric == PRUNER_METRIC_EXPECTED_SOLUTIONS:
271            if target <= 0:
272                raise ValueError("Number of solutions must be > 0 but got %f"%target)
273
274
275        cdef vector[vector[double]] gso_r_
276
277        d = len(gso_r[0])
278        for i,m in enumerate(gso_r):
279            gso_r_.push_back(vector[double]())
280            if len(m) != d:
281                raise ValueError("Lengths of all vectors must match.")
282            for e in m:
283                gso_r_[i].push_back(e)
284
285        if float_type_ == FT_DOUBLE:
286            self._type = nr_d
287            enumeration_radius_.d = enumeration_radius
288            preproc_cost_.d = preproc_cost
289            target_.d = target
290            self._core.d = new Pruner_c[FP_NR[d_t]](enumeration_radius_.d, preproc_cost_.d, gso_r_,
291                                                    target_.d, metric_, flags)
292        elif float_type_ == FT_LONG_DOUBLE:
293            IF HAVE_LONG_DOUBLE:
294                self._type = nr_ld
295                enumeration_radius_.ld = enumeration_radius
296                preproc_cost_.ld = preproc_cost
297                target_.ld = target
298                self._core.ld = new Pruner_c[FP_NR[ld_t]](enumeration_radius_.ld, preproc_cost_.ld, gso_r_,
299                                                          target_.ld, metric_, flags)
300            ELSE:
301                raise ValueError("Float type '%s' not understood." % float_type)
302        elif float_type_ == FT_DPE:
303            self._type = nr_dpe
304            enumeration_radius_.dpe = enumeration_radius
305            preproc_cost_.dpe = preproc_cost
306            target_.dpe = target
307            self._core.dpe = new Pruner_c[FP_NR[dpe_t]](enumeration_radius_.dpe, preproc_cost_.dpe, gso_r_,
308                                                        target_.dpe, metric_, flags)
309        elif float_type_ == FT_MPFR:
310            self._type = nr_mpfr
311            enumeration_radius_.mpfr = enumeration_radius
312            preproc_cost_.mpfr = preproc_cost
313            target_.mpfr = target
314            self._core.mpfr = new Pruner_c[FP_NR[mpfr_t]](enumeration_radius_.mpfr, preproc_cost_.mpfr, gso_r_,
315                                                          target_.mpfr, metric_, flags)
316        else:
317            IF HAVE_QD:
318                if float_type_ == FT_DD:
319                    self._type = nr_dd
320                    enumeration_radius_.dd = enumeration_radius
321                    preproc_cost_.dd = preproc_cost
322                    target_.dd = target
323                    self._core.dd = new Pruner_c[FP_NR[dd_t]](enumeration_radius_.dd, preproc_cost_.dd, gso_r_,
324                                                              target_.dd, metric_, flags)
325
326                elif float_type_ == FT_QD:
327                    self._type = nr_qd
328                    enumeration_radius_.qd = enumeration_radius
329                    preproc_cost_.qd = preproc_cost
330                    target_.qd = target
331                    self._core.qd = new Pruner_c[FP_NR[qd_t]](enumeration_radius_.qd, preproc_cost_.qd, gso_r_,
332                                                              target_.qd, metric_, flags)
333                else:
334                    raise ValueError("Float type '%s' not understood."%float_type)
335            ELSE:
336                raise ValueError("Float type '%s' not understood."%float_type)
337
    def __dealloc__(self):
        """Delete the C++ pruner core matching the active backend type."""
        # Only the union member allocated in ``__init__`` (tracked by
        # ``self._type``) is deleted; ``IF`` blocks are compile-time guards
        # for optional backends.
        if self._type == nr_d:
            del self._core.d
        IF HAVE_LONG_DOUBLE:
            if self._type == nr_ld:
                del self._core.ld
        if self._type == nr_dpe:
            del self._core.dpe
        IF HAVE_QD:
            if self._type == nr_dd:
                del self._core.dd
            if self._type == nr_qd:
                del self._core.qd
        if self._type == nr_mpfr:
            del self._core.mpfr
353
    def optimize_coefficients(self, pr):
        """
        Optimize pruning coefficients.

        :param pr: initial pruning coefficients (one per basis dimension)
        :returns: tuple of optimized coefficients

        >>> from fpylll import IntegerMatrix, GSO, LLL, Pruning, FPLLL
        >>> FPLLL.set_random_seed(1337)
        >>> A = IntegerMatrix.random(60, "qary", bits=20, k=30)
        >>> _ = LLL.reduction(A)
        >>> M = GSO.Mat(A)
        >>> _ = M.update_gso()

        >>> pr = Pruning.Pruner(0.9*M.get_r(0,0), 2**40, [M.r()], 0.51, metric=Pruning.PROBABILITY_OF_SHORTEST)
        >>> c = pr.optimize_coefficients([1. for _ in range(M.d)])
        >>> pr.measure_metric(c) # doctest: +ELLIPSIS
        0.002711...

        >>> pr = Pruning.Pruner(0.9*M.get_r(0,0), 2**2, [M.r()], 1.0, metric=Pruning.EXPECTED_SOLUTIONS)
        >>> c = pr.optimize_coefficients([1. for _ in range(M.d)])
        >>> pr.measure_metric(c) # doctest: +ELLIPSIS
        0.990517...

        >>> pr = Pruning.Pruner(0.5*M.get_r(0,0), 2**40, [M.r()], 0.51, metric=Pruning.PROBABILITY_OF_SHORTEST, flags=Pruning.SINGLE)
        >>> c = pr.optimize_coefficients([1. for _ in range(M.d)])
        >>> pr.measure_metric(c) # doctest: +ELLIPSIS
        0.515304...

        >>> pr = Pruning.Pruner(0.9*M.get_r(0,0), 2**2, [M.r()], 1.0, metric=Pruning.EXPECTED_SOLUTIONS, flags=Pruning.SINGLE)
        >>> c = pr.optimize_coefficients([1. for _ in range(M.d)])
        >>> pr.measure_metric(c) # doctest: +ELLIPSIS
        1.043578...

        """
        # Copy the coefficients into a C++ vector which the core mutates
        # in place.
        cdef vector[double] pr_
        cdef bool called = False

        d = len(pr)
        for e in pr:
            pr_.push_back(e)

        # Dispatch to the backend matching ``self._type``; exactly one
        # branch runs.  ``IF`` blocks are compile-time guards.
        # TODO: don't just return doubles
        if self._type == nr_d:
            sig_on()
            self._core.d.optimize_coefficients(pr_)
            called = True
            sig_off()
        IF HAVE_LONG_DOUBLE:
            if self._type == nr_ld:
                sig_on()
                self._core.ld.optimize_coefficients(pr_)
                called = True
                sig_off()
        if self._type == nr_dpe:
            sig_on()
            self._core.dpe.optimize_coefficients(pr_)
            called = True
            sig_off()
        IF HAVE_QD:
            if self._type == nr_dd:
                sig_on()
                self._core.dd.optimize_coefficients(pr_)
                called = True
                sig_off()
            elif self._type == nr_qd:
                sig_on()
                self._core.qd.optimize_coefficients(pr_)
                called = True
                sig_off()
        if self._type == nr_mpfr:
            sig_on()
            self._core.mpfr.optimize_coefficients(pr_)
            called = True
            sig_off()

        if not called:
             raise RuntimeError("Pruner object '%s' has no core."%self)

        # Copy the optimized values back out into a Python tuple.
        pr = []
        for i in range(d):
            pr.append(pr_[i])
        return tuple(pr)
434
    def optimize_coefficients_evec(self, pr):
        """
        Optimize using "even" coefficients.

        Run the optimization process, successively using the algorithm activated using using half
        coefficients: the input ``pr`` has length ``n``; but only the even indices in the vector
        will be used in the optimization.  In the end, we have ``pr_i = pr_{i+1}``.

        This function only optimizes the overall enumeration time where the target function is:

        ``single_enum_cost(pr) * trials + preproc_cost * (trials - 1.0)``

        :param pr: input pruning parameters
        :returns: tuple of optimized coefficients

        EXAMPLE::

            >>> from fpylll import IntegerMatrix, GSO, LLL, Pruning, FPLLL
            >>> FPLLL.set_random_seed(1337)
            >>> A = IntegerMatrix.random(40, "qary", bits=20, k=20)
            >>> _ = LLL.reduction(A)
            >>> M = GSO.Mat(A)
            >>> _ = M.update_gso()
            >>> pr = Pruning.Pruner(M.get_r(0,0), 2**20, [M.r()], 0.51)
            >>> c = pr.optimize_coefficients_evec([1.  for _ in range(M.d)])
            >>> c[0:10] # doctest: +ELLIPSIS
            (1.0, 1.0, 0.98, 0.98, 0.98, 0.98, 0.9637..., 0.9637..., 0.9591..., 0.9591...)

        """
        # Copy the coefficients into a C++ vector which the core mutates
        # in place.
        cdef vector[double] pr_
        cdef bool called = False

        d = len(pr)
        for e in pr:
            pr_.push_back(e)

        # Dispatch to the backend matching ``self._type``; exactly one
        # branch runs.  ``IF`` blocks are compile-time guards.
        # TODO: don't just return doubles
        if self._type == nr_d:
            sig_on()
            self._core.d.optimize_coefficients_evec(pr_)
            called = True
            sig_off()
        IF HAVE_LONG_DOUBLE:
            if self._type == nr_ld:
                sig_on()
                self._core.ld.optimize_coefficients_evec(pr_)
                called = True
                sig_off()
        if self._type == nr_dpe:
            sig_on()
            self._core.dpe.optimize_coefficients_evec(pr_)
            called = True
            sig_off()
        IF HAVE_QD:
            if self._type == nr_dd:
                sig_on()
                self._core.dd.optimize_coefficients_evec(pr_)
                called = True
                sig_off()
            elif self._type == nr_qd:
                sig_on()
                self._core.qd.optimize_coefficients_evec(pr_)
                called = True
                sig_off()
        if self._type == nr_mpfr:
            sig_on()
            self._core.mpfr.optimize_coefficients_evec(pr_)
            called = True
            sig_off()

        if not called:
             raise RuntimeError("Pruner object '%s' has no core."%self)

        # Copy the optimized values back out into a Python tuple.
        pr = []
        for i in range(d):
            pr.append(pr_[i])
        return tuple(pr)
511
    def optimize_coefficients_full(self, pr):
        """
        Optimize pruning coefficients using all the coefficients.

        Run the optimization process, successively using the algorithm activated using using full
        coefficients.  That is, we do not have the constraint pr_i = pr_{i+1} in this function.

        Note that this function (and `optimize_coefficients_full_core()`) only optimizes the overall
        enumeration time where the target function is:

        ``single_enum_cost(pr) * trials + preproc_cost * (trials - 1.0)``

        :param pr: input pruning parameters
        :returns: tuple of optimized coefficients

        EXAMPLE::

            >>> from fpylll import IntegerMatrix, GSO, LLL, Pruning, FPLLL
            >>> FPLLL.set_random_seed(1337)
            >>> A = IntegerMatrix.random(40, "qary", bits=20, k=20)
            >>> _ = LLL.reduction(A)
            >>> M = GSO.Mat(A)
            >>> _ = M.update_gso()
            >>> pr = Pruning.Pruner(M.get_r(0,0), 2**20, [M.r()], 0.51)
            >>> c = pr.optimize_coefficients_full([1. for _ in range(M.d)])
            >>> c[0:10]  # doctest: +ELLIPSIS
            (1.0, 1.0, 0.98, 0.98, 0.98, 0.98, 0.9608..., 0.9607..., 0.9574..., 0.9572...)

            ..  note :: Basis shape and other parameters must have been set beforehand.
        """
        # Copy the coefficients into a C++ vector which the core mutates
        # in place.
        cdef vector[double] pr_
        cdef bool called = False

        d = len(pr)
        for e in pr:
            pr_.push_back(e)

        # Dispatch to the backend matching ``self._type``; exactly one
        # branch runs.  ``IF`` blocks are compile-time guards.
        # TODO: don't just return doubles
        if self._type == nr_d:
            sig_on()
            self._core.d.optimize_coefficients_full(pr_)
            called = True
            sig_off()
        IF HAVE_LONG_DOUBLE:
            if self._type == nr_ld:
                sig_on()
                self._core.ld.optimize_coefficients_full(pr_)
                called = True
                sig_off()
        if self._type == nr_dpe:
            sig_on()
            self._core.dpe.optimize_coefficients_full(pr_)
            called = True
            sig_off()
        IF HAVE_QD:
            if self._type == nr_dd:
                sig_on()
                self._core.dd.optimize_coefficients_full(pr_)
                called = True
                sig_off()
            elif self._type == nr_qd:
                sig_on()
                self._core.qd.optimize_coefficients_full(pr_)
                called = True
                sig_off()
        if self._type == nr_mpfr:
            sig_on()
            self._core.mpfr.optimize_coefficients_full(pr_)
            called = True
            sig_off()

        if not called:
             raise RuntimeError("Pruner object '%s' has no core."%self)

        # Copy the optimized values back out into a Python tuple.
        pr = []
        for i in range(d):
            pr.append(pr_[i])
        return tuple(pr)
589
    def optimize_coefficients_cost_vary_prob(self, pr):
        """
        Optimize the pruning coefficients with respect to the overall enumeration time.

        The target function is: ``single_enum_cost(pr) * trials + preproc_cost * (trials - 1.0)``;

        :param pr: input pruning parameters
        :returns: tuple of optimized coefficients

        EXAMPLE::

            >>> from fpylll import IntegerMatrix, GSO, LLL, Pruning, FPLLL
            >>> FPLLL.set_random_seed(1337)
            >>> A = IntegerMatrix.random(40, "qary", bits=20, k=20)
            >>> _ = LLL.reduction(A)
            >>> M = GSO.Mat(A)
            >>> _ = M.update_gso()
            >>> pr = Pruning.Pruner(M.get_r(0,0), 2**20, [M.r()], 0.51)
            >>> c = pr.optimize_coefficients_cost_vary_prob([1. for _ in range(M.d)])
            >>> c[0:10]  # doctest: +ELLIPSIS
            (1.0, 1.0, 0.999..., 0.999..., 0.995..., 0.993..., 0.977..., 0.962..., 0.936..., 0.913...)

        """
        # Copy the coefficients into a C++ vector which the core mutates
        # in place.
        cdef vector[double] pr_
        cdef bool called = False

        d = len(pr)
        for e in pr:
            pr_.push_back(e)

        # Dispatch to the backend matching ``self._type``; exactly one
        # branch runs.  ``IF`` blocks are compile-time guards.
        # TODO: don't just return doubles
        if self._type == nr_d:
            sig_on()
            self._core.d.optimize_coefficients_cost_vary_prob(pr_)
            called = True
            sig_off()
        IF HAVE_LONG_DOUBLE:
            if self._type == nr_ld:
                sig_on()
                self._core.ld.optimize_coefficients_cost_vary_prob(pr_)
                called = True
                sig_off()
        if self._type == nr_dpe:
            sig_on()
            self._core.dpe.optimize_coefficients_cost_vary_prob(pr_)
            called = True
            sig_off()
        IF HAVE_QD:
            if self._type == nr_dd:
                sig_on()
                self._core.dd.optimize_coefficients_cost_vary_prob(pr_)
                called = True
                sig_off()
            elif self._type == nr_qd:
                sig_on()
                self._core.qd.optimize_coefficients_cost_vary_prob(pr_)
                called = True
                sig_off()
        if self._type == nr_mpfr:
            sig_on()
            self._core.mpfr.optimize_coefficients_cost_vary_prob(pr_)
            called = True
            sig_off()

        if not called:
             raise RuntimeError("Pruner object '%s' has no core."%self)

        # Copy the optimized values back out into a Python tuple.
        pr = []
        for i in range(d):
            pr.append(pr_[i])
        return tuple(pr)
658
659
    def optimize_coefficients_cost_fixed_prob(self, pr):
        """
        Optimize pruning coefficients with respect to the single enumeration.

        Main interface to optimize the single enumeration time with the constraint such that the succ.
        prob (or expected solutions) is fixed (and given) from input to the Pruner constructor.

        :param pr: input pruning parameters
        :returns: tuple of optimized coefficients

        EXAMPLE::

            >>> from fpylll import IntegerMatrix, GSO, LLL, Pruning, FPLLL
            >>> FPLLL.set_random_seed(1337)
            >>> A = IntegerMatrix.random(40, "qary", bits=20, k=20)
            >>> _ = LLL.reduction(A)
            >>> M = GSO.Mat(A)
            >>> _ = M.update_gso()
            >>> pr = Pruning.Pruner(M.get_r(0,0), 2**20, [M.r()], 0.51)
            >>> c = pr.optimize_coefficients_cost_fixed_prob([1. for _ in range(M.d)])
            >>> c[0:10]  # doctest: +ELLIPSIS
            (1.0, 1.0, 0.98, 0.98, 0.98, 0.98, 0.962..., 0.944..., 0.944..., 0.944...)

        """
        # Copy the coefficients into a C++ vector which the core mutates
        # in place.
        cdef vector[double] pr_
        cdef bool called = False

        d = len(pr)
        for e in pr:
            pr_.push_back(e)

        # Dispatch to the backend matching ``self._type``; exactly one
        # branch runs.  ``IF`` blocks are compile-time guards.
        # TODO: don't just return doubles
        if self._type == nr_d:
            sig_on()
            self._core.d.optimize_coefficients_cost_fixed_prob(pr_)
            called = True
            sig_off()
        IF HAVE_LONG_DOUBLE:
            if self._type == nr_ld:
                sig_on()
                self._core.ld.optimize_coefficients_cost_fixed_prob(pr_)
                called = True
                sig_off()
        if self._type == nr_dpe:
            sig_on()
            self._core.dpe.optimize_coefficients_cost_fixed_prob(pr_)
            called = True
            sig_off()
        IF HAVE_QD:
            if self._type == nr_dd:
                sig_on()
                self._core.dd.optimize_coefficients_cost_fixed_prob(pr_)
                called = True
                sig_off()
            elif self._type == nr_qd:
                sig_on()
                self._core.qd.optimize_coefficients_cost_fixed_prob(pr_)
                called = True
                sig_off()
        if self._type == nr_mpfr:
            sig_on()
            self._core.mpfr.optimize_coefficients_cost_fixed_prob(pr_)
            called = True
            sig_off()

        if not called:
             raise RuntimeError("Pruner object '%s' has no core."%self)

        # Copy the optimized values back out into a Python tuple.
        pr = []
        for i in range(d):
            pr.append(pr_[i])
        return tuple(pr)
729
730
    def single_enum_cost(self, pr, detailed_cost=False):
        """
        Compute the cost of a single enumeration::

            >>> from fpylll import IntegerMatrix, GSO, LLL, Pruning, FPLLL
            >>> FPLLL.set_random_seed(1337)
            >>> A = IntegerMatrix.random(40, "qary", bits=20, k=20)
            >>> _ = LLL.reduction(A)
            >>> M = GSO.Mat(A)
            >>> _ = M.update_gso()
            >>> pr = Pruning.Pruner(M.get_r(0,0), 2**20, [M.r()], 0.51)
            >>> c = pr.optimize_coefficients([1. for _ in range(M.d)])
            >>> cost, details = pr.single_enum_cost(c, True)
            >>> cost  # doctest: +ELLIPSIS
            14980.48...

            >>> details[0:10]  # doctest: +ELLIPSIS
            (0.134901..., 0.3048..., 0.81588..., 1.945..., 4.5903..., 11.51..., 16.048..., 41.7115..., 48.03..., 116.986...)

        :param pr: pruning coefficients (one per basis dimension)
        :param detailed_cost: if true, also return the per-level cost breakdown
        :returns: the cost, or ``(cost, per_level_costs)`` when ``detailed_cost`` is true

        """
        # ``pr_`` is the input vector; ``detailed_cost_`` is an output
        # buffer of the same length filled by the core.
        cdef vector[double] pr_
        cdef vector[double] detailed_cost_
        cdef bool called = False
        cost = 0.0

        d = len(pr)
        for e in pr:
            pr_.push_back(e)
            detailed_cost_.push_back(0.0)

        # Dispatch to the backend matching ``self._type``; exactly one
        # branch runs.  ``IF`` blocks are compile-time guards.
        # TODO: don't just return doubles
        if self._type == nr_d:
            sig_on()
            cost = self._core.d.single_enum_cost(pr_, &detailed_cost_)
            called = True
            sig_off()
        IF HAVE_LONG_DOUBLE:
            if self._type == nr_ld:
                sig_on()
                cost = self._core.ld.single_enum_cost(pr_, &detailed_cost_)
                called = True
                sig_off()
        if self._type == nr_dpe:
            sig_on()
            cost = self._core.dpe.single_enum_cost(pr_, &detailed_cost_)
            called = True
            sig_off()
        IF HAVE_QD:
            if self._type == nr_dd:
                sig_on()
                cost = self._core.dd.single_enum_cost(pr_, &detailed_cost_)
                called = True
                sig_off()
            elif self._type == nr_qd:
                sig_on()
                cost = self._core.qd.single_enum_cost(pr_, &detailed_cost_)
                called = True
                sig_off()
        if self._type == nr_mpfr:
            sig_on()
            cost = self._core.mpfr.single_enum_cost(pr_, &detailed_cost_)
            called = True
            sig_off()

        if not called:
             raise RuntimeError("Pruner object '%s' has no core."%self)

        # NOTE: the ``detailed_cost`` flag is deliberately rebound here from
        # a bool to the output list.
        if detailed_cost:
            detailed_cost = []
            for i in range(d):
                detailed_cost.append(detailed_cost_[i])
            return cost, tuple(detailed_cost)
        else:
            return cost
805
    def repeated_enum_cost(self, pr):
        """
        Compute the cost of r enumeration and (r-1) preprocessing, where r is the required number of
        retrials to reach target::

            >>> from fpylll import IntegerMatrix, GSO, LLL, Pruning, FPLLL
            >>> FPLLL.set_random_seed(1337)
            >>> A = IntegerMatrix.random(40, "qary", bits=20, k=20)
            >>> _ = LLL.reduction(A)
            >>> M = GSO.Mat(A)
            >>> _ = M.update_gso()
            >>> pr = Pruning.Pruner(M.get_r(0,0), 2**20, [M.r()], 0.51)
            >>> c = pr.optimize_coefficients([1. for _ in range(M.d)])
            >>> pr.repeated_enum_cost(c)  # doctest: +ELLIPSIS
            15626.98...

        :param pr: pruning coefficients, one per dimension
        :returns: cost estimate as a Python float

        :raises RuntimeError: if this object wraps no pruner core
        """
        # Copy the Python sequence into a C++ vector for the fplll call.
        cdef vector[double] pr_
        # Records whether any branch below matched ``self._type``; if none
        # did, the object has no usable core and we raise at the end.
        cdef bool called = False
        cost = 0.0

        for e in pr:
            pr_.push_back(e)

        # Dispatch on the float type of the wrapped core.  The ``IF`` blocks
        # are Cython *compile-time* conditionals, so the long double and
        # quad-double branches only exist in builds configured with them.
        # Each call is wrapped in sig_on()/sig_off() to make the long-running
        # C++ computation interruptible.
        # TODO: don't just return doubles
        if self._type == nr_d:
            sig_on()
            cost = self._core.d.repeated_enum_cost(pr_)
            called = True
            sig_off()
        IF HAVE_LONG_DOUBLE:
            if self._type == nr_ld:
                sig_on()
                cost = self._core.ld.repeated_enum_cost(pr_)
                called = True
                sig_off()
        if self._type == nr_dpe:
            sig_on()
            cost = self._core.dpe.repeated_enum_cost(pr_)
            called = True
            sig_off()
        IF HAVE_QD:
            if self._type == nr_dd:
                sig_on()
                cost = self._core.dd.repeated_enum_cost(pr_)
                called = True
                sig_off()
            elif self._type == nr_qd:
                sig_on()
                cost = self._core.qd.repeated_enum_cost(pr_)
                called = True
                sig_off()
        if self._type == nr_mpfr:
            sig_on()
            cost = self._core.mpfr.repeated_enum_cost(pr_,)
            called = True
            sig_off()

        if not called:
             raise RuntimeError("Pruner object '%s' has no core."%self)

        return cost
868
    def measure_metric(self, pr):
        """
        Compute the success probability or expected number of solutions of a single enumeration
        (which of the two depends on the metric this pruner was constructed with)::

            >>> from fpylll import IntegerMatrix, GSO, LLL, Pruning, FPLLL
            >>> FPLLL.set_random_seed(1337)
            >>> A = IntegerMatrix.random(40, "qary", bits=20, k=20)
            >>> _ = LLL.reduction(A)
            >>> M = GSO.Mat(A)
            >>> _ = M.update_gso()
            >>> pr = Pruning.Pruner(M.get_r(0,0), 2**20, [M.r()], 0.51)
            >>> c = pr.optimize_coefficients([1. for _ in range(M.d)])

        :param pr: pruning coefficients, one per dimension
        :returns: metric value as a Python float

        :raises RuntimeError: if this object wraps no pruner core
        """
        # Copy the Python sequence into a C++ vector for the fplll call.
        cdef vector[double] pr_
        # Records whether any branch below matched ``self._type``; if none
        # did, the object has no usable core and we raise at the end.
        cdef bool called = False
        r = 0.0

        for e in pr:
            pr_.push_back(e)

        # Dispatch on the float type of the wrapped core.  The ``IF`` blocks
        # are Cython *compile-time* conditionals, so the long double and
        # quad-double branches only exist in builds configured with them.
        # Each call is wrapped in sig_on()/sig_off() to make the long-running
        # C++ computation interruptible.
        # TODO: don't just return doubles
        if self._type == nr_d:
            sig_on()
            r = self._core.d.measure_metric(pr_)
            called = True
            sig_off()
        IF HAVE_LONG_DOUBLE:
            if self._type == nr_ld:
                sig_on()
                r = self._core.ld.measure_metric(pr_)
                called = True
                sig_off()
        if self._type == nr_dpe:
            sig_on()
            r = self._core.dpe.measure_metric(pr_)
            called = True
            sig_off()
        IF HAVE_QD:
            if self._type == nr_dd:
                sig_on()
                r = self._core.dd.measure_metric(pr_)
                called = True
                sig_off()
            elif self._type == nr_qd:
                sig_on()
                r = self._core.qd.measure_metric(pr_)
                called = True
                sig_off()
        if self._type == nr_mpfr:
            sig_on()
            r = self._core.mpfr.measure_metric(pr_,)
            called = True
            sig_off()

        if not called:
             raise RuntimeError("Pruner object '%s' has no core."%self)

        return r
928
def prune(double enumeration_radius, double preproc_cost, gso_r, double target,
          metric="probability", int flags=PRUNER_GRADIENT, pruning=None, float_type="double"):
    """Return optimal pruning parameters.

    :param enumeration_radius: target squared enumeration radius
    :param preproc_cost: cost of preprocessing
    :param gso_r: list (of lists) with r coefficients
    :param target: overall targeted success probability or number of solutions
    :param metric: "probability" or "solutions"
    :param flags: flags
    :param pruning: write output here, pass ``None`` for creating a new one
    :param float_type: floating point type to use

    :raises ValueError: on invalid ``preproc_cost``, ``target`` or ``float_type``
    :raises TypeError: if ``pruning`` is neither ``None`` nor ``PruningParams``

    EXAMPLE::

        >>> from fpylll import IntegerMatrix, LLL, GSO, FPLLL
        >>> from fpylll import FPLLL
        >>> from fpylll import Pruning
        >>> FPLLL.set_random_seed(1337)
        >>> A = IntegerMatrix.random(20, "qary", bits=20, k=10)
        >>> M = GSO.Mat(A)
        >>> LLL.Reduction(M)()
        >>> _ = FPLLL.set_precision(128)
        >>> R = [M.get_r(i,i) for i in range(0, 20)]
        >>> pr0 = Pruning.run(R[0], 2**20, [R], 0.5, float_type="double")
        >>> pr1 = Pruning.run(R[0], 2**20, [R], 0.5, float_type="mpfr")

        >>> pr0.coefficients[10], pr1.coefficients[10] # doctest: +ELLIPSIS
        (0.6266..., 0.6266...)

        >>> pr0 = Pruning.run(R[0], 2**10, [R], 0.5, flags=Pruning.GRADIENT, float_type="double")
        >>> pr1 = Pruning.run(R[0], 2**10, [R], 0.5, flags=Pruning.NELDER_MEAD, float_type="mpfr")
        >>> pr0.coefficients[10], pr1.coefficients[10] # doctest: +ELLIPSIS
        (0.70722482938..., 0.824291475...)

    ..  note :: Preprocessing cost should be expressed in terms of nodes in an enumeration (~100
        CPU cycles per node)
    """
    if preproc_cost < 1:
        raise ValueError("Preprocessing cost must be at least 1 but got %f"%preproc_cost)

    # Normalise the metric *before* validating ``target``, so the checks
    # below also fire when the metric is given as a string such as
    # "probability" (previously they only triggered when the enum constant
    # itself was passed in).
    metric = check_pruner_metric(metric)

    if metric == PRUNER_METRIC_PROBABILITY_OF_SHORTEST:
        if target <= 0 or target >= 1.0:
            raise ValueError("Probability must be between 0 and 1 (exclusive) but got %f"%target)
    if metric == PRUNER_METRIC_EXPECTED_SOLUTIONS:
        if target <= 0:
            raise ValueError("Number of solutions must be > 0 but got %f"%target)

    cdef FloatType ft = check_float_type(float_type)

    # Allow a single list of r coefficients as a shorthand for a list
    # containing one such list.
    try:
        gso_r[0][0]
    except (AttributeError, TypeError):
        gso_r = [gso_r]

    if pruning is None:
        pruning = PruningParams(1.0, [], 1.0)
    elif not isinstance(pruning, PruningParams):
        raise TypeError("First parameter must be of type PruningParams or None but got type '%s'"%type(pruning))

    cdef vector[vector[double]] gso_r_

    # Copy the basis profiles into a C++ vector of vectors, checking that
    # all profiles share the same dimension.
    d = len(gso_r[0])
    for i,m in enumerate(gso_r):
        gso_r_.push_back(vector[double]())
        if len(m) != d:
            raise ValueError("Lengths of all vectors must match.")
        for e in m:
            gso_r_[i].push_back(e)

    # Dispatch to the fplll pruner instantiated for the requested float
    # type.  The ``IF`` blocks are Cython compile-time conditionals, so the
    # long double and quad-double branches only exist in builds configured
    # with them.  Each call is wrapped in sig_on()/sig_off() so the
    # long-running optimisation is interruptible.
    if ft == FT_DOUBLE:
        sig_on()
        prune_c[FP_NR[double]]((<PruningParams>pruning)._core, enumeration_radius, preproc_cost,
                               gso_r_, target, metric, flags)
        sig_off()
        return pruning
    IF HAVE_LONG_DOUBLE:
        if ft == FT_LONG_DOUBLE:
            sig_on()
            prune_c[FP_NR[longdouble]]((<PruningParams>pruning)._core, enumeration_radius, preproc_cost,
                                       gso_r_, target, metric, flags)
            sig_off()
            return pruning
    if ft == FT_DPE:
        sig_on()
        prune_c[FP_NR[dpe_t]]((<PruningParams>pruning)._core, enumeration_radius, preproc_cost,
                              gso_r_, target, metric, flags)
        sig_off()
        return pruning
    if ft == FT_MPFR:
        sig_on()
        prune_c[FP_NR[mpfr_t]]((<PruningParams>pruning)._core, enumeration_radius, preproc_cost,
                               gso_r_, target, metric, flags)
        sig_off()
        return pruning
    IF HAVE_QD:
        if ft == FT_DD:
            sig_on()
            prune_c[FP_NR[dd_t]]((<PruningParams>pruning)._core, enumeration_radius, preproc_cost,
                                 gso_r_, target, metric, flags)
            sig_off()
            return pruning
        elif ft == FT_QD:
            sig_on()
            prune_c[FP_NR[qd_t]]((<PruningParams>pruning)._core, enumeration_radius, preproc_cost,
                                 gso_r_, target, metric, flags)
            sig_off()
            return pruning

    # Mirror ``svp_probability``: fail loudly instead of silently returning
    # ``None`` when no branch matched the requested float type.
    raise ValueError("Float type '%s' not understood."%float_type)
1037
1038
def svp_probability(pr, float_type="double"):
    """Return probability of success for enumeration with given set of pruning parameters.

    :param pr: pruning parameters, either PruningParams object or list of floating point numbers
    :param float_type: floating point type used internally

    :returns: probability as a Python float

    :raises ValueError: if ``float_type`` matches no branch compiled into this build
    """
    cdef FloatType ft = check_float_type(float_type)

    # A plain list of coefficients is wrapped into a PruningParams object so
    # that the C++ call below always receives the same core type.
    if not isinstance(pr, PruningParams):
        pr = PruningParams(1.0, pr, 1.0)

    # Dispatch on the requested float type.  The ``IF`` blocks are Cython
    # compile-time conditionals, so the long double and quad-double branches
    # only exist in builds configured with them; the result is always
    # converted back to a Python float via ``get_d()``.
    if ft == FT_DOUBLE:
        return svp_probability_c[FP_NR[double]]((<PruningParams>pr)._core.coefficients).get_d()
    IF HAVE_LONG_DOUBLE:
        if ft == FT_LONG_DOUBLE:
            return svp_probability_c[FP_NR[longdouble]]((<PruningParams>pr)._core.coefficients).get_d()
    if ft == FT_DPE:
        return svp_probability_c[FP_NR[dpe_t]]((<PruningParams>pr)._core.coefficients).get_d()
    if ft == FT_MPFR:
        return svp_probability_c[FP_NR[mpfr_t]]((<PruningParams>pr)._core.coefficients).get_d()
    IF HAVE_QD:
        if ft == FT_DD:
            return svp_probability_c[FP_NR[dd_t]]((<PruningParams>pr)._core.coefficients).get_d()
        elif ft == FT_QD:
            return svp_probability_c[FP_NR[qd_t]]((<PruningParams>pr)._core.coefficients).get_d()

    raise ValueError("Float type '%s' not understood."%float_type)
1067
class Pruning:
    """
    Namespace collecting the pruning API in one place: the :class:`Pruner`
    class, the pruning-parameter containers, the :func:`prune` entry point
    (exposed as ``Pruning.run``) and the flag/metric constants it accepts.
    """
    Pruner = Pruner
    PruningParams = PruningParams
    LinearPruningParams = PruningParams.LinearPruningParams
    run = staticmethod(prune)

    # Optimizer/behaviour flags; combinable with bitwise OR.
    CVP = PRUNER_CVP
    START_FROM_INPUT = PRUNER_START_FROM_INPUT
    GRADIENT = PRUNER_GRADIENT
    NELDER_MEAD = PRUNER_NELDER_MEAD
    VERBOSE = PRUNER_VERBOSE
    # Convenience combination: both gradient and Nelder-Mead flags set.
    ZEALOUS = PRUNER_GRADIENT | PRUNER_NELDER_MEAD
    SINGLE = PRUNER_SINGLE
    HALF = PRUNER_HALF
    # Optimization targets (metrics) accepted by ``Pruning.run``.
    PROBABILITY_OF_SHORTEST = PRUNER_METRIC_PROBABILITY_OF_SHORTEST
    EXPECTED_SOLUTIONS = PRUNER_METRIC_EXPECTED_SOLUTIONS
1084