1#!/usr/local/bin/python3.8 2# 3# Examples of parameters can be found in the params_dl directory 4 5# Algorithms 6# UpperClass: 7# This is the initialization of the descent. An extended gcd is 8# done between the target and p to get two "rational 9# reconstructions", and then a sieving procedure is done to find a 10# linear combination of these that is smooth (idea taken from 11# Joux-Lercier). This sieving is done with las, with two linear 12# polynomials. 13# At the end, we have target = num/den, where num and den are 14# smooth, with a smoothness bound that is large than the one that 15# was used in the sieving / linear algebra. 16# MiddleClass: 17# For all the primes dividing num and den that are larger than the 18# large prime bound, a "special-q descent" is performed, in order 19# to rewrite them in terms of smaller and smaller elements, until 20# everything is known. This is done with the las_descent program. 21# LowerClass: 22# This step is just putting everything together. 23# In practice, this means computing appropriate Schirokauer maps 24# and propagating the known logarithms in relations in order to 25# deduce the unknown ones. The main tool for that is the 26# reconstructlog program. Some ugly modifications of input files 27# are necessary. 28 29 30# TODO: do we want to have default values, as done here, for the --init-* 31# arguments ? I would say no. 32 33# TODO: of course this is currently kludgy, and does not properly wield 34# the power of the cadofactor python programs. I need to understand that 35# stuff better. 36 37# TODO: keep las awake, if needed. 38 39# TODO: make output less verbose. 40 41import os 42import io 43import sqlite3 44import subprocess 45import sys 46import argparse 47import re 48import math 49import time 50import tempfile 51import shutil 52import functools 53import itertools 54import random 55from queue import Queue, Empty 56from threading import Thread 57 58has_hwloc = None 59 60# This gives the boring info about the file names for everything, and 61# the boilerplate arguments to be fed to binaries. 62class GeneralClass(object): 63 64 def declare_args(parser): 65 # parser.add_argument("--no-wipe", 66 # help="Keep working files", 67 # action="store_true") 68 parser.add_argument("--sm-mode", 69 help="Select SM mode", 70 type=str) 71 parser.add_argument("--datadir", 72 help="cadofactor working directory", 73 type=str) 74 parser.add_argument("--prefix", 75 help="project prefix", 76 type=str) 77 parser.add_argument("--db", 78 help="SQLite db file name", 79 type=str) 80 # the next few are optional file names 81 parser.add_argument("--tmpdir", help="Temporary working directory") 82 parser.add_argument("--cadobindir", 83 help="Cado build directory", 84 required=True, 85 type=str) 86# parser.add_argument("--todofile", 87# help="Output primes to this toto file", 88# type=str) 89 parser.add_argument("--poly", 90 help="Polynomial file", 91 type=str) 92 parser.add_argument("--renumber", 93 help="Renumber file", 94 type=str) 95 parser.add_argument("--badidealinfo", 96 help="Badideal info file", 97 type=str) 98 parser.add_argument("--fb1", 99 help="Factor base file for the algebraic side", 100 type=str) 101 parser.add_argument("--log", 102 help="File with known logs", 103 type=str) 104 parser.add_argument("--gfpext", 105 help="Degree of extension (default 1)", 106 type=int) 107 parser.add_argument("--numbertheorydata", 108 help="File with numbertheory data", 109 type=str) 110 # This one applies to both las in the initial step 111 parser.add_argument("--threads", 112 help="Number of threads to use", 113 type=int, default=4) 114 # the arguments below are really better fetched form the 115 # database. 116 parser.add_argument("--ell", help="Group order (a.k.a. ell)") 117 parser.add_argument("--nsm0", help="Number of SM on side 0") 118 parser.add_argument("--nsm1", help="Number of SM on side 1") 119 # Those are used both for the middle and lower levels of the 120 # descent. 121 for side in range(2): 122 parser.add_argument("--lpb%d" % side, 123 help="Default large prime bound on side %d" % side, 124 required=True, 125 type=int) 126 127 def __init__(self, args): 128 self._conn = None 129 self.args = args 130 if bool(args.db) == bool(args.prefix and args.datadir): 131 raise ValueError("Either --db (with an sqlite db) or the combo --prefix + --datadir must be specified") 132 if args.tmpdir: 133 self._tmpdir = args.tmpdir 134 # do mkdir ??? 135 else: 136 self._tmpdir = tempfile.mkdtemp(dir="/tmp") 137 self.numbertheorydata=None 138 self.hello() 139 self.__load_badidealdata() 140 self.logDB = LogBase(self) 141 self.initrandomizer = 1 142 143 def __connect(self): 144 if args.db and not self._conn: 145 self._conn = sqlite3.connect(args.db) 146 147 def __getdb(self, query): 148 if not args.db: 149 return None 150 self.__connect() 151 self._cursor = self._conn.cursor() 152 self._cursor.execute(query) 153 v=self._cursor.fetchone() 154 self._cursor.close() 155 return v 156 157 def __getfile(self, shortname, typical, table, key): 158 try: 159 v=self.args.__dict__[shortname] 160 if v: 161 return v 162 except KeyError: 163 pass 164 if args.db and table: 165 v=self.__getdb("select value from %s where kkey='%s'" % (table, key)) 166 if v is not None and len(v) > 0: 167 return os.path.join(os.path.dirname(args.db), v[0]) 168 elif args.datadir and args.prefix: 169 return os.path.join(args.datadir, args.prefix + "." + typical) 170 raise ValueError("no %s file known" % shortname) 171 172 def __getarg(self, shortname, table, key): 173 try: 174 v=self.args.__dict__[shortname] 175 if v: 176 return v 177 except KeyError: 178 pass 179 if args.db: 180 v=self.__getdb("select value from %s where kkey='%s'" % (table, key)) 181 if v is not None and len(v) > 0: 182 return v[0] 183 raise ValueError("no %s parameter known" % shortname) 184 185 def prefix(self): 186 if args.prefix: 187 return args.prefix 188 else: 189 return os.path.basename(args.db).split('.')[0] 190 191 def datadir(self): 192 if args.datadir: 193 return args.datadir 194 elif args.db: 195 return os.path.dirname(args.db) 196 else: 197 raise ValueError("Need --datadir or --db with an sqlite db") 198 199 def poly(self): 200 return self.__getfile("poly", "poly", "polyselect2", "polyfilename") 201 def renumber(self): 202 return self.__getfile("renumber", "renumber.gz", "freerel", "renumberfilename") 203 204 def log(self): 205 return self.__getfile("log", "dlog", "reconstructlog", "dlog") 206 def badideals(self): 207 return self.__getfile("badideals", "badideals", "numbertheory", "badidealsfile") 208 def badidealinfo(self): 209 return self.__getfile("badidealinfo", "badidealinfo", "numbertheory", "badidealinfofile") 210 def fb1(self): 211 return self.__getfile("fb1", "roots1.gz", "factorbase", "outputfile") 212 def fb0(self): 213 return self.__getfile("fb0", "roots0.gz", "factorbase", "outputfile") 214 def ell(self): 215 return int(args.ell) 216 def lpb0(self): 217 return args.lpb0 218 def lpb1(self): 219 return args.lpb1 220 def tmpdir(self): 221 return self._tmpdir 222 def threads(self): 223 return int(args.threads) 224 def poly_data(self): 225 d={} 226 with open(self.poly(), "r") as file: 227 for line in file: 228 if re.match("^\s*#", line): 229 continue 230 if re.match("^\s*$", line): 231 continue 232 key,value=line.split(":") 233 key = key.strip() 234 foo = re.match("^([cY])(\d+)$", key) 235 if foo: 236 s,i=foo.groups() 237 if s not in d: 238 d[s]=[] 239 while int(i) >= len(d[s]): 240 d[s]+=[None] 241 d[s][int(i)]=value.strip() 242 else: 243 d[key] = value.strip() 244 if 'poly0' in d: 245 assert 'Y' not in d 246 d['Y'] = [ int(x) for x in d["poly0"].split(',') ] 247 if 'poly1' in d: 248 assert 'c' not in d 249 d['c'] = [ int(x) for x in d["poly1"].split(',') ] 250 return d 251 252 def p(self): 253 d=self.poly_data() 254 return int(d["n"]) 255 def extdeg(self): 256 if args.gfpext: 257 return args.gfpext 258 else: 259 return 1 260 261 def target(self): 262 if self.extdeg() == 1: 263 return int(args.target) 264 else: 265 return [int(x) for x in args.target.split(",")] 266 267 # short name for the target, to be used in filenames 268 def short_target(self): 269 target = str(self.args.target) 270 if len(target) <= 20: 271 return target 272 else: 273 return target[:10] + "..." + target[-10:] 274 275 def has_rational_side(self): 276 d=self.poly_data() 277 return len(d["Y"]) == 2 278 def rational_poly(): 279 d=self.poly_data() 280 assert len(d["Y"]) == 2 281 return [int(x) for x in d["Y"]] 282 def algebraic_poly(): 283 d=self.poly_data() 284 return [int(x) for x in d["c"]] 285 286 def cleanup(self): 287 if False: 288# if not self.args.tmpdir and not self.args.no_wipe: 289 shutil.rmtree(self.tmpdir()) 290 291 def __del__(self): 292 if self._conn: 293 self._conn.close() 294 295 def descentinit_bin(self): 296 return os.path.join(args.cadobindir, "misc", "descent_init_Fp") 297 298 def las_bin(self): 299 return os.path.join(args.cadobindir, "sieve", "las") 300 def sm_simple_bin(self): 301 return os.path.join(args.cadobindir, "filter", "sm_simple") 302 303 def lasMiddle_base_args(self): 304 # TODO add threads once it's fixed. 305 s=[ 306 self.las_bin() + "_descent", 307 "--recursive-descent", 308 "--allow-largesq", 309 "--never-discard", # useful for small computations. 310 "--renumber", self.renumber(), 311 "--log", self.log(), 312 "--fb1", self.fb1(), 313 "--poly", self.poly(), 314 ] 315 if not self.has_rational_side(): 316 s.append("--fb0") 317 s.append(self.fb0()) 318 return [ str(x) for x in s ] 319 320 # There's no las_init_base_args, since DescentUpperClass uses only 321 # its very own arguments. 322 323 def hello(self): 324 print("Working in GF(p), p=%d" % self.p()) 325 print("Subgroup considered in GF(p)^* has size %d" % self.ell()) 326 print("prefix is %s" % self.prefix()) 327 errors=[] 328 if not os.path.exists(self.las_bin()): 329 errors.append("las not found (make las ?)") 330 if not os.path.exists(self.las_bin() + "_descent"): 331 errors.append("las_descent not found (make las_descent ?)") 332 if not os.path.exists(self.sm_simple_bin()): 333 errors.append("sm_simple not found (make sm_simple ?)") 334 for f in [ self.log(), self.badidealinfo(), self.poly(), self.renumber(), self.log(), self.fb1() ]: 335 if not os.path.exists(f): 336 errors.append("%s missing" % f) 337 if len(errors): 338 msg = "Some data files and/or binaries missing:\n" 339 msg += "\n".join(["\t"+x for x in errors]) 340 raise RuntimeError(msg) 341 342 # self.list_badideals will contain a list of (p,r,side) 343 # self.list_bad_ncols will contain a list of the corresponding nb of cols 344 # self.badidealdata will contain a list of 345 # (p, k, rk, side, [exp1, exp2, ..., expi]) 346 def __load_badidealdata(self): 347 self.list_badideals = [] 348 self.list_ncols = [] 349 with open(self.badideals(), 'r') as bad: 350 for line in bad: 351 if line[0] == '#': 352 continue 353 foo = re.match("^(\d+),(\d+):(\d+): (\d+)$", line) 354 if foo: 355 self.list_badideals.append((int(foo.groups()[0]), 356 int(foo.groups()[1]), int(foo.groups()[2]))) 357 self.list_ncols.append(int(foo.groups()[3])) 358 else: 359 raise ValueError("Error while reading %s" % self.badideal()) 360 361 self.badidealdata = [] 362 with open(self.badidealinfo(), 'r') as bad: 363 for line in bad: 364 if line[0] == '#': 365 continue 366 pattern = "^(\d+) (\d+) (\d+) (\d+) (.+)$" 367 foo = re.match(pattern, line) 368 if foo: 369 self.badidealdata.append(( 370 int(foo.groups()[0]), # p 371 int(foo.groups()[1]), # k 372 int(foo.groups()[2]), # rk 373 int(foo.groups()[3]), # side 374 [ int(x) for x in foo.groups()[4].split() ] # exp 375 )) 376 else: 377 raise ValueError("Error while reading %s" % 378 self.badidealinfo()) 379 print ("Bad ideal information loaded: %s bad ideals, and %s lines in badidealinfo" 380 % (str(len(self.list_badideals)), str(len(self.badidealdata)))) 381 print ("badideal data: %s" % str(self.badidealdata)) 382 383 384def check_result(two, log2, z, logz, p, ell): 385 assert (p-1) % ell == 0 386 assert pow(z, log2*((p-1) // ell), p) == pow(2, logz*((p-1) // ell), p) 387 print ("Final consistency check ok!") 388 389 390# A memory image of the reconstructlog.dlog file. 391class LogBase(object): 392 def __init__(self, general): 393 self.known={} 394 self.badideals=[] 395 self.SMs = [ [], [] ] 396 self.fullcolumn = None 397 try: 398 print ("--- Reading %s to find which are the known logs ---" % general.log()) 399 def process(line): 400 index,p,side,r,*value = line.split() 401 if p == "bad" and side == "ideals": 402 self.badideals.append(int(r)) 403 elif p == "SM": 404 self.SMs[int(side)].append(int(value[0])) 405 else: 406 # for rational side, we actually don't have the root. 407 # We force it to be on side 0 (legacy, again...) 408 if r == "rat": 409 assert int(side) == 0; 410 r = -1 411 else: 412 r = int(r, 16) 413 self.known[(int(p, 16),r,int(side))] = int(value[0]) 414 415 with open(general.log(),'r') as file: 416 line = file.readline() 417 m = re.match("^(\w+) added column (\d+)$",line) 418 if m: 419 self.fullcolumn = int(m.groups()[1]) 420 else: 421 self.fullcolumn = None 422 process(line) 423 for i,line in enumerate(file): 424 if i % 1000000 == 0: 425 print("Reading line %d" % i) 426 process(line) 427 print("Found %d bad ideals, %d known logs, and %d,%d SMs in %s" 428 %(len(self.badideals), len(self.known), 429 len(self.SMs[0]), len(self.SMs[1]), general.log())) 430 except: 431 raise ValueError("Error while reading %s" % general.log()) 432 def has(self,p,r,side): 433 return (p,r,side) in self.known 434 def get_log(self, p,r,side): 435 if general.has_rational_side() and side == 0: 436 r = -1; 437 if (p,r,side) in self.known: 438 return self.known[(p,r,side)] 439 else: 440 return None 441 def add_log(self,p,r,side,log): 442 if general.has_rational_side() and side == 0: 443 r = -1; 444 self.known[(p,r,side)] = log 445 def bad_ideal(self,i): 446 return self.badideals[i] 447 def allSM(self,i): 448 SM = self.SMs[0] + self.SMs[1] 449 return SM[i] 450 def nSM(self,side): 451 return len(self.SMs[side]) 452 def SM(self,side,i): 453 return self.SMs[side][i] 454 def full_column(self): 455 return self.fullcolumn 456 457def a_over_b_mod_p(a, b, p): 458 if b%p == 0: 459 return p 460 ib = pow(b, p-2, p) 461 return (a*ib) % p 462 463def is_a_over_b_equal_r_mod_pk(a, b, rk, p, pk): 464 if b%p != 0: # non-projective 465 if rk >= pk: 466 return False 467 return (a-b*rk) % pk == 0 468 else: # projective 469 if rk < pk: 470 return False 471 return (b-a*rk) % pk == 0 472 473class ideals_above_p(object): 474 def __is_badideal(self, p, r, side, general): 475 return (p,r,side) in general.list_badideals 476 477 def __handle_badideal(self, p, k, a, b, r, side, general): 478 baddata = general.badidealdata 479 expo = [] 480 badid = None 481 for X in baddata: 482 if side != X[3] or p != X[0]: 483 continue; 484 pk = pow(p, X[1]) 485 rk = X[2] 486 if not is_a_over_b_equal_r_mod_pk(a,b,rk,p,pk): 487 continue; 488 vals = X[4] 489 badid = (p, r, side) 490 for v in vals: 491 if v > 0: 492 exp = v 493 else: 494 assert k >= -v 495 exp = k+v 496 expo.append(exp) 497 break 498 if badid == None: 499 raise ValueError("Error while handling badideal p=%d side=%d a=%d b=%d" % (p, side, a, b)) 500 return {"badid":badid, "exp":expo} 501 502 def __init__(self, p, k, a, b, side, general): 503 self.logDB = general.logDB 504 self.p = p 505 self.k = k 506 self.side = side 507 if general.has_rational_side() and side == 0: 508 self.r = -1 509 else: 510 self.r = a_over_b_mod_p(a, b, p) 511 self.isbad = self.__is_badideal(p, self.r, side, general) 512 if self.isbad: 513 self.bads = self.__handle_badideal(p, k, a, b, self.r, side, general) 514 515 # return an unreduced virtual log or None if unknown. 516 def get_log(self): 517 if not self.isbad: 518 l = self.logDB.get_log(self.p, self.r, self.side) 519 if l == None: 520 return None 521 else: 522 return self.k * l 523 else: 524 ind_b = 0 525 ind_l = 0 526 while general.list_badideals[ind_b] != (self.p, self.r, self.side): 527 ind_l += general.list_ncols[ind_b] 528 ind_b += 1 529 logs = [self.logDB.bad_ideal(x) for x in range(ind_l, 530 ind_l+general.list_ncols[ind_b]) ] 531 assert len(logs) == len(self.bads["exp"]) 532 log = 0 533 for i in range(len(logs)): 534 log = log + logs[i]*self.bads["exp"][i] 535 return log 536 537class important_file(object): 538 def __init__(self, outfile, call_that): 539 self.child = None 540 print("command line:\n" + " ".join(call_that)) 541 if os.path.exists(outfile): 542 print("reusing file %s" % outfile) 543 self.reader = open(outfile,'r') 544 self.writer = None 545 else: 546 print("running program, saving output to %s" % outfile) 547 self.child = subprocess.Popen(call_that, stdout=subprocess.PIPE) 548 self.reader = io.TextIOWrapper(self.child.stdout, 'utf-8') 549 self.writer = open(outfile, 'w') 550 551 def streams(self): 552 return self.reader, self.writer 553 554 def __iter__(self): 555 return self 556 557 def __next__(self): 558 line = next(self.reader) 559 if self.writer is not None: 560 self.writer.write(line) 561 self.writer.flush() 562 return line 563 564 def __enter__(self): 565 return self 566 567 def __exit__(self, *args): 568 if self.child is not None: 569 # self.writer.close() 570 print("Waiting for child to finish") 571 # self.child.kill() 572 # self.reader.close() # do I need to put it before ? broken pipe ? 573 for line in self.reader: 574 self.writer.write(line) 575 self.writer.flush() 576 self.reader.close() 577 self.writer.close() 578 print("ok, done") 579 else: 580 self.reader.close() 581 582class DescentUpperClass(object): 583 def declare_args(parser): 584 c = " (specific for the descent bootstrap)" 585 parser.add_argument("--init-tkewness", 586 help="Tkewness"+c, 587 type=int, 588 default=2**30) 589 parser.add_argument("--init-lim", 590 help="Factor base bound"+c, 591 default=2**26) 592 parser.add_argument("--init-lpb", 593 help="Large prime bound"+c, 594 default=64) 595 parser.add_argument("--init-mfb", 596 help="Cofactor bound"+c, 597 default=100) 598 parser.add_argument("--init-ncurves", 599 help="ECM effort in cofactorization"+c, 600 default=80) 601 parser.add_argument("--init-I", 602 help="Sieving range"+c, 603 default=14) 604 parser.add_argument("--init-minB1", 605 help="ECM first B1" + c, 606 default=200) 607 parser.add_argument("--init-mineff", 608 help="ECM minimal effort" + c, 609 default=1000) 610 parser.add_argument("--init-maxeff", 611 help="ECM maximal effort" + c, 612 default=100000) 613 parser.add_argument("--init-side", 614 help="Side of the bootstrap (when there is no rational side)", 615 default=1) 616 # Slave las processes in the initial step. 617 parser.add_argument("--slaves", 618 help="Number of slaves to use", 619 type=int, default=1) 620 # In case we used an external process 621 parser.add_argument("--external-init", 622 help="Use precomputed external data for the descent bootstrap", 623 type=str, 624 default=None) 625 626 def __init__(self, general, args): 627 self.general = general 628 self.logDB = general.logDB 629 630 if args.external_init != None: 631 self.external = args.external_init 632 if not os.path.exists(self.external): 633 raise NameError("Given external file for init does not exist") 634 else: 635 self.external = None 636 self.tkewness = int(args.init_tkewness) 637 self.lim = int(args.init_lim) 638 self.lpb = int(args.init_lpb) 639 self.mfb = int(args.init_mfb) 640 self.ncurves = int(args.init_ncurves) 641 self.I = int(args.init_I) 642 self.side = int(args.init_side) 643 self.mineff = int(args.init_mineff) 644 self.maxeff = int(args.init_maxeff) 645 self.minB1 = int(args.init_minB1) 646 self.slaves = int(args.slaves) 647 # the final step needs to know the init side as well. 648 general.init_side = int(args.init_side) 649 650 def __isqrt(self, n): 651 x = n 652 y = (x + 1) // 2 653 while y < x: 654 x = y 655 y = (x + n // x) // 2 656 return x 657 658 def __myxgcd(self, a, b, T): 659 assert type(a) == int 660 assert type(b) == int 661 assert type(T) == int 662 ainit = a 663 binit = b 664 bound = self.__isqrt(b*T) 665 x = 0 666 lastx = 1 667 y = 1 668 lasty = 0 669 while abs(b) > bound: 670 q = a // b 671 r = a % b 672 a = b 673 b = r 674 newx = lastx - q*x 675 lastx = x 676 x = newx 677 newy = lasty - q*y 678 lasty = y 679 y = newy 680 return [ [ b, x ], [ a, lastx ] ] 681 682 def use_external_data(self, z): 683 fil = open(self.external, "r") 684 rrr = fil.read() 685 fil.close() 686 lines = rrr.splitlines() 687 e = int(lines[0]) 688 Num = int(lines[1]) 689 Den = int(lines[2]) 690 ## check that we are talking about the same z! 691 p = general.p() 692 zz = pow(z, e, p) 693 assert (zz*Den-Num) % p == 0 694 general.initrandomizer = e # for later use 695 fnum = [ int(x) for x in lines[3].split() ] 696 fden = [ int(x) for x in lines[4].split() ] 697 large_q = [ int(x) for x in lines[5].split() ] 698 descrelfile = lines[6] 699 700 ## create todolist from fnum and fden, skipping primes of the 701 ## large_q list 702 prefix = general.prefix() + ".descent.%s.init." % general.short_target() 703 todofilename = os.path.join(general.datadir(), prefix + "todo") 704 with open(todofilename, "w") as f: 705 for q in fnum + fden: 706 if q in large_q: 707 continue 708 if self.logDB.has(q,-1,0): 709 continue 710 logq = math.ceil(math.log(q, 2)) 711 print("Will do further descent for %d-bit rational prime %d" 712 % (logq, q)) 713 # las can understand when the rational root is missing 714 f.write("0 %d\n" % q) 715 fil = open(descrelfile, "r") 716 rrr = fil.read() 717 fil.close() 718 lines = rrr.splitlines() 719 with open(todofilename, "a") as f: 720 for line in lines: 721 foo = re.match("^Taken: ([0-9\-]+),([0-9\-]+):([0-9a-fA-F,]+):([0-9a-fA-F,]+)", line) 722 assert foo 723 foog = foo.groups() 724 a = int(foog[0]) 725 b = int(foog[1]) 726 list_p = [[int(x, 16) for x in foog[i].split(",") ] for i in [2, 3]] 727 for side in range(2): 728 for p in list_p[side]: 729 if p in large_q: 730 continue 731 if side == 0: 732 if not self.logDB.has(p,-1,0): 733 f.write("0 %d\n" % p) 734 else: 735 if b % p == 0: 736 continue 737 ideal = ideals_above_p(p, 1, a, b, side, general) 738 if ideal.get_log() != None: 739 continue 740 else: 741 r = a_over_b_mod_p(a, b, p) 742 f.write("1 %d %d\n" % (p, r)) 743 return todofilename, [Num, Den, fnum, fden], descrelfile 744 745 def do_descent_for_real(self, z, seed): 746 p = general.p() 747 bound = p.bit_length() // 2 + 20 748 # make the randomness deterministic to be able to replay 749 # interrupted computations. 750 random.seed(seed) 751 general.initrandomizer = random.randrange(p) 752 while True: 753 zz = pow(z, general.initrandomizer, p) 754 gg = self.__myxgcd(zz, p, self.tkewness) 755 if (gg[0][0].bit_length() < bound and 756 gg[1][0].bit_length() < bound and 757 gg[0][1].bit_length() < bound and 758 gg[1][1].bit_length() < bound): 759 break 760 print ("Skewed reconstruction. Let's randomize the input.") 761 general.initrandomizer = random.randrange(p) 762 763 tmpdir = general.tmpdir() 764 prefix = general.prefix() + ".descent.%s.%s.init." % (general.short_target(), seed) 765 766 polyfilename = os.path.join(tmpdir, prefix + "poly") 767 with open(polyfilename, 'w') as f: 768 f.write("n: %d\n" % p) 769 f.write("skew: 1\n") 770 f.write("c1: %d\n" % gg[0][0]) 771 f.write("c0: %d\n" % gg[1][0]) 772 f.write("Y1: %d\n" % gg[0][1]) 773 f.write("Y0: %d\n" % gg[1][1]) 774 775 print ("--- Sieving (initial) ---") 776 relsfilename = os.path.join(general.datadir(), prefix + "rels") 777 if os.path.exists(relsfilename): 778 processes = [important_file(relsfilename,[])] 779 else: 780 fbcfilename = os.path.join(tmpdir, prefix + "fbc") 781 call_common = [ general.las_bin(), 782 "-poly", polyfilename, 783 "-lim0", self.lim, 784 "-lim1", self.lim, 785 "-lpb0", self.lpb, 786 "-lpb1", self.lpb, 787 "-mfb0", self.mfb, 788 "-mfb1", self.mfb, 789 "-ncurves0", self.ncurves, 790 "-ncurves1", self.ncurves, 791 "-fbc", fbcfilename, 792 "-I", self.I 793 ] 794 def fbc_call(): 795 call_that = call_common + [ 796 "-q0", self.tkewness, 797 "-q1", self.tkewness, 798 "-nq", 0, 799 "-t", "machine,1,pu" if has_hwloc else "4" 800 ] 801 call_that = [str(x) for x in call_that] 802 return call_that 803 def construct_call(q0,q1): 804 call_that = call_common + [ 805 "-q0", q0, 806 "-q1", q1, 807 "--exit-early", 2, 808 "-t", "auto" if has_hwloc else "4" 809 ] 810 call_that = [str(x) for x in call_that] 811 return call_that 812 813 call_params = [(os.path.join(relsfilename+"."+str(i)), # outfile 814 self.tkewness+100000*i, #q0 815 self.tkewness+100000*(i+1)) for i in range(self.slaves)] #q1 816 817 if not os.path.exists(fbcfilename): 818 all_ok = True 819 for t in call_params: 820 if not os.path.exists(t[0]): 821 all_ok = False 822 break 823 824 if all_ok: 825 print (" - Using %s" % fbcfilename) 826 else: 827 print (" - Factor base cache -") 828 with open(os.devnull, 'w') as devnull: 829 subprocess.check_call(fbc_call(), stdout=devnull) 830 print (" - done -") 831 832 # Whether or not the output files are already present, this 833 # will do the right thing and run the new processes only if 834 # needed. 835 processes = [important_file(outfile, construct_call(q0,q1)) for (outfile,q0,q1) in call_params] 836 837 q = Queue() 838 def enqueue_output(i,out,q): 839 for line in out: 840 q.put((i,line)) 841 #q.close() 842 843 threads = [Thread(target=enqueue_output, args=(i,process,q)) for (i,process) in enumerate(processes)] 844 for t in threads: 845 t.daemon = True 846 t.start() 847 848 rel = None 849 while True: 850 try: 851 i,line = q.get_nowait() 852 except Empty: 853 if all([not t.is_alive() for t in threads]): 854 if q.empty(): 855 break 856 else: 857 continue 858 else: 859 if line[0] != '#': 860 rel = line.strip() 861 continue 862 if re.match("^# Sieving.*q=", line): 863 sys.stdout.write('\n') 864 print(line.rstrip()) 865 continue 866 foo = re.match("^# (\d+) relation", line) 867 if not foo: 868 sys.stdout.write('.') 869 sys.stdout.flush() 870 continue 871 sys.stdout.write('\n') 872 print(line.rstrip()) 873 if int(foo.groups()[0]) > 0: 874 for j,process in enumerate(processes): 875 if j != i: 876 process.child.kill() 877 if not os.path.exists(relsfilename): 878 shutil.copyfile(relsfilename+"."+str(i),relsfilename) 879 break 880 881 sys.stdout.write('\n') 882 if not rel: 883 print("No relation found!") 884 print("Trying again with another random seed...") 885 return None, None, None 886 print("Taking relation %s\n" % rel) 887 rel = rel.split(':') 888 a,b = [int(x) for x in rel[0].split(',')] 889 890 Num = a*gg[0][0] + b*gg[1][0] 891 Den = a*gg[0][1] + b*gg[1][1] 892 assert (zz*Den-Num) % p == 0 893 894 factNum = [ int(x, 16) for x in rel[2].split(',') ] 895 factDen = [ int(x, 16) for x in rel[1].split(',') ] 896 print(Num, Den, factNum, factDen) 897 898 assert(abs(Num) == functools.reduce(lambda x,y:x*y,factNum,1)) 899 assert(abs(Den) == functools.reduce(lambda x,y:x*y,factDen,1)) 900 901 lc_ratpol = int(general.poly_data()["Y"][1]) 902 for q in factNum + factDen: 903 if not self.logDB.has(q,-1,0): 904 if lc_ratpol % q == 0: 905 print("Would need to descend %s which divides the lc of the rational poly." % q) 906 print("Trying again with a new seed.") 907 return None, None, None 908 909 todofilename = os.path.join(general.datadir(), prefix + "todo") 910 911 if not os.path.exists(todofilename): 912 with open(todofilename, "w") as f: 913 for q in factNum + factDen: 914 if self.logDB.has(q,-1,0): 915 continue 916 logq = math.ceil(math.log(q, 2)) 917 print("Will do further descent for %d-bit rational prime %d" 918 % (logq, q)) 919 # las can understand when the rational root is missing 920 f.write("0 %d\n" % q) 921 else: 922 with open(todofilename, "r") as f: 923 for line in f: 924 side,q = line.strip().split(' ') 925 q=int(q) 926 logq = math.ceil(math.log(q, 2)) 927 print("Will do further descent for %d-bit rational prime %d" % (logq, q)) 928 929 930 return todofilename, [Num, Den, factNum, factDen], None 931 932 def do_descent_nonlinear(self, z): 933 p = general.p() 934 tmpdir = general.tmpdir() 935 prefix = general.prefix() + ".descent.%s.upper." % general.short_target() 936 polyfilename = os.path.join(tmpdir, prefix + "poly") 937 if general.extdeg() == 1: 938 zz = [ z ] 939 else: 940 zz = z 941 call_that = [ general.descentinit_bin(), 942 "-poly", general.poly(), 943 "-mt", 4, 944 "-minB1", self.minB1, 945 "-mineff", self.mineff, 946 "-maxeff", self.maxeff, 947 "-side", self.side, 948 "-extdeg", general.extdeg(), 949 "-lpb", self.lpb, 950 "-seed", 42, 951 "-jl", 952 p ] + zz 953 call_that = [str(x) for x in call_that] 954 initfilename = os.path.join(general.datadir(), prefix + "init") 955 with important_file(initfilename, call_that) as f: 956 for line in f: 957 line = line.strip() 958 foo = re.match("^Youpi: e = (\d+) is a winner", line) 959 if foo: 960 general.initrandomizer = int(foo.groups()[0]) 961 foo = re.match("^U = ([0-9\-,]+)", line) 962 if foo: 963 general.initU = [ int(x) for x in foo.groups()[0].split(',') ] 964 foo = re.match("^V = ([0-9\-,]+)", line) 965 if foo: 966 general.initV = [ int(x) for x in foo.groups()[0].split(',') ] 967 foo = re.match("^u = ([0-9]+)", line) 968 if foo: 969 general.initu = int(foo.groups()[0]) 970 foo = re.match("^v = ([0-9]+)", line) 971 if foo: 972 general.initv = int(foo.groups()[0]) 973 foo = re.match("^fac_u = ([, 0-9]+)", line) 974 if foo: 975 general.initfacu = [ [ int(y) for y in x.split(',') ] for x in foo.groups()[0].split(' ') ] 976 foo = re.match("^fac_v = ([, 0-9]+)", line) 977 if foo: 978 general.initfacv = [ [ int(y) for y in x.split(',') ] for x in foo.groups()[0].split(' ') ] 979 980 todofilename = os.path.join(general.datadir(), prefix + "todo") 981 print(general.initfacu) 982 print(general.initfacv) 983 984 if not os.path.exists(todofilename): 985 with open(todofilename, "w") as f: 986 for ideal in general.initfacu + general.initfacv: 987 q = ideal[0] 988 r = ideal[1] 989 if self.logDB.has(q,r,self.side): 990 continue 991 logq = math.ceil(math.log(q, 2)) 992 print("Will do further descent for %d-bit prime %d" 993 % (logq, q)) 994 f.write("%d %d %d\n" % (self.side, q, r)) 995 else: 996 with open(todofilename, "r") as f: 997 for line in f: 998 ll = line.strip().split(' ') 999 side = ll[0] 1000 q = ll[1] 1001 q=int(q) 1002 logq = math.ceil(math.log(q, 2)) 1003 print("Will do further descent for %d-bit prime %d" % (logq, q)) 1004 1005 1006 return todofilename, [general.initU, general.initV, 1007 general.initfacu, general.initfacv], None 1008 1009 def do_descent(self, z): 1010 if not self.external: 1011 if general.has_rational_side(): 1012 seed=42 1013 while True: 1014 tdf, spl, frf = self.do_descent_for_real(z, seed) 1015 if tdf != None: 1016 return tdf, spl, frf 1017 else: 1018 seed += 1 1019 else: 1020 return self.do_descent_nonlinear(z) 1021 else: 1022 return self.use_external_data(z) 1023 1024class DescentMiddleClass(object): 1025 def declare_args(parser): 1026 # TODO: import default values from the sieving parameters. 1027 parser.add_argument("--descent-hint", 1028 help="Hintfile for the descent", 1029 required=True, # TODO: fall back on default values. 1030 ) 1031 parser.add_argument("--I", 1032 help="Default value for I (must match hint file)", 1033 required=True, 1034 type=int) 1035 for side in range(2): 1036 parser.add_argument("--mfb%d" % side, 1037 help="Default cofactor bound on side %d" % side, 1038 required=True, 1039 type=int) 1040 parser.add_argument("--lim%d" % side, 1041 help="Default factor base bound on side %d (must match hint file)" % side, 1042 required=True, 1043 type=int) 1044 1045 def __init__(self, general, args): 1046 self.general = general 1047 self.args = args 1048 # We need to do some safety checking 1049 values_I=set() 1050 values_lim0=set() 1051 values_lim1=set() 1052 values_I.add(args.I) 1053 values_lim0.add(args.lim0) 1054 values_lim1.add(args.lim1) 1055 with open(args.descent_hint, 'r') as file: 1056 for line in file: 1057 if re.match("^\s*#", line): 1058 continue 1059 if re.match("^\s*$", line): 1060 continue 1061 line = line.strip() 1062 foo = re.match("^.*I=(\d+)\s+(\d+),[\d.,]+\s+(\d+),[\d.,]+$", 1063 line) 1064 if not foo: 1065 print("Warning, parse error in hint file at line:\n" + line) 1066 continue 1067 I,lim0,lim1 = foo.groups() 1068 values_I.add(int(I)) 1069 values_lim0.add(int(lim0)) 1070 values_lim1.add(int(lim1)) 1071 if len(values_lim0)>1: 1072 raise ValueError("lim0 values should match between cmdline and hint file") 1073 if len(values_lim1)>1: 1074 raise ValueError("lim1 values should match between cmdline and hint file") 1075 if len(values_I)>1: 1076 raise ValueError("I values should match between cmdline and hint file") 1077 print("Consistency check for las_descent passed") 1078 print("\tI=%d" % values_I.pop()) 1079 print("\tlim0=%d" % values_lim0.pop()) 1080 print("\tlim1=%d" % values_lim1.pop()) 1081 1082 1083 def do_descent(self, todofile): 1084 tmpdir = general.tmpdir() 1085 prefix = general.prefix() + ".descent.%s.middle." % general.short_target() 1086 1087 f = open(todofile, 'r') 1088 ntodo = len(list(f)) 1089 f.close() 1090 print ("--- Sieving (middle, %d rational primes) ---" % ntodo) 1091 s=general.lasMiddle_base_args() 1092 if args.descent_hint: 1093 s += [ "--descent-hint-table", args.descent_hint ] 1094 s += [ 1095 "--I", self.args.I, 1096 "--lim0", self.args.lim0, 1097 "--lim1", self.args.lim1, 1098 "--lpb0", general.lpb0(), 1099 "--mfb0", self.args.mfb0, 1100 "--lpb1", general.lpb1(), 1101 "--mfb1", self.args.mfb1, 1102 "-t", "machine,1,pu" if has_hwloc else "4" 1103 ] 1104 s += [ "--todo", todofile ] 1105 call_that=[str(x) for x in s] 1106 relsfilename = os.path.join(general.datadir(), prefix + "rels") 1107 1108 printing = False 1109 failed = [] 1110 with important_file(relsfilename, call_that) as relstream: 1111 for line in relstream: 1112 if re.match("^# taking path", line): 1113 print(line.rstrip()) 1114 elif re.match("^# END TREE", line): 1115 print("") 1116 printing = False 1117 elif printing: 1118 print(line.rstrip()) 1119 foo = re.match("# FAILED (\d+\@\d+)", line) 1120 if foo: 1121 failed.append(foo.groups()[0]) 1122 elif re.match("^# BEGIN TREE", line): 1123 print("") 1124 printing=True 1125 1126 if failed: 1127 raise RuntimeError("Failed descents for: " + ", ".join(failed)) 1128 1129 return relsfilename 1130 1131def prime_ideal_mixedprint(pr): 1132 p = pr[0] 1133 side = pr[1] 1134 if side == 0: 1135 machine = "%x 0 rat" % p 1136 human = "0,%d" % p 1137 else: 1138 r = pr[2] 1139 machine = "%x %d %x" % pr 1140 human = "%d,%d,%d" % (side,p,r) 1141 return machine,human 1142 1143 1144class DescentLowerClass(object): 1145 def declare_args(parser): 1146 pass 1147 def __init__(self, general, args): 1148 self.general = general 1149 self.args = args 1150 1151 def __count_multiplicites(self, L): 1152 LL = [] 1153 prev_p = L[0] 1154 m = 1 1155 for i in range(1,len(L)): 1156 p = L[i] 1157 if p == prev_p: 1158 m += 1 1159 else: 1160 LL.append([prev_p, m]) 1161 prev_p = p 1162 m = 1 1163 LL.append([prev_p, m]) 1164 return LL 1165 1166 def do_descent(self, relsfile, initial_split): 1167 args = parser.parse_args() 1168 tmpdir = general.tmpdir() 1169 prefix = general.prefix() + ".descent.%s.lower." % general.short_target() 1170 relsforSM = os.path.join(tmpdir, prefix + "relsforSM") 1171 SMfile = os.path.join(tmpdir, prefix + "SM") 1172 1173 # Read descent relations 1174 descrels = [] 1175 for rfile in relsfile: 1176 with open(rfile, 'r') as file: 1177 with open(relsforSM, 'a') as fileSM: 1178 for line in file: 1179 foo = re.match("^Taken: (-?\d+),(-?\d+):", line) 1180 if foo: 1181 r = line.split(':')[1:] 1182 r[0] = r[0].lstrip() 1183 fileSM.write(r[0] + ":" + r[1] + ":" + r[2]) 1184 a,b = r[0].split(',') 1185 a=int(a) 1186 b=int(b) 1187 list_p = [ [], [] ] 1188 for side in range(2): 1189 for p in r[side+1].strip().split(','): 1190 list_p[side].append(int(p, 16)) 1191 list_p = [ self.__count_multiplicites(list_p[0]), 1192 self.__count_multiplicites(list_p[1])] 1193 descrels.append(([a,b], list_p)) 1194 nrels = len(descrels) 1195 print ("--- Final reconstruction (from %d relations) ---" % nrels) 1196 1197 # Compute SM 1198 call_that = [ general.sm_simple_bin(), 1199 "-poly", general.poly(), 1200 "-inp", relsforSM, 1201 "-out", SMfile, 1202 "-ell", general.ell() 1203 ] 1204 if self.args.sm_mode is not None: 1205 call_that += [ "-sm-mode", self.args.sm_mode ] 1206 call_that = [str(x) for x in call_that] 1207 print("command line:\n" + " ".join(call_that)) 1208 with open(os.devnull, 'w') as devnull: 1209 subprocess.check_call(call_that, stderr=devnull) 1210 1211 SM = [] 1212 with open(SMfile, 'r') as file: 1213 for line in file: 1214 r = line.split() 1215 sm = [ int(x) for x in r ] 1216 SM.append(sm) 1217 assert len(SM) == nrels 1218 1219 # Reverse the order of relations to get only one unknown log 1220 # per relation while processing them 1221 descrels.reverse() 1222 SM.reverse() 1223 1224 # Fill-in the log database 1225 logDB = general.logDB 1226 irel = 0 1227 for rel in descrels: 1228 unk = None 1229 a, b = rel[0] 1230 list_p = rel[1] 1231 acc_log = 0 1232 if logDB.fullcolumn != None: 1233 acc_log += logDB.fullcolumn 1234 sm = SM[irel] 1235 for i in range(len(sm)): 1236 acc_log += logDB.allSM(i)*sm[i] 1237 for side in range(2): 1238 for p, k in list_p[side]: 1239 ideal = ideals_above_p(p, k, a, b, side, general) 1240 log = ideal.get_log() 1241 if log == None: 1242 if unk != None: 1243 raise ValueError( 1244 "Two unknown ideals in relation a,b=%d,%d: %d (side %d) and %d (side %d)" 1245 % (a, b, unk[0], unk[2], p, side)) 1246 else: 1247 unk = [p, a_over_b_mod_p(a, b, p), side] 1248 else: 1249 acc_log += ideal.get_log() 1250 acc_log = acc_log % general.ell() 1251 if unk == None: 1252 assert acc_log == 0 1253 else: 1254 log = general.ell() - acc_log 1255 print ("Deduced log of (%d, %d, %d) from rel: %d" 1256 % (unk[0], unk[1], unk[2], log)) 1257 logDB.add_log(unk[0], unk[1], unk[2], log) 1258 irel += 1 1259 1260 if general.has_rational_side(): 1261 ## Deduce the log of the target 1262 Num, Den, factNum, factDen = initial_split 1263 log_target = 0 1264 errors=[] 1265 for p in factNum: 1266 lp = logDB.get_log(p, -1, 0) 1267 if lp is None: 1268 errors.append(p) 1269 else: 1270 log_target = log_target + lp 1271 for p in factDen: 1272 lp = logDB.get_log(p, -1, 0) 1273 if lp is None: 1274 errors.append(p) 1275 else: 1276 log_target = log_target - lp 1277 if len(errors): 1278 msg = "Some logarithms missing:\n" 1279 msg += "\n".join(["\t"+str(x) for x in errors]) 1280 raise RuntimeError(msg) 1281 p=general.p() 1282 ell=general.ell() 1283 log_target = log_target % ell 1284 if general.initrandomizer != 1: 1285 # divide result by randomizer modulo ell 1286 multiplier = pow(general.initrandomizer, ell-2, ell) 1287 log_target = (log_target * multiplier) % ell 1288 print("# p=%d" % p) 1289 print("# ell=%d" % ell) 1290 print("log(2)=%d" % logDB.get_log(2, -1, 0)) 1291 print("log(3)=%d" % logDB.get_log(3, -1, 0)) 1292 print("# target=%s" % args.target) 1293 print("log(target)=%d" % log_target) 1294 check_result(2, logDB.get_log(2, -1, 0), int(args.target), log_target, p, ell) 1295 else: 1296 ## No rational side; more complicated. 1297 # We need to compute the SMs for U and V. 1298 polyforSM = os.path.join(tmpdir, prefix + "polyforSM") 1299 SM2file = os.path.join(tmpdir, prefix + "SM2") 1300 1301 with open(polyforSM, 'w') as f: 1302 for poly in [ general.initU, general.initV ]: 1303 f.write("p %d" % (len(poly)-1)) 1304 for c in poly: 1305 f.write(" %d" % c) 1306 f.write("\n") 1307 1308 call_that = [ general.sm_simple_bin(), 1309 "-poly", general.poly(), 1310 "-inp", polyforSM, 1311 "-out", SM2file, 1312 "-ell", general.ell() 1313 ] 1314 call_that = [str(x) for x in call_that] 1315 print("command line:\n" + " ".join(call_that)) 1316 with open(os.devnull, 'w') as devnull: 1317 subprocess.check_call(call_that, stderr=devnull) 1318 1319 SM2 = [] 1320 with open(SM2file, 'r') as file: 1321 for line in file: 1322 r = line.split() 1323 sm = [ int(x) for x in r ] 1324 SM2.append(sm) 1325 assert len(SM2) == 2 1326 1327 ell = general.ell() 1328 vlog = [0, 0] 1329 factored = [ general.initfacu, general.initfacv ] 1330 for i in range(0,2): 1331 for xx in factored[i]: 1332 vlog[i] += logDB.get_log(xx[0], xx[1], general.init_side) 1333 ind_shift = 0 1334 if general.init_side == 1: 1335 ind_shift = logDB.nSM(0) 1336 for j in range(logDB.nSM(general.init_side)): 1337 vlog[i] += logDB.SM(general.init_side,j)*SM2[i][ind_shift+j] 1338 vlog[i] = vlog[i] % ell 1339 1340 log_target = (vlog[0] - vlog[1]) % ell 1341 multiplier = pow(general.initrandomizer, ell-2, ell) 1342 log_target = (log_target * multiplier) % ell 1343 print("# p=%d" % general.p()) 1344 print("# ell=%d" % ell) 1345 print("# target=%s" % args.target) 1346 print("log(target)=%d" % log_target) 1347 1348 1349 1350# http://stackoverflow.com/questions/107705/disable-output-buffering 1351# shebang takes only one arg... 1352# python3 doesn't grok sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 0) 1353# setting PYTHONUNBUFFERED here is too late. 1354class drive_me_crazy(object): 1355 def __init__(self, stream, timestamp=False): 1356 self.stream = stream 1357 self.eol = 1 1358 self.timestamp = timestamp 1359 def write(self, data): 1360 if self.timestamp: 1361 p=0 1362 while len(data) > p: 1363 d = data.find('\n', p) 1364 if d < 0: 1365 break 1366 if self.eol: 1367 self.stream.write(time.asctime() + " ") 1368 self.stream.write(data[p:d+1]) 1369 self.eol=True 1370 p = d + 1 1371 if len(data) > p: 1372 if self.eol: 1373 self.stream.write(time.asctime() + " ") 1374 self.stream.write(data[p:]) 1375 self.eol= False 1376 else: 1377 self.stream.write(data) 1378 self.stream.flush() 1379 def __getattr__(self, attr): 1380 return getattr(self.stream, attr) 1381 1382 1383if __name__ == '__main__': 1384 1385 # Parse command line arguments 1386 parser = argparse.ArgumentParser(description="Descent initialization for DLP") 1387 1388 # Required 1389 parser.add_argument("--target", help="Element whose DL is wanted", 1390 type=str, required=True) 1391 parser.add_argument("--timestamp", 1392 help="Prefix all lines with a time stamp", 1393 action="store_true") 1394 1395 1396 GeneralClass.declare_args(parser) 1397 DescentUpperClass.declare_args(parser) 1398 DescentMiddleClass.declare_args(parser) 1399 DescentLowerClass.declare_args(parser) 1400 1401 args = parser.parse_args() 1402 1403 sys.stdout = drive_me_crazy(sys.stdout, args.timestamp) 1404 1405 las_bin = os.path.join(args.cadobindir, "sieve", "las") 1406 cp = subprocess.Popen([ las_bin, "-help" ], 1407 stdout=subprocess.PIPE, 1408 stderr=subprocess.PIPE) 1409 if re.search("unused, needs hwloc", cp.stderr.read().decode()): 1410 has_hwloc = False 1411 else: 1412 has_hwloc = True 1413 1414 general = GeneralClass(args) 1415 1416 if general.target() == 1: 1417 # the re-randomization does not work for target=1 1418 print("# p=%d" % general.p()) 1419 print("# ell=%d" % general.ell()) 1420 print("# target=%s" % args.target) 1421 print("log(target)=0") 1422 else: 1423 init = DescentUpperClass(general, args) 1424 middle = DescentMiddleClass(general, args) 1425 lower = DescentLowerClass(general, args) 1426 1427 todofile, initial_split, firstrelsfile = init.do_descent(general.target()) 1428 relsfile = middle.do_descent(todofile) 1429 if firstrelsfile: 1430 lower.do_descent([firstrelsfile, relsfile], initial_split) 1431 else: 1432 lower.do_descent([relsfile], initial_split) 1433 1434 general.cleanup() 1435