      SUBROUTINE dlarrv2( N, VL, VU, D, L, PIVMIN,
     $                    ISPLIT, M, DOL, DOU, NEEDIL, NEEDIU,
     $                    MINRGP, RTOL1, RTOL2, W, WERR, WGAP,
     $                    IBLOCK, INDEXW, GERS, SDIAM, Z, LDZ, ISUPPZ,
     $                    WORK, IWORK, VSTART, FINISH,
     $                    MAXCLS, NDEPTH, PARITY, ZOFFSET, INFO )
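*
*     DLARRV2 computes the eigenvectors of the tridiagonal matrix
*     T = L D L**T given L, D, and approximations to the eigenvalues
*     of L D L**T.  It works down a representation tree: eigenvalues
*     are refined by bisection (DLARRB2), new shifted representations
*     for clusters are computed by DLARRF2, and eigenvectors of well
*     separated eigenvalues are obtained from twisted factorizations
*     (DLAR1VA) with Rayleigh quotient correction.
*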
      INTEGER            DOL, DOU, INFO, LDZ, M, N, MAXCLS,
     $                   NDEPTH, NEEDIL, NEEDIU, PARITY, ZOFFSET
      DOUBLE PRECISION   MINRGP, PIVMIN, RTOL1, RTOL2, VL, VU
      LOGICAL            VSTART, FINISH
      INTEGER            IBLOCK( * ), INDEXW( * ), ISPLIT( * ),
     $                   ISUPPZ( * ), IWORK( * )
      DOUBLE PRECISION   D( * ), GERS( * ), L( * ), SDIAM( * ),
     $                   W( * ), WERR( * ), WGAP( * ), WORK( * )
      DOUBLE PRECISION   Z( LDZ, * )
      INTEGER            MAXITR, USE30, USE31, USE32A, USE32B
      PARAMETER          ( MAXITR = 10, USE30 = 30, USE31 = 31,
     $                     USE32A = 3210, USE32B = 3211 )
      DOUBLE PRECISION   ZERO, ONE, TWO, THREE, FOUR, HALF
      PARAMETER          ( ZERO = 0.0D0, ONE = 1.0D0,
     $                     TWO = 2.0D0, THREE = 3.0D0,
     $                     FOUR = 4.0D0, HALF = 0.5D0 )
      LOGICAL            DELREF, ESKIP, NEEDBS, ONLYLC, STP2II, TRYMID,
     $                   TRYRQC, USEDBS, USEDRQ
      INTEGER            I, IBEGIN, IEND, II, IINCLS, IINDC1, IINDC2,
     $                   IINDWK, IINFO, IM, IN, INDEIG, INDLD, INDLLD,
     $                   INDWRK, ISUPMN, ISUPMX, ITER, ITMP1, ITWIST,
     $                   J, JBLK, K, KK, MINIWSIZE, MINWSIZE, MYWFST,
     $                   MYWLST, NCLUS, NEGCNT, NEWCLS, NEWFST, NEWFTT,
     $                   NEWLST, NEWSIZ, OFFSET, OLDCLS, OLDFST,
     $                   OLDIEN, OLDLST, OLDNCL, P, Q, VRTREE, WBEGIN,
     $                   WEND, WINDEX, WINDMN, WINDPL, ZFROM, ZINDEX,
     $                   ZTO, ZUSEDL, ZUSEDU, ZUSEDW
      DOUBLE PRECISION   AVGAP, BSTRES, BSTW, ENUFGP, EPS, FUDGE, GAP,
     $                   GAPTOL, LAMBDA, LEFT, LGAP, LGPVMN, LGSPDM,
     $                   LOG_IN, MGAP, MINGMA, MYERR, NRMINV, NXTERR,
     $                   ORTOL, RESID, RGAP, RIGHT, RLTL30, RQCORR,
     $                   RQTOL, SAVEGP, SGNDEF, SIGMA, SPDIAM, SSIGMA,
     $                   TAU, TMP, TOL, ZTZ
      INTEGER            SPLACE( 4 )
      DOUBLE PRECISION   DLAMCH
      DOUBLE PRECISION   DDOT, DNRM2
      EXTERNAL           DDOT, DLAMCH, DNRM2
      EXTERNAL           DAXPY, DCOPY, DLAR1VA, DLARRB2,
     $                   DLARRF2, DLASET, DSCAL
      INTRINSIC          abs, dble, max, min, sqrt
      eps = dlamch( 'Precision' )
      lgpvmn = log( pivmin )
      zusedw = zusedu - zusedl + 1
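*     Initialize the used part of the eigenvector matrix Z to zero.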
      CALL dlaset( 'Full', n, zusedw, zero, zero,
     $             z(1,(zusedl-zoffset)), ldz )
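*
*     First pass over the blocks of the matrix: initialize the cluster
*     bookkeeping in IWORK and handle 1 x 1 blocks directly.
*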
      DO 10 jblk = 1, iblock( m )
         iend = isplit( jblk )
         IF( iblock( wend+1 ).EQ.jblk ) THEN
         IF( wend.LT.wbegin ) THEN
            iwork( iincls + jblk ) = 0
         ELSEIF( (wend.LT.dol).OR.(wbegin.GT.dou) ) THEN
            iwork( iincls + jblk ) = 0
         im = wend - wbegin + 1
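*        A 1 x 1 block has e_1 as its eigenvector; only the shift
*        SIGMA needs to be added back to the eigenvalue.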
         IF( ibegin.EQ.iend ) THEN
            iwork( iincls + jblk ) = 0
            z( ibegin, (wbegin-zoffset) ) = one
            isuppz( 2*wbegin-1 ) = ibegin
            isuppz( 2*wbegin ) = ibegin
            w( wbegin ) = w( wbegin ) + sigma
            work( wbegin ) = w( wbegin )
            CALL dcopy( im, w( wbegin ), 1,
     $                  work( wbegin ), 1 )
            w( wbegin+i-1 ) = w( wbegin+i-1 ) + sigma
            iwork( iincls + jblk ) = 1
            iwork( iindc1+ibegin ) = 1
            iwork( iindc1+ibegin+1 ) = im
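*
*     Main loop over the blocks: process the representation tree of
*     each block until all clusters have been resolved into singletons.
*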
      DO 170 jblk = 1, iblock( m )
         iend = isplit( jblk )
         IF( iblock( wend+1 ).EQ.jblk ) THEN
         oldncl = iwork( iincls + jblk )
         IF( oldncl.EQ.0 ) THEN
         in = iend - ibegin + 1
         im = wend - wbegin + 1
         lgspdm = log( spdiam + pivmin )
         ortol = spdiam*1.0d-3
         avgap = spdiam/dble(in-1)
         enufgp = min(ortol,avgap)
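*        ENUFGP is the gap threshold used below when deciding whether a
*        gap is large enough to split off a child cluster.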
         IF( oldncl.GT.0 ) THEN
            IF( ndepth.GT.m ) THEN
         log_in = log(dble(in))
         rltl30 = min( 1.0d-2, one / dble( in ) )
         IF( parity.EQ.0 ) THEN
            oldcls = iindc1+ibegin-1
            newcls = iindc2+ibegin-1
         ELSE
            oldcls = iindc2+ibegin-1
            newcls = iindc1+ibegin-1
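*
*        Process the clusters found on the previous level of the
*        representation tree.
*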
            oldfst = iwork( j-1 )
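*           For NDEPTH > 0, retrieve the relatively robust
*           representation (RRR) of the cluster that was computed at
*           the previous level; it is stored in two columns of Z.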
            IF( ndepth.GT.0 ) THEN
               IF((dol.EQ.1).AND.(dou.EQ.m)) THEN
                  j = wbegin + oldfst - 1
               IF(wbegin+oldfst-1.LT.dol) THEN
               ELSEIF(wbegin+oldfst-1.GT.dou) THEN
                  j = wbegin + oldfst - 1
               CALL dcopy( in, z( ibegin, (j-zoffset) ),
     $                     1, d( ibegin ), 1 )
               CALL dcopy( in-1, z( ibegin, (j+1-zoffset) ),
     $                     1, l( ibegin ), 1 )
               sigma = z( iend, (j+1-zoffset) )
               CALL dlaset( 'Full', in, 2, zero, zero,
     $                      z( ibegin, (j-zoffset) ), ldz )
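*              Compute the auxiliary quantities D*L and D*L*L of the
*              current representation; they are needed by bisection and
*              by the eigenvector computation.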
               DO 50 j = ibegin, iend-1
                  tmp = d( j )*l( j )
                  work( indld-1+j ) = tmp
                  work( indlld-1+j ) = tmp*l( j )
               IF( ndepth.GT.0 .AND. delref ) THEN
                  p = indexw( wbegin-1+oldfst )
                  q = indexw( wbegin-1+oldlst )
                  offset = indexw( wbegin ) - 1
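*                 Refine the eigenvalues of the cluster with respect to
*                 the current representation by bisection to the
*                 tolerances RTOL1 and RTOL2.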
                  CALL dlarrb2( in, d( ibegin ),
     $               work(indlld+ibegin-1),
     $               p, q, rtol1, rtol2, offset,
     $               work(wbegin),wgap(wbegin),werr(wbegin),
     $               work( indwrk ), iwork( iindwk ),
     $               pivmin, lgpvmn, lgspdm, in, iinfo )
                  IF( iinfo.NE.0 ) THEN
                  IF( oldfst.GT.1) THEN
                     wgap( wbegin+oldfst-2 ) =
     $                  max(wgap(wbegin+oldfst-2),
     $                  w(wbegin+oldfst-1)-werr(wbegin+oldfst-1)
     $                  - w(wbegin+oldfst-2)-werr(wbegin+oldfst-2) )
                  ENDIF
                  IF( wbegin + oldlst -1 .LT. wend ) THEN
                     wgap( wbegin+oldlst-1 ) =
     $                  max(wgap(wbegin+oldlst-1),
     $                  w(wbegin+oldlst)-werr(wbegin+oldlst)
     $                  - w(wbegin+oldlst-1)-werr(wbegin+oldlst-1) )
                  DO 53 j=oldfst,oldlst
                     w(wbegin+j-1) = work(wbegin+j-1)+sigma
               ELSEIF( (ndepth.EQ.0) .OR. (.NOT.delref) ) THEN
                  DO 54 j = oldfst, oldlst-1
                     myerr = werr(wbegin + j - 1)
                     nxterr = werr(wbegin + j )
                     wgap(wbegin+j-1) = max(wgap(wbegin+j-1),
     $                  ( work(wbegin+j) - nxterr )
     $                  - ( work(wbegin+j-1) + myerr ) )
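*              Classify the eigenvalues of the current node: depending
*              on the chosen criterion (USE30, USE31, USE32A, USE32B),
*              a sufficiently large relative gap makes an eigenvalue a
*              singleton, otherwise it becomes part of a child cluster.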
               DO 140 j = oldfst, oldlst
                  IF( j.EQ.oldlst ) THEN
                  IF (vrtree.EQ.use30) THEN
                     IF(wgap( wbegin + j -1).GE.
     $                  rltl30 * abs(work(wbegin + j -1)) ) THEN
                  ELSE IF (vrtree.EQ.use31) THEN
                     IF ( wgap( wbegin + j -1).GE.
     $                  minrgp* abs( work(wbegin + j -1) ) ) THEN
                  ELSE IF (vrtree.EQ.use32a) THEN
                     IF( (j.EQ.oldfst).AND.( wgap(wbegin+j-1).GE.
     $                  minrgp* abs(work(wbegin+j-1)) ) ) THEN
                     ELSE IF( (j.GT.oldfst).AND.(j.EQ.newfst).AND.
     $                  ( wgap(wbegin+j-2).GE.
     $                  minrgp* abs(work(wbegin+j-1)) ).AND.
     $                  ( wgap(wbegin+j-1).GE.
     $                  minrgp* abs(work(wbegin+j-1)) )
                     ELSE IF( (j.GT.newfst).AND.wgap(wbegin+j-1).GE.
     $                  (minrgp*abs(work(wbegin+j-1)) ) )
                     ELSE IF((j.GT.newfst).AND.(j+1.LT.oldlst).AND.
     $                  (wgap(wbegin+j-1).GE.enufgp))
                  ELSE IF (vrtree.EQ.use32b) THEN
                     IF( (j.EQ.oldfst).AND.( wgap(wbegin+j-1).GE.
     $                  minrgp* abs(work(wbegin+j-1)) ) ) THEN
                     ELSE IF( (j.GT.oldfst).AND.(j.EQ.newfst).AND.
     $                  ( wgap(wbegin+j-2).GE.
     $                  minrgp* abs(work(wbegin+j-1)) ).AND.
     $                  ( wgap(wbegin+j-1).GE.
     $                  minrgp* abs(work(wbegin+j-1)) )
                     ELSE IF( (j.GT.newfst).AND.wgap(wbegin+j-1).GE.
     $                  (minrgp*abs(work(wbegin+j-1)) ) )
                     ELSE IF((j.GT.newfst).AND.(j+1.LT.oldlst).AND.
     $                  (wgap( wbegin + j -1).GE.
     $                  rltl30 * abs(work(wbegin + j -1)) ))
                  newsiz = newlst - newfst + 1
                  maxcls = max( newsiz, maxcls )
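*                 Determine the column NEWFTT of Z in which the new
*                 representation of a child cluster will be stored.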
                  IF((dol.EQ.1).AND.(dou.EQ.m)) THEN
                     newftt = wbegin + newfst - 1
                  IF(wbegin+newfst-1.LT.dol) THEN
                  ELSEIF(wbegin+newfst-1.GT.dou) THEN
                     newftt = wbegin + newfst - 1
                  newftt = newftt - zoffset
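*                 A child with more than one eigenvalue is a cluster;
*                 compute its boundary gaps and a new RRR for it,
*                 unless it lies completely outside DOL:DOU.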
                  IF( newsiz.GT.1) THEN
                     IF((wbegin+newlst-1.LT.dol).OR.
     $                  (wbegin+newfst-1.GT.dou)) THEN
                     IF( newfst.EQ.1 ) THEN
                        lgap = max( zero,
     $                     w(wbegin)-werr(wbegin) - vl )
                     ELSE
                        lgap = wgap( wbegin+newfst-2 )
                     ENDIF
                     rgap = wgap( wbegin+newlst-1 )
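*                    For large clusters, look for a big gap in the
*                    middle third; if one is found, TRYMID is set and
*                    passed on to DLARRF2.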
                     IF(newsiz.GE.50) THEN
                        DO 545 k = newfst+newsiz/3, newlst-newsiz/3
                           IF(mgap.LT.wgap( wbegin+k-1 )) THEN
                              mgap = wgap( wbegin+k-1 )
                     needil = min(needil,wbegin+newfst-1)
                     neediu = max(neediu,wbegin+newlst-1)
                     trymid = (mgap.GT.gap)
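*                    Refine the eigenvalues at the positions recorded
*                    in SPLACE to high accuracy; they are used by
*                    DLARRF2 below when the new shift is chosen.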
                        p = indexw( wbegin-1+splace(k) )
                        offset = indexw( wbegin ) - 1
                        CALL dlarrb2( in, d(ibegin),
     $                     work( indlld+ibegin-1 ),p,p,
     $                     rqtol, rqtol, offset,
     $                     work(wbegin),wgap(wbegin),
     $                     werr(wbegin),work( indwrk ),
     $                     iwork( iindwk ),
     $                     pivmin, lgpvmn, lgspdm, in, iinfo )
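*                    Compute a new RRR for the child cluster with
*                    DLARRF2; the new representation is returned in
*                    columns NEWFTT and NEWFTT+1 of Z, and the shift
*                    SSIGMA is recorded in Z( IEND, NEWFTT+1 ).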
                     CALL dlarrf2( in, d( ibegin ), l( ibegin ),
     $                  work(indld+ibegin-1),
     $                  splace(1), splace(2),
     $                  splace(3), splace(4), work(wbegin),
     $                  wgap(wbegin), werr(wbegin), trymid,
     $                  spdiam, lgap, rgap, pivmin, tau,
     $                  z( ibegin, newftt ),
     $                  z( ibegin, newftt+1 ),
     $                  work( indwrk ), iinfo )
                     IF( iinfo.EQ.0 ) THEN
                        z( iend, newftt+1 ) = ssigma
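*                       Shift the eigenvalue approximations of the
*                       cluster by TAU so that they refer to the new
*                       representation; WERR is enlarged by a small
*                       FUDGE to account for the rounding error of the
*                       shift.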
                        DO 116 k = newfst, newlst
                           fudge =
     $                        three*eps*abs(work(wbegin+k-1))
                           work( wbegin + k - 1 ) =
     $                        work( wbegin + k - 1) - tau
                           fudge = fudge +
     $                        four*eps*abs(work(wbegin+k-1))
                           werr( wbegin + k - 1 ) =
     $                        werr( wbegin + k - 1 ) + fudge
                        iwork( k-1 ) = newfst
                     mywfst = max(wbegin-1+newfst,dol-1)
                     mywlst = min(wbegin-1+newlst,dou+1)
                     mywfst = wbegin-1+newfst
                     mywlst = wbegin-1+newlst
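*                    Compute the quantities needed for, and then
*                    perform, the refinement of the child cluster's
*                    eigenvalues with respect to its new
*                    representation.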
                     DO 5000 k = ibegin, iend-1
                     offset = indexw( wbegin ) - 1
     $                  z(ibegin, newftt ),
     $                  work(indwrk+ibegin-1),
     $                  p, q, rtol1, rtol2, offset,
     $                  work(wbegin),wgap(wbegin),werr(wbegin),
     $                  work( indwrk+n ), iwork( iindwk ),
     $                  pivmin, lgpvmn, lgspdm, in, iinfo )
                     IF( iinfo.NE.0 ) THEN
                     DO 5003 k=newfst,newlst
                        w(wbegin+k-1) = work(wbegin+k-1)+ssigma
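*                 The remaining code of the loop handles singletons: an
*                 isolated eigenvalue is refined if necessary and its
*                 eigenvector is computed from a twisted factorization,
*                 with Rayleigh quotient correction.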
                  tol = four * log_in * eps
                  windex = wbegin + k - 1
                  zindex = windex - zoffset
                  windmn = max(windex - 1,1)
                  windpl = min(windex + 1,m)
                  lambda = work( windex )
                  IF((windex.LT.dol).OR.
     $               (windex.GT.dou)) THEN
                  left = work( windex ) - werr( windex )
                  right = work( windex ) + werr( windex )
                  indeig = indexw( windex )
                  lgap = eps*max(abs(left),abs(right))
                  rgap = eps*max(abs(left),abs(right))
                  gap = min( lgap, rgap )
                  IF(( k .EQ. 1).OR.(k .EQ. im)) THEN
                     savegp = wgap(windex)
                     offset = indexw( wbegin ) - 1
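*                    When bisection is used for this eigenvalue, refine
*                    it to essentially full accuracy (tolerances ZERO
*                    and TWO*EPS) before the vector is computed.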
                     CALL dlarrb2( in, d(ibegin),
     $                  work(indlld+ibegin-1),indeig,indeig,
     $                  zero, two*eps, offset,
     $                  work(wbegin),wgap(wbegin),
     $                  werr(wbegin),work( indwrk ),
     $                  iwork( iindwk ),
     $                  pivmin, lgpvmn, lgspdm, itmp1, iinfo )
                     IF( iinfo.NE.0 ) THEN
                     lambda = work( windex )
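*                    Compute the eigenvector from the twisted
*                    factorization determined by DLAR1VA, which also
*                    returns the residual RESID, the Rayleigh quotient
*                    correction RQCORR, and the support of the vector
*                    in ISUPPZ.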
                     CALL dlar1va( in, 1, in, lambda, d(ibegin),
     $                  l( ibegin ), work(indld+ibegin-1),
     $                  work(indlld+ibegin-1),
     $                  pivmin, gaptol, z( ibegin, zindex),
     $                  .NOT.usedbs, negcnt, ztz, mingma,
     $                  itwist, isuppz( 2*windex-1 ),
     $                  nrminv, resid, rqcorr, work( indwrk ) )
                     ELSEIF(resid.LT.bstres) THEN
                     isupmn = min(isupmn,isuppz( 2*windex-1 ))
                     isupmx = max(isupmx,isuppz( 2*windex ))
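*                    Convergence test for the Rayleigh quotient
*                    iteration: accept the vector when the residual is
*                    small compared with the gap and the correction is
*                    negligible; otherwise apply the correction or fall
*                    back to bisection.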
                     IF( resid.GT.tol*gap .AND. abs( rqcorr ).GT.
     $                  rqtol*abs( lambda ) .AND. .NOT. usedbs)
                        IF(indeig.LE.negcnt) THEN
                        IF( ( rqcorr*sgndef.GE.zero )
     $                     .AND.( lambda + rqcorr.LE. right)
     $                     .AND.( lambda + rqcorr.GE. left)
                        IF(sgndef.EQ.one) THEN
     $                     half * (right + left)
                        lambda = lambda + rqcorr
     $                     half * (right-left)
                        IF(right-left.LT.rqtol*abs(lambda)) THEN
                        ELSEIF( iter.LT.maxitr ) THEN
                        ELSEIF( iter.EQ.maxitr ) THEN
                        IF(usedrq .AND. usedbs .AND.
     $                     bstres.LE.resid) THEN
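*                       Recompute the eigenvector for the best
*                       available eigenvalue approximation, e.g. when
*                       bisection produced a smaller residual than the
*                       Rayleigh quotient iteration.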
                        CALL dlar1va( in, 1, in, lambda,
     $                     d( ibegin ), l( ibegin ),
     $                     work(indld+ibegin-1),
     $                     work(indlld+ibegin-1),
     $                     pivmin, gaptol,
     $                     z( ibegin, zindex ),
     $                     .NOT.usedbs, negcnt, ztz, mingma,
     $                     itwist,
     $                     isuppz( 2*windex-1 ),
     $                     nrminv, resid, rqcorr, work( indwrk ) )
                     work( windex ) = lambda
                     isuppz( 2*windex-1 ) = isuppz( 2*windex-1 )+oldien
                     isuppz( 2*windex ) = isuppz( 2*windex )+oldien
                     zfrom = isuppz( 2*windex-1 )
                     zto = isuppz( 2*windex )
                     isupmn = isupmn + oldien
                     isupmx = isupmx + oldien
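*                    Zero out the entries of Z outside the computed
*                    support of the eigenvector and normalize the
*                    vector.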
                     IF(isupmn.LT.zfrom) THEN
                        DO 122 ii = isupmn, zfrom-1
                           z( ii, zindex ) = zero
                     IF(isupmx.GT.zto) THEN
                        DO 123 ii = zto+1, isupmx
                           z( ii, zindex ) = zero
                     CALL dscal( zto-zfrom+1, nrminv,
     $                  z( zfrom, zindex ), 1 )
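*                    Store the eigenvalue, shifted back by SIGMA, and
*                    update the gaps to the neighboring eigenvalues.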
                     w( windex ) = lambda+sigma
                     wgap( windmn ) = max( wgap(windmn),
     $                  w(windex)-werr(windex)
     $                  - w(windmn)-werr(windmn) )
                     IF( windex.LT.wend ) THEN
                        wgap( windex ) = max( savegp,
     $                     w( windpl )-werr( windpl )
     $                     - w( windex )-werr( windex) )
         iwork( iincls + jblk ) = nclus
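*
*     Check whether all clusters in all blocks have been resolved.
*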
      DO 180 jblk = 1, iblock( m )
         finish = finish .AND. (iwork(iincls + jblk).EQ.0)
      IF(.NOT.finish) THEN
         IF((needil.GE.dol).AND.(neediu.LE.dou)) THEN