*
*     .. Scalar Arguments ..
      INTEGER            CSRC, GCINDX, GRINDX, LRINDX, LCINDX, MYCOL,
     $                   MYROW, NPCOL, NPROW, RSRC
*     ..
*     .. Array Arguments ..
      INTEGER            DESC( * )
*     ..
*
*  Purpose
*  =======
*
*  Given the global row/column indices (GRINDX, GCINDX) of an entry of
*  a matrix distributed block-cyclically over an NPROW-by-NPCOL process
*  grid, compute the corresponding local indices (LRINDX, LCINDX) and
*  the coordinates (RSRC, CSRC) of the process that owns the entry.
*  DESC is the 9-element array descriptor of the distributed matrix;
*  (MYROW, MYCOL) are the calling process's grid coordinates.
*
*     .. Parameters ..
*     Symbolic indices into the array descriptor DESC.
      INTEGER            BLOCK_CYCLIC_2D, CSRC_, CTXT_, DLEN_, DTYPE_,
     $                   LLD_, MB_, M_, NB_, N_, RSRC_
      PARAMETER          ( BLOCK_CYCLIC_2D = 1, DLEN_ = 9, DTYPE_ = 1,
     $                     CTXT_ = 2, M_ = 3, N_ = 4, MB_ = 5, NB_ = 6,
     $                     RSRC_ = 7, CSRC_ = 8, LLD_ = 9 )
*     ..
*     .. Local Scalars ..
      INTEGER            CBLK, GCZ, GRZ, RBLK
*     ..
*     .. Intrinsic Functions ..
      INTRINSIC          MOD
*     ..
*     .. Executable Statements ..
*
*     Work with zero-based copies of the global indices.
*
      GRZ = GRINDX - 1
      GCZ = GCINDX - 1
*
*     Global block coordinates of the entry, and the coordinates of
*     the process owning that block.  The distribution starts on
*     process row DESC( RSRC_ ) and process column DESC( CSRC_ ).
*
      RBLK = GRZ / DESC( MB_ )
      CBLK = GCZ / DESC( NB_ )
      RSRC = MOD( RBLK + DESC( RSRC_ ), NPROW )
      CSRC = MOD( CBLK + DESC( CSRC_ ), NPCOL )
*
*     First guess at the local indices: one past the last entry of
*     the local block RBLK / NPROW (resp. CBLK / NPCOL).
*
      LRINDX = ( RBLK / NPROW + 1 ) * DESC( MB_ ) + 1
      LCINDX = ( CBLK / NPCOL + 1 ) * DESC( NB_ ) + 1
*
*     Processes whose offset in the current cycle is at or beyond
*     that of the owning block hold one block less than the guess
*     assumed: pull the local index back a full block, and on the
*     owning process row/column add the entry's offset within its
*     block.
*
      IF( MOD( MYROW+NPROW-DESC( RSRC_ ), NPROW ) .GE.
     $    MOD( RBLK, NPROW ) ) THEN
         IF( MYROW.EQ.RSRC )
     $      LRINDX = LRINDX + MOD( GRZ, DESC( MB_ ) )
         LRINDX = LRINDX - DESC( MB_ )
      END IF
*
      IF( MOD( MYCOL+NPCOL-DESC( CSRC_ ), NPCOL ) .GE.
     $    MOD( CBLK, NPCOL ) ) THEN
         IF( MYCOL.EQ.CSRC )
     $      LCINDX = LCINDX + MOD( GCZ, DESC( NB_ ) )
         LCINDX = LCINDX - DESC( NB_ )
      END IF
*
      RETURN
*