     1 X ,V ,MS ,A ,SPBUF ,
     2 WA ,ITAB ,KXSP ,IXSP ,NOD2SP ,
     3 D ,ISPSYM ,XSPSYM ,VSPSYM ,BUFMAT ,
     4 BUFGEO ,NPC ,PLD ,PM ,GEO ,
     5 ISPCOND ,XFRAME ,WASPSYM ,IPARTSP ,PARTSAV ,
     6 WACOMP ,WSMCOMP ,WASPACT ,IPART ,ITASK ,
     7 SPH2SOL ,SOL2SPH ,IRST ,IXS ,IPARG ,
     8 NGROUNC ,IGROUNC ,ELBUF_TAB,IAD_ELEM,FR_ELEM,
     9 IGEO ,SOL2SPH_TYP,SPH_WORK)
#include "implicit_f.inc"
#include "vect01_c.inc"
      INTEGER KXSP(NISP,*),IXSP(KVOISPH,*),NOD2SP(*),ITAB(*),
     . ISPSYM(NSPCOND,*),NPC(*),ISPCOND(NISPCOND,*),
     . IPARTSP(*),WASPACT(*),IPART(LIPART1,*), ITASK,
     . SPH2SOL(*),IXS(NIXS,*),IRST(3,*),SOL2SPH(2,*),
     . IPARG(NPARG,*), NGROUNC, IGROUNC(*),
     . IAD_ELEM(2,*),FR_ELEM(*),IGEO(NPROPGI,*),SOL2SPH_TYP(*)
     . X(3,*) ,V(3,*) ,MS(*) ,
     . a(3,*) ,spbuf(nspbuf,*) ,wa(*),
     . d(3,*) ,xspsym(3,*) ,vspsym(3,*),
     . pm(npropm,*), geo(npropg,*),bufmat(*),bufgeo(*),pld(*),
     . xframe(nxframe,*), waspsym(3,*), partsav(npsav,*),
     . wacomp(16,*), wsmcomp(6,*)
      TYPE (ELBUF_STRUCT_), TARGET, DIMENSION(NGROUP) :: ELBUF_TAB
      TYPE (SPH_WORK_) :: SPH_WORK
      INTEGER N,INOD,JNOD,J,NVOIS,M,
     . IS,IC,ISLIDE,IPRT,NS,MYADRN,
     . nsol, nski, n1, n2, n3, n4, n5, n6, n7, n8,
     . ir, it, nsphdir, kp, np,nelem, offset, nel, ig, ng,
     . ii, ii1, sz, lenr, ierror
     . xi,yi,zi,di,rhoi,presi,xj,yj,zj,dj,presj,rhoj,dij,
     . vxi,vyi,vzi,vxj,vyj,vzj,
     . alpci,alpcj,fact,faci,facj,
     . wax,way,waz,axi,ayi,azi,axj,ayj,azj,an,
     . vv,kv,ehourt,dtinv,
     . ox,oy,oz,nx,ny,nz,axs,ays,azs,
     . alphai,betaxi,betayi,betazi,betai,
     . alphaj,betaxj,betayj,betazj,betaj,unm,
     . vx1,vx2,vx3,vx4,vx5,vx6,vx7,vx8,
     . vy1,vy2,vy3,vy4,vy5,vy6,vy7,vy8,
     . vz1,vz2,vz3,vz4,vz5,vz6,vz7,vz8,usdt,
     . phi1,phi2,phi3,phi4,phi5,phi6,phi7,phi8,
      TYPE(g_bufel_), POINTER :: GBUF, GBUFSP
      TYPE(L_BUFEL_), POINTER :: LBUF
      TYPE(BUF_MAT_), POINTER :: MBUF
     . A_GAUSS(9,9),A_GAUSS_TETRA(9,9)
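C-----------------------------------------------
C     A_GAUSS / A_GAUSS_TETRA: tabulated parametric abscissae, per
C     direction, of the 1 to 9 SPH particles distributed inside a
C     brick (resp. a tetrahedron) when a solid element is converted
C     to SPH (SOL2SPH)
C-----------------------------------------------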
     3 -.666666666666666,0. ,0.666666666666666,
     6 -.833333333333333,-.5 ,-.166666666666666,
     6 0.166666666666666,0.5 ,0.833333333333333,
     7 -.857142857142857,-.571428571428571,-.285714285714285,
     7 0. ,0.285714285714285,0.571428571428571,
     7 0.857142857142857,0. ,0. ,
     8 -.875 ,-.625 ,-.375 ,
     8 -.125 ,0.125 ,0.375,
     9 -.888888888888888,-.666666666666666,-.444444444444444,
     9 -.222222222222222,0. ,0.222222222222222,
     9 0.444444444444444,0.666666666666666,0.888888888888888/
     1 0.250000000000000,0.000000000000000,0.000000000000000,
     1 0.000000000000000,0.000000000000000,0.000000000000000,
     1 0.000000000000000,0.000000000000000,0.000000000000000,
     2 0.166666666666667,0.500000000000000,0.000000000000000,
     2 0.000000000000000,0.000000000000000,0.000000000000000,
     2 0.000000000000000,0.000000000000000,0.000000000000000,
     3 0.125000000000000,0.375000000000000,0.625000000000000,
     3 0.000000000000000,0.000000000000000,0.000000000000000,
     3 0.000000000000000,0.000000000000000,0.000000000000000,
     4 0.100000000000000,0.300000000000000,0.500000000000000,
     4 0.700000000000000,0.000000000000000,0.000000000000000,
     4 0.000000000000000,0.000000000000000,0.000000000000000,
     5 0.083333333333333,0.250000000000000,0.416666666666667,
     5 0.583333333333333,0.750000000000000,0.000000000000000,
     5 0.000000000000000,0.000000000000000,0.000000000000000,
     6 0.071428571428571,0.214285714285714,0.357142857142857,
     6 0.500000000000000,0.642857142857143,0.785714285714286,
     6 0.000000000000000,0.000000000000000,0.000000000000000,
     7 0.062500000000000,0.187500000000000,0.312500000000000,
     7 0.437500000000000,0.562500000000000,0.687500000000000,
     7 0.812500000000000,0.000000000000000,0.000000000000000,
     8 0.055555555555556,0.166666666666667,0.277777777777778,
     8 0.388888888888889,0.500000000000000,0.611111111111111,
     8 0.722222222222222,0.833333333333333,0.000000000000000,
     9 0.050000000000000,0.150000000000000,0.250000000000000,
     9 0.350000000000000,0.450000000000000,0.550000000000000,
     9 0.650000000000000,0.750000000000000,0.850000000000000/
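C-----------------------------------------------
C     Solid-to-SPH coupling: reset the per-node partial sums A6 and
C     (re)allocate the particle work arrays AS and AS6
C-----------------------------------------------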
      IF(sol2sph_flag/=0) THEN
        sph_work%A6(1:6,1:3,1:numnod) = zero
        IF (ALLOCATED(sph_work%AS)) DEALLOCATE(sph_work%AS)
        CALL my_alloc(sph_work%AS,3,8*nsphact)
        IF (ALLOCATED(sph_work%AS6)) DEALLOCATE(sph_work%AS6)
        CALL my_alloc(sph_work%AS6,6,3,8*nsphact)
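C-----------------------------------------------
C     Particles still carried by a solid element follow the solid:
C     loop over the active element groups and set each particle
C     acceleration so that it reaches the velocity interpolated from
C     its parent element nodes at the next time step
C-----------------------------------------------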
      IF (nsphsol > 0) THEN
        IF(iparg(8,ng)==1) GOTO 250
        DO nelem = 1,iparg(2,ng),nvsiz
          nft =iparg(3,ng) + offset
          ipartsph=iparg(69,ng)
          llt=min(nvsiz,nel-nelem+1)
          IF(ity==1.AND.ipartsph/=0) THEN
            gbuf => elbuf_tab(ng)%GBUF
            lbuf => elbuf_tab(ng)%BUFLY(1)%LBUF(1,1,1)
            mbuf => elbuf_tab(ng)%BUFLY(1)%MAT(1,1,1)
            IF (iparg(28,ng)==4) THEN
              IF(gbuf%OFF(i)==zero) cycle
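C             4-node solids: nodal velocities at time n+1/2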
              vx1=v(1,n1)+dt12*a(1,n1)/ms(n1)
              vy1=v(2,n1)+dt12*a(2,n1)/ms(n1)
              vz1=v(3,n1)+dt12*a(3,n1)/ms(n1)
              vx2=v(1,n2)+dt12*a(1,n2)/ms(n2)
              vy2=v(2,n2)+dt12*a(2,n2)/ms(n2)
              vz2=v(3,n2)+dt12*a(3,n2)/ms(n2)
              vx3=v(1,n3)+dt12*a(1,n3)/ms(n3)
              vy3=v(2,n3)+dt12*a(2,n3)/ms(n3)
              vz3=v(3,n3)+dt12*a(3,n3)/ms(n3)
              vx4=v(1,n4)+dt12*a(1,n4)/ms(n4)
              vy4=v(2,n4)+dt12*a(2,n4)/ms(n4)
              vz4=v(3,n4)+dt12*a(3,n4)/ms(n4)
              nsphdir=igeo(37,ixs(10,n))
              DO kp=1,sol2sph(2,n)-sol2sph(1,n)
                ir=irst(1,np-first_sphsol+1)
                is=irst(2,np-first_sphsol+1)
                it=irst(3,np-first_sphsol+1)
                ksi = a_gauss_tetra(ir,nsphdir)
                eta = a_gauss_tetra(is,nsphdir)
                zeta = a_gauss_tetra(it,nsphdir)
                vxi=phi1*vx1+phi2*vx2+phi3*vx3+phi4*vx4
                vyi=phi1*vy1+phi2*vy2+phi3*vy3+phi4*vy4
                vzi=phi1*vz1+phi2*vz2+phi3*vz3+phi4*vz4
                a(1,inod)=ms(inod)*(vxi-v(1,inod))*usdt
                a(2,inod)=ms(inod)*(vyi-v(2,inod))*usdt
                a(3,inod)=ms(inod)*(vzi-v(3,inod))*usdt
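C             8-node solids: same projection using the trilinear
C             shape functions of the brick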
              IF(gbuf%OFF(i)==zero) cycle
              vx1=v(1,n1)+dt12*a(1,n1)/ms(n1)
              vy1=v(2,n1)+dt12*a(2,n1)/ms(n1)
              vz1=v(3,n1)+dt12*a(3,n1)/ms(n1)
              vx2=v(1,n2)+dt12*a(1,n2)/ms(n2)
              vy2=v(2,n2)+dt12*a(2,n2)/ms(n2)
              vz2=v(3,n2)+dt12*a(3,n2)/ms(n2)
              vx3=v(1,n3)+dt12*a(1,n3)/ms(n3)
              vy3=v(2,n3)+dt12*a(2,n3)/ms(n3)
              vz3=v(3,n3)+dt12*a(3,n3)/ms(n3)
              vx4=v(1,n4)+dt12*a(1,n4)/ms(n4)
              vy4=v(2,n4)+dt12*a(2,n4)/ms(n4)
              vz4=v(3,n4)+dt12*a(3,n4)/ms(n4)
              vx5=v(1,n5)+dt12*a(1,n5)/ms(n5)
              vy5=v(2,n5)+dt12*a(2,n5)/ms(n5)
              vz5=v(3,n5)+dt12*a(3,n5)/ms(n5)
              vx6=v(1,n6)+dt12*a(1,n6)/ms(n6)
              vy6=v(2,n6)+dt12*a(2,n6)/ms(n6)
              vz6=v(3,n6)+dt12*a(3,n6)/ms(n6)
              vx7=v(1,n7)+dt12*a(1,n7)/ms(n7)
              vy7=v(2,n7)+dt12*a(2,n7)/ms(n7)
              vz7=v(3,n7)+dt12*a(3,n7)/ms(n7)
              vx8=v(1,n8)+dt12*a(1,n8)/ms(n8)
              vy8=v(2,n8)+dt12*a(2,n8)/ms(n8)
              vz8=v(3,n8)+dt12*a(3,n8)/ms(n8)
              nsphdir=nint((sol2sph(2,n)-sol2sph(1,n))**third)
              DO kp=1,sol2sph(2,n)-sol2sph(1,n)
                ir=irst(1,np-first_sphsol+1)
                is=irst(2,np-first_sphsol+1)
                it=irst(3,np-first_sphsol+1)
                ksi = a_gauss(ir,nsphdir)
                eta = a_gauss(is,nsphdir)
                zeta = a_gauss(it,nsphdir)
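C               trilinear shape functions at (ksi,eta,zeta); the
C               one_over_8 factor is applied in the sums below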
                phi1=(one-ksi)*(one-eta)*(one-zeta)
                phi2=(one-ksi)*(one-eta)*(one+zeta)
                phi3=(one+ksi)*(one-eta)*(one+zeta)
                phi4=(one+ksi)*(one-eta)*(one-zeta)
                phi5=(one-ksi)*(one+eta)*(one-zeta)
                phi6=(one-ksi)*(one+eta)*(one+zeta)
                phi7=(one+ksi)*(one+eta)*(one+zeta)
                phi8=(one+ksi)*(one+eta)*(one-zeta)
                vxi=one_over_8*(phi1*vx1+phi2*vx2+phi3*vx3+phi4*vx4+
     .               phi5*vx5+phi6*vx6+phi7*vx7+phi8*vx8)
                vyi=one_over_8*(phi1*vy1+phi2*vy2+phi3*vy3+phi4*vy4+
     .               phi5*vy5+phi6*vy6+phi7*vy7+phi8*vy8)
                vzi=one_over_8*(phi1*vz1+phi2*vz2+phi3*vz3+phi4*vz4+
     .               phi5*vz5+phi6*vz6+phi7*vz7+phi8*vz8)
                a(1,inod)=ms(inod)*(vxi-v(1,inod))*usdt
                a(2,inod)=ms(inod)*(vyi-v(2,inod))*usdt
                a(3,inod)=ms(inod)*(vzi-v(3,inod))*usdt
      ALLOCATE(sph_work%ASPHR(4,nsphr))
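C-----------------------------------------------
C     Symmetry conditions: for each condition the plane normal is
C     read from XFRAME and the normal component AN of the particle
C     acceleration is evaluated, for local and for remote particles
C-----------------------------------------------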
        nx=xframe(3*(ic-1)+1,is)
        ny=xframe(3*(ic-1)+2,is)
        nz=xframe(3*(ic-1)+3,is)
        DO ns =itask+1,nsphact,nthread
          an=axi*nx+ayi*ny+azi*nz
        DO ns = itask+1,nsphr,nthread
          axi=sph_work%ASPHR(1,ns)
          ayi=sph_work%ASPHR(2,ns)
          azi=sph_work%ASPHR(3,ns)
          an=axi*nx+ayi*ny+azi*nz
      DO ns =itask+1,nsphact,nthread
        unm=one/max(em30,ms(inod))
        vxi=v(1,inod)+dt12*a(1,inod)*unm
        vyi=v(2,inod)+dt12*a(2,inod)*unm
        vzi=v(3,inod)+dt12*a(3,inod)*unm
        vv=vxi*vxi+vyi*vyi+vzi*vzi
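C       PARTSAV(8) is credited with the particle energy term KV here
C       and debited with the updated value at the end of the routine,
C       so the balance reflects the change due to the correction below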
        partsav(8,iprt)=partsav(8,iprt)+kv
      DO ns =itask+1,nsphact,nthread
      DO ns =itask+1,nsphact,nthread
        alpci=get_u_geo(4,iprop)
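C-----------------------------------------------
C     Loop over the neighbours (local, remote and symmetry images) of
C     each active particle: the pairwise term WAX/WAY/WAZ is weighted
C     by the corrected kernel weight WGHT (alpha/beta renormalisation)
C     and accumulated into the work array WA
C-----------------------------------------------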
          IF(kxsp(2,n)<0.AND.kxsp(2,m)<0)cycle
          CALL weight0(xi,yi,zi,xj,yj,zj,dij,wght)
          betai=one+betaxi*(xi-xj)+betayi*(yi-yj)+betazi*(zi-zj)
          betaj=one+betaxj*(xj-xi)+betayj*(yj-yi)+betazj*(zj-zi)
          wght=wght*(alphai*betai+alphaj*betaj)*half
          fact=two*wght/(rhoi+rhoj)
          wax=axj-axi+ms(inod)*(vxj-vxi)*dtinv
          way=ayj-ayi+ms(inod)*(vyj-vyi)*dtinv
          waz=azj-azi+ms(inod)*(vzj-vzi)*dtinv
          faci= alpci*spbuf(12,m)*fact
          IF(kxsp(2,n)<=0.AND.xsphr(13,nn)<=0)cycle
          axj=sph_work%ASPHR(1,nn)
          ayj=sph_work%ASPHR(2,nn)
          azj=sph_work%ASPHR(3,nn)
          CALL weight0(xi,yi,zi,xj,yj,zj,dij,wght)
          betai=one+betaxi*(xi-xj)+betayi*(yi-yj)+betazi*(zi-zj)
          betaj=one+betaxj*(xj-xi)+betayj*(yj-yi)+betazj*(zj-zi)
          wght=wght*(alphai*betai+alphaj*betaj)*half
          fact=two*wght/(rhoi+rhoj)
          wax=axj-axi+ms(inod)*(vxj-vxi)*dtinv
          way=ayj-ayi+ms(inod)*(vyj-vyi)*dtinv
          waz=azj-azi+ms(inod)*(vzj-vzi)*dtinv
          faci= alpci*xsphr(8,nn)*fact
          wa(myadrn+1)=wa(myadrn+1)+faci*wax
          wa(myadrn+2)=wa(myadrn+2)+faci*way
          wa(myadrn+3)=wa(myadrn+3)+faci*waz
        DO j=kxsp(5,n)+1,kxsp(5,n)+nvoiss
          IF(kxsp(2,n)<=0.AND.kxsp(2,sm)<=0)cycle
          CALL weight0(xi,yi,zi,xj,yj,zj,dij,wght)
          betai=one +betaxi*(xi-xj)+betayi*(yi-yj)+betazi*(zi-zj)
          betaj=one+betaxj*(xj-xi)+betayj*(yj-yi)+betazj*(zj-zi)
          wght=wght*(alphai*betai+alphaj*betaj)*half
          fact=alpci*two*spbuf(12,sm)*wght/(rhoi+rhoj)
          wax=axj-axi+ms(inod)*(vxj-vxi)*dtinv
          way=ayj-ayi+ms(inod)*(vyj-vyi)*dtinv
          waz=azj-azi+ms(inod)*(vzj-vzi)*dtinv
          IF(kxsp(2,n)<=0.AND.xsphr(13,sm)<=0)cycle
          nc=mod(-js,nspcond+1)
          CALL weight0(xi,yi,zi,xj,yj,zj,dij,wght)
          betai=one +betaxi*(xi-xj)+betayi*(yi-yj)+betazi*(zi-zj)
          betaj=one+betaxj*(xj-xi)+betayj*(yj-yi)+betazj*(zj-zi)
          wght=wght*(alphai*betai+alphaj*betaj)*half
          fact=alpci*two*xsphr(8,sm)*wght/(rhoi+rhoj)
          wax=axj-axi+ms(inod)*(vxj-vxi)*dtinv
          way=ayj-ayi+ms(inod)*(vyj-vyi)*dtinv
          waz=azj-azi+ms(inod)*(vzj-vzi)*dtinv
          wa(myadrn+1)=wa(myadrn+1)+fact*wax
          wa(myadrn+2)=wa(myadrn+2)+fact*way
          wa(myadrn+3)=wa(myadrn+3)+fact*waz
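C-----------------------------------------------
C     Add the accumulated correction WA to the nodal accelerations.
C     Particles created from a solid element (SPH2SOL /= 0) also hand
C     their contribution back to the parent solid nodes through the
C     element shape functions (work arrays AS / AS6)
C-----------------------------------------------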
      DO ns=itask+1,nsphact,nthread
        a(1,inod)=a(1,inod)+wa(myadrn+1)
        a(2,inod)=a(2,inod)+wa(myadrn+2)
        a(3,inod)=a(3,inod)+wa(myadrn+3)
      IF(sph2sol(n)==0) THEN
        a(1,inod)=a(1,inod)+wa(myadrn+1)
        a(2,inod)=a(2,inod)+wa(myadrn+2)
        a(3,inod)=a(3,inod)+wa(myadrn+3)
      ELSEIF (sol2sph_typ(sph2sol(n))==4) THEN
        a(1,inod)=a(1,inod)+wa(myadrn+1)
        a(2,inod)=a(2,inod)+wa(myadrn+2)
        a(3,inod)=a(3,inod)+wa(myadrn+3)
        ir=irst(1,n-first_sphsol+1)
        is=irst(2,n-first_sphsol+1)
        it=irst(3,n-first_sphsol+1)
        nsphdir=igeo(37,ixs(10,nsol))
        ksi = a_gauss(ir,nsphdir)
        eta = a_gauss(is,nsphdir)
        zeta = a_gauss(it,nsphdir)
        phi1=one_over_8*(one-ksi)*(one-eta)*(one-zeta)
        phi2=one_over_8*(one-ksi)*(one-eta)*(one+zeta)
        phi3=one_over_8*(one+ksi)*(one-eta)*(one+zeta)
        phi4=one_over_8*(one+ksi)*(one-eta)*(one-zeta)
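C         scatter the particle contribution to the parent element
C         nodes, weighted by the shape function of each node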
        sph_work%AS(1,ii)=phi1*wa(myadrn+1)
        sph_work%AS(2,ii)=phi1*wa(myadrn+2)
        sph_work%AS(3,ii)=phi1*wa(myadrn+3)
        sph_work%AS(1,ii)=phi2*wa(myadrn+1)
        sph_work%AS(2,ii)=phi2*wa(myadrn+2)
        sph_work%AS(3,ii)=phi2*wa(myadrn+3)
        sph_work%AS(1,ii)=phi3*wa(myadrn+1)
        sph_work%AS(2,ii)=phi3*wa(myadrn+2)
        sph_work%AS(3,ii)=phi3*wa(myadrn+3)
        sph_work%AS(1,ii)=phi4*wa(myadrn+1)
        sph_work%AS(2,ii)=phi4*wa(myadrn+2)
        sph_work%AS(3,ii)=phi4*wa(myadrn+3)
        a(1,inod)=a(1,inod)+wa(myadrn+1)
        a(2,inod)=a(2,inod)+wa(myadrn+2)
        a(3,inod)=a(3,inod)+wa(myadrn+3)
        ir=irst(1,n-first_sphsol+1)
        is=irst(2,n-first_sphsol+1)
        it=irst(3,n-first_sphsol+1)
        nsphdir=nint((sol2sph(2,nsol)-sol2sph(1,nsol))**third)
        ksi = a_gauss(ir,nsphdir)
        eta = a_gauss(is,nsphdir)
        zeta = a_gauss(it,nsphdir)
        phi1=one_over_8*(one-ksi)*(one-eta)*(one-zeta)
        phi2=one_over_8*(one-ksi)*(one-eta)*(one+zeta)
        phi3=one_over_8*(one+ksi)*(one-eta)*(one+zeta)
        phi4=one_over_8*(one+ksi)*(one-eta)*(one-zeta)
        phi5=one_over_8*(one-ksi)*(one+eta)*(one-zeta)
        phi6=one_over_8*(one-ksi)*(one+eta)*(one+zeta)
        phi7=one_over_8*(one+ksi)*(one+eta)*(one+zeta)
        phi8=one_over_8*(one+ksi)*(one+eta)*(one-zeta)
        sph_work%AS(1,ii)=phi1*wa(myadrn+1)
        sph_work%AS(2,ii)=phi1*wa(myadrn+2)
        sph_work%AS(3,ii)=phi1*wa(myadrn+3)
        sph_work%AS(1,ii)=phi2*wa(myadrn+1)
        sph_work%AS(2,ii)=phi2*wa(myadrn+2)
        sph_work%AS(3,ii)=phi2*wa(myadrn+3)
        sph_work%AS(1,ii)=phi3*wa(myadrn+1)
        sph_work%AS(2,ii)=phi3*wa(myadrn+2)
        sph_work%AS(3,ii)=phi3*wa(myadrn+3)
        sph_work%AS(1,ii)=phi4*wa(myadrn+1)
        sph_work%AS(2,ii)=phi4*wa(myadrn+2)
        sph_work%AS(3,ii)=phi4*wa(myadrn+3)
        sph_work%AS(1,ii)=phi5*wa(myadrn+1)
        sph_work%AS(2,ii)=phi5*wa(myadrn+2)
        sph_work%AS(3,ii)=phi5*wa(myadrn+3)
        sph_work%AS(1,ii)=phi6*wa(myadrn+1)
        sph_work%AS(2,ii)=phi6*wa(myadrn+2)
        sph_work%AS(3,ii)=phi6*wa(myadrn+3)
        sph_work%AS(1,ii)=phi7*wa(myadrn+1)
        sph_work%AS(2,ii)=phi7*wa(myadrn+2)
        sph_work%AS(3,ii)=phi7*wa(myadrn+3)
        sph_work%AS(1,ii)=phi8*wa(myadrn+1)
        sph_work%AS(2,ii)=phi8*wa(myadrn+2)
        sph_work%AS(3,ii)=phi8*wa(myadrn+3)
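C-----------------------------------------------
C     Gather the per-particle contributions AS6 into the per-node
C     partial sums A6 (six partial sums per component), which are
C     added to the nodal accelerations at the end of the routine
C-----------------------------------------------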
      IF(sph2sol(n)/=0) THEN
        IF (sol2sph_typ(sph2sol(n))==4) THEN
          sph_work%A6(j,1,n1)=sph_work%A6(j,1,n1)+sph_work%AS6(j,1,ii)
          sph_work%A6(j,2,n1)=sph_work%A6(j,2,n1)+sph_work%AS6(j,2,ii)
          sph_work%A6(j,3,n1)=sph_work%A6(j,3,n1)+sph_work%AS6(j,3,ii)
          sph_work%A6(j,1,n2)=sph_work%A6(j,1,n2)+sph_work%AS6(j,1,ii)
          sph_work%A6(j,2,n2)=sph_work%A6(j,2,n2)+sph_work%AS6(j,2,ii)
          sph_work%A6(j,3,n2)=sph_work%A6(j,3,n2)+sph_work%AS6(j,3,ii)
          sph_work%A6(j,1,n3)=sph_work%A6(j,1,n3)+sph_work%AS6(j,1,ii)
          sph_work%A6(j,2,n3)=sph_work%A6(j,2,n3)+sph_work%AS6(j,2,ii)
          sph_work%A6(j,3,n3)=sph_work%A6(j,3,n3)+sph_work%AS6(j,3,ii)
          sph_work%A6(j,1,n4)=sph_work%A6(j,1,n4)+sph_work%AS6(j,1,ii)
          sph_work%A6(j,2,n4)=sph_work%A6(j,2,n4)+sph_work%AS6(j,2,ii)
          sph_work%A6(j,3,n4)=sph_work%A6(j,3,n4)+sph_work%AS6(j,3,ii)
          sph_work%A6(j,1,n1)=sph_work%A6(j,1,n1)+sph_work%AS6(j,1,ii)
          sph_work%A6(j,2,n1)=sph_work%A6(j,2,n1)+sph_work%AS6(j,2,ii)
          sph_work%A6(j,3,n1)=sph_work%A6(j,3,n1)+sph_work%AS6(j,3,ii)
          sph_work%A6(j,1,n2)=sph_work%A6(j,1,n2)+sph_work%AS6(j,1,ii)
          sph_work%A6(j,2,n2)=sph_work%A6(j,2,n2)+sph_work%AS6(j,2,ii)
          sph_work%A6(j,3,n2)=sph_work%A6(j,3,n2)+sph_work%AS6(j,3,ii)
          sph_work%A6(j,1,n3)=sph_work%A6(j,1,n3)+sph_work%AS6(j,1,ii)
          sph_work%A6(j,2,n3)=sph_work%A6(j,2,n3)+sph_work%AS6(j,2,ii)
          sph_work%A6(j,3,n3)=sph_work%A6(j,3,n3)+sph_work%AS6(j,3,ii)
          sph_work%A6(j,1,n4)=sph_work%A6(j,1,n4)+sph_work%AS6(j,1,ii)
          sph_work%A6(j,2,n4)=sph_work%A6(j,2,n4)+sph_work%AS6(j,2,ii)
          sph_work%A6(j,3,n4)=sph_work%A6(j,3,n4)+sph_work%AS6(j,3,ii)
          sph_work%A6(j,1,n5)=sph_work%A6(j,1,n5)+sph_work%AS6(j,1,ii)
          sph_work%A6(j,2,n5)=sph_work%A6(j,2,n5)+sph_work%AS6(j,2,ii)
          sph_work%A6(j,3,n5)=sph_work%A6(j,3,n5)+sph_work%AS6(j,3,ii)
          sph_work%A6(j,1,n6)=sph_work%A6(j,1,n6)+sph_work%AS6(j,1,ii)
          sph_work%A6(j,2,n6)=sph_work%A6(j,2,n6)+sph_work%AS6(j,2,ii)
          sph_work%A6(j,3,n6)=sph_work%A6(j,3,n6)+sph_work%AS6(j,3,ii)
          sph_work%A6(j,1,n7)=sph_work%A6(j,1,n7)+sph_work%AS6(j,1,ii)
          sph_work%A6(j,2,n7)=sph_work%A6(j,2,n7)+sph_work%AS6(j,2,ii)
          sph_work%A6(j,3,n7)=sph_work%A6(j,3,n7)+sph_work%AS6(j,3,ii)
          sph_work%A6(j,1,n8)=sph_work%A6(j,1,n8)+sph_work%AS6(j,1,ii)
          sph_work%A6(j,2,n8)=sph_work%A6(j,2,n8)+sph_work%AS6(j,2,ii)
          sph_work%A6(j,3,n8)=sph_work%A6(j,3,n8)+sph_work%AS6(j,3,ii)
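C-----------------------------------------------
C     Main task only: exchange the A6 partial sums at the SPMD domain
C     boundaries (IAD_ELEM/FR_ELEM), then add the six partial sums of
C     every tagged node into its acceleration
C-----------------------------------------------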
      IF ((sol2sph_flag > 0).AND.(itask==0)) THEN
        lenr = iad_elem(1,nspmd+1)-iad_elem(1,1)
     1   sph_work%A6 ,sph_work%ITAG ,iad_elem ,fr_elem,sz,
        IF(sph_work%ITAG(n)/=0) THEN
          a(1,n)=a(1,n)+sph_work%A6(1,1,n)+sph_work%A6(2,1,n)+sph_work%A6(3,1,n)
     .          +sph_work%A6(4,1,n)+sph_work%A6(5,1,n)+sph_work%A6(6,1,n)
          a(2,n)=a(2,n)+sph_work%A6(1,2,n)+sph_work%A6(2,2,n)+sph_work%A6(3,2,n)
     .          +sph_work%A6(4,2,n)+sph_work%A6(5,2,n)+sph_work%A6(6,2,n)
          a(3,n)=a(3,n)+sph_work%A6(1,3,n)+sph_work%A6(2,3,n)+sph_work%A6(3,3,n)
     .          +sph_work%A6(4,3,n)+sph_work%A6(5,3,n)+sph_work%A6(6,3,n)
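C-----------------------------------------------
C     Debit PARTSAV(8) with the updated particle energy term KV,
C     closing the balance opened before the correction
C-----------------------------------------------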
      DO ns =itask+1,nsphact,nthread
        unm=one/max(em30,ms(inod))
        vxi=v(1,inod)+dt12*a(1,inod)*unm
        vyi=v(2,inod)+dt12*a(2,inod)*unm
        vzi=v(3,inod)+dt12*a(3,inod)*unm
        vv=vxi*vxi+vyi*vyi+vzi*vzi
        partsav(8,iprt)=partsav(8,iprt)-kv
      IF(nspmd>1 .AND. itask==0 .AND. ALLOCATED(sph_work%ASPHR))
     .   DEALLOCATE(sph_work%ASPHR)