error in MPI_FILE_SET_VIEW
Posted: Thu Jun 30, 2016 9:30 pm
Hi,
I am running EPW v.4.0.0 with etf_mem = .false. and encounter the following CRASH message:

    task # 9
    from ephwan2blochp : error # 1
    error in MPI_FILE_SET_VIEW

The error doesn't seem to happen on task 0, though. I can run a smaller job with identical settings without problems. My input files for bulk WSe2 are below.
The MPI installation is Intel MPI 5.1.1, running on a Cray cluster with Intel Xeon processors.
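For context, the message points at the MPI-IO file-view setup in ephwan2blochp, which (as far as I understand) is how each task reads its part of the e-ph matrix file when etf_mem = .false. Below is a minimal, purely illustrative C sketch of that pattern; the file name, record counts and layout are made up and are not EPW's actual code. The point it shows is that the displacement handed to MPI_File_set_view must be a 64-bit MPI_Offset, and any 32-bit arithmetic on the way to it overflows once the file grows past 2 GiB, which is one way this call can fail:

/* Minimal illustrative sketch, NOT EPW code: each MPI task reads its own
 * slice of one large shared file through an MPI-IO file view, the kind of
 * call that is failing above.  File name, record counts and layout are
 * hypothetical. */
#include <mpi.h>
#include <stdio.h>

int main(int argc, char **argv)
{
    MPI_Init(&argc, &argv);

    int rank;
    MPI_Comm_rank(MPI_COMM_WORLD, &rank);

    /* hypothetical record layout: nrec records of ndouble doubles per task */
    long long ndouble = 22LL * 22 * 18 * 2;   /* ~ nbndsub^2 * nmodes * (re,im) */
    long long nrec    = 1000;                 /* illustrative only */

    /* The displacement must be computed in 64-bit MPI_Offset arithmetic;
     * a default 32-bit integer wraps around once the byte offset passes
     * 2^31 - 1, and MPI_File_set_view then fails. */
    MPI_Offset disp = (MPI_Offset) rank * nrec * ndouble
                      * (MPI_Offset) sizeof(double);

    MPI_File fh;
    int ierr = MPI_File_open(MPI_COMM_WORLD, "wse.epmatwp1",  /* hypothetical name */
                             MPI_MODE_RDONLY, MPI_INFO_NULL, &fh);
    if (ierr != MPI_SUCCESS) MPI_Abort(MPI_COMM_WORLD, ierr);

    ierr = MPI_File_set_view(fh, disp, MPI_DOUBLE, MPI_DOUBLE,
                             "native", MPI_INFO_NULL);
    if (ierr != MPI_SUCCESS) {
        fprintf(stderr, "task %d: error in MPI_FILE_SET_VIEW\n", rank);
        MPI_Abort(MPI_COMM_WORLD, ierr);
    }

    MPI_File_close(&fh);
    MPI_Finalize();
    return 0;
}

Of course this is only one candidate explanation; the same call can also fail for reasons specific to the filesystem or the MPI implementation.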
Any help would be much appreciated.
Cheers,
Hannes
My EPW input is:
&inputepw
prefix = 'wse'
amass(1) = 183.84
amass(2) = 78.96
outdir = './'
iverbosity = 0
elph = .true.
ep_coupling = .true.
epbwrite = .true.
! epbread = .true.
epwwrite = .true.
! epwread = .true.
! kmaps = .true.
nbndsub = 22
nbndskip = 0
wannierize = .true.
num_iter = 500
proj(1) = 'Se:p'
proj(2) = 'W:d'
etf_mem = .false.
elinterp = .true.
phinterp = .true.
tshuffle2 = .true.
elecselfen = .true.
phonselfen = .false.
a2f = .false.
parallel_k = .true.
parallel_q = .false.
eptemp = 300
degaussw = 0.1 ! eV
dvscf_dir = '../phonons_8_8_2/save'
band_plot = .true.
filukk = './wse.ukk'
! filqf = './meshes/path.dat'
filkf = './meshes/path.dat'
filelph = './filelph'
nkf1 = 100
nkf2 = 100
nkf3 = 1
nk1 = 8
nk2 = 8
nk3 = 2
nqf1 = 80
nqf2 = 80
nqf3 = 20
nq1 = 8
nq2 = 8
nq3 = 2
/
20 cartesian
0.0000000 0.0000000 0.0000000 0.0156250
0.0000000 0.0000000 -0.1264255 0.0156250
0.0000000 0.1443376 0.0000000 0.0937500
0.0000000 0.1443376 -0.1264255 0.0937500
0.0000000 0.2886751 0.0000000 0.0937500
0.0000000 0.2886751 -0.1264255 0.0937500
0.0000000 0.4330127 0.0000000 0.0937500
0.0000000 0.4330127 -0.1264255 0.0937500
0.0000000 -0.5773503 0.0000000 0.0468750
0.0000000 -0.5773503 -0.1264255 0.0468750
0.1250000 0.2165064 0.0000000 0.0937500
0.1250000 0.2165064 -0.1264255 0.0937500
0.1250000 0.3608439 0.0000000 0.1875000
0.1250000 0.3608439 -0.1264255 0.1875000
0.1250000 0.5051815 0.0000000 0.1875000
0.1250000 0.5051815 -0.1264255 0.1875000
0.2500000 0.4330127 0.0000000 0.0937500
0.2500000 0.4330127 -0.1264255 0.0937500
0.2500000 0.5773503 0.0000000 0.0937500
0.2500000 0.5773503 -0.1264255 0.0937500
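For what it's worth, here is a rough back-of-envelope estimate of how large the stored e-ph matrix gets with these grids. I am assuming it scales roughly as nbndsub^2 * 3*nat complex(8) numbers per pair of coarse k- and q-grid points; EPW actually stores it over Wigner-Seitz R vectors, so this is only an order-of-magnitude guess. The estimate lands just above 2 GiB, i.e. past the signed 32-bit byte-offset limit, which would fit the observation that only the larger job fails:

/* Order-of-magnitude estimate only; the assumed scaling
 * nbndsub^2 * nmodes * Nk * Nq complex numbers is a guess at how the
 * etf_mem = .false. matrix file grows, not EPW's exact layout. */
#include <stdio.h>

int main(void)
{
    long long nbndsub = 22, nmodes = 3 * 6;        /* nat = 6 -> 18 modes  */
    long long nk = 8LL * 8 * 2, nq = 8LL * 8 * 2;  /* coarse 8x8x2 grids   */
    long long bytes = nbndsub * nbndsub * nmodes * nk * nq * 16; /* complex(8) */

    printf("estimated e-ph matrix size: %lld bytes (%.2f GiB)\n",
           bytes, bytes / (1024.0 * 1024.0 * 1024.0));
    /* prints ~2283798528 bytes (~2.13 GiB), just past 2^31 - 1 = 2147483647 */
    return 0;
}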
The underlying scf calculation for bulk WSe2 is done with:
&CONTROL
title = ' wse ',
calculation = 'scf',
prefix = 'wse'
pseudo_dir = '/u/hhueb/QE_PSEUDOS/'
outdir = '.'
disk_io = 'low'
wf_collect= .true.
/
&SYSTEM
ecutwfc = 100.
ibrav = 4,
celldm(1)=6.202080695
celldm(3)=3.954898558
nat = 6,
ntyp = 2,
/
&ELECTRONS
diagonalization='david'
mixing_mode = 'plain'
mixing_beta = 0.7
conv_thr = 1.0d-10
/
K_POINTS automatic
8 8 2 0 0 0
ATOMIC_SPECIES
W 183.84 W.pbe-hgh.UPF
Se 78.96 Se.pbe-hgh.UPF
ATOMIC_POSITIONS (crystal)
W 1/3 2/3 1/4
W -1/3 -2/3 -1/4
Se 1/3 2/3 0.621
Se -1/3 -2/3 -0.621
Se 2/3 1/3 1.121
Se -2/3 -1/3 -1.121