forked from deepmodeling/abacus-develop
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathparallel_k2d.cpp
More file actions
113 lines (107 loc) · 3.87 KB
/
parallel_k2d.cpp
File metadata and controls
113 lines (107 loc) · 3.87 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
#include "parallel_k2d.h"
#include "source_base/parallel_global.h"
#include "source_base/module_external/scalapack_connector.h"
#include "source_base/timer.h"
#include "source_base/memory.h"
template <typename TK>
void Parallel_K2D<TK>::set_para_env(int nks,
                                    const int& nw,
                                    const int& nb2d,
                                    const int& nproc,
                                    const int& my_rank,
                                    const int& nspin) {
    /// @brief Build the K2D parallel environment: split all processes into
    ///        `kpar` pools, assign k-points to the pools, and create the
    ///        global and per-pool 2D block-cyclic (ScaLAPACK) distributions.
    /// @param nks     total number of k-points
    /// @param nw      matrix dimension (the H/S matrices are nw x nw)
    /// @param nb2d    block size for the 2D block-cyclic distribution
    /// @param nproc   total number of MPI processes
    /// @param my_rank rank of this process in the global communicator
    /// @param nspin   number of spin channels (forwarded to kinfo)
    const int kpar = this->get_kpar();
    // Partition nproc ranks into kpar pools; fills NPROC_IN_POOL, MY_POOL
    // and RANK_IN_POOL for this rank.
    Parallel_Global::divide_mpi_groups(nproc,
                                       kpar,
                                       my_rank,
                                       this->NPROC_IN_POOL,
                                       this->MY_POOL,
                                       this->RANK_IN_POOL);
#ifdef __MPI
    // One sub-communicator per pool; ranks are ordered by RANK_IN_POOL.
    MPI_Comm_split(MPI_COMM_WORLD,
                   this->MY_POOL,
                   this->RANK_IN_POOL,
                   &this->POOL_WORLD_K2D);
#endif
    // Fix: release any helpers left over from a previous call so that calling
    // set_para_env() twice without unset_para_env() does not leak.
    // (delete on a null pointer is a no-op.)
    delete this->Pkpoints;
    delete this->P2D_global;
    delete this->P2D_pool;
    this->Pkpoints = new Parallel_Kpoints;
    this->P2D_global = new Parallel_2D;
    this->P2D_pool = new Parallel_2D;
    // Distribute the nks k-points over the kpar pools.
    this->Pkpoints
        ->kinfo(nks, kpar, this->MY_POOL, this->RANK_IN_POOL, nproc, nspin);
    // Global grid spans all ranks; the pool grid spans only this pool.
    this->P2D_global->init(nw, nw, nb2d, MPI_COMM_WORLD);
    this->P2D_pool->init(nw, nw, nb2d, this->POOL_WORLD_K2D);
}
template <typename TK>
// Redistribute H(k) and S(k) for each k-point handled in this round from the
// global 2D distribution (all ranks) to the 2D distribution of the pool that
// owns that k-point. ik_kpar holds one k-point index per pool.
// NOTE(review): Cpxgemr2d is a collective over the *global* BLACS context, so
// every rank must make the call for every entry of ik_kpar, even for
// k-points its pool does not own — do not reorder or skip these calls.
void Parallel_K2D<TK>::distribute_hsk(hamilt::Hamilt<TK>* pHamilt,
const std::vector<int>& ik_kpar,
const int& nw) {
#ifdef __MPI
ModuleBase::timer::tick("Parallel_K2D", "distribute_hsk");
for (int ipool = 0; ipool < ik_kpar.size(); ++ipool)
{
// Assemble H(k)/S(k) for this k-point in the global distribution.
pHamilt->updateHk(ik_kpar[ipool]);
hamilt::MatrixBlock<TK> HK_global, SK_global;
pHamilt->matrix(HK_global, SK_global);
// Only the owning pool allocates receive buffers for this k-point.
if (this->MY_POOL == this->Pkpoints->whichpool[ik_kpar[ipool]]) {
this->hk_pool.resize(this->P2D_pool->get_local_size(), 0.0);
this->sk_pool.resize(this->P2D_pool->get_local_size(), 0.0);
}
int desc_pool[9];
std::copy(this->P2D_pool->desc, this->P2D_pool->desc + 9, desc_pool);
// ScaLAPACK convention: setting desc[CTXT_] = -1 marks this rank as not
// participating on the receiving side of the redistribution.
if (this->MY_POOL != this->Pkpoints->whichpool[ik_kpar[ipool]]) {
desc_pool[1] = -1;
}
// Scatter H(k) global -> pool.
Cpxgemr2d(nw,
nw,
HK_global.p,
1,
1,
this->P2D_global->desc,
hk_pool.data(),
1,
1,
desc_pool,
this->P2D_global->blacs_ctxt);
// Scatter S(k) global -> pool.
Cpxgemr2d(nw,
nw,
SK_global.p,
1,
1,
this->P2D_global->desc,
sk_pool.data(),
1,
1,
desc_pool,
this->P2D_global->blacs_ctxt);
}
ModuleBase::Memory::record("Parallel_K2D::hsk_pool", this->P2D_pool->get_local_size() * 2 * sizeof(TK));
ModuleBase::timer::tick("Parallel_K2D", "distribute_hsk");
MPI_Barrier(MPI_COMM_WORLD);
#endif
}
template <typename TK>
void Parallel_K2D<TK>::unset_para_env() {
    /// @brief Release everything created by set_para_env(): the k-point and
    ///        2D-distribution helpers, and the per-pool communicator.
    // delete on a null pointer is a no-op, so no explicit checks are needed;
    // reset to nullptr so a second call (or a later set_para_env) is safe.
    delete this->Pkpoints;
    this->Pkpoints = nullptr;
    delete this->P2D_global;
    this->P2D_global = nullptr;
    delete this->P2D_pool;
    this->P2D_pool = nullptr;
#ifdef __MPI
    // Fix: MPI_Comm_free must carry the same #ifdef __MPI guard as the
    // MPI_Comm_split in set_para_env(); unguarded, a non-MPI build would
    // reference a communicator that was never created.
    MPI_Comm_free(&this->POOL_WORLD_K2D);
#endif
}
template <typename TK>
void Parallel_K2D<TK>::set_kpar(int kpar) {
    /// @brief Store the requested number of k-point pools.
    /// @param kpar desired pool count; must be at least 1.
    // Guard clause: accept a valid count and return early.
    if (kpar >= 1) {
        this->kpar_ = kpar;
        return;
    }
    // A pool count below one is meaningless — abort with the standard message.
    ModuleBase::WARNING_QUIT("Parallel_K2D::set_kpar",
                             "kpar must be greater than 0.");
}
// Explicit instantiations for the two matrix element types used by callers:
// real (double) and complex (std::complex<double>) Hamiltonians.
template class Parallel_K2D<double>;
template class Parallel_K2D<std::complex<double>>;