【GPUPS】fix load model in pscore mode (PaddlePaddle#78)
* fix load model;test=develop

* fix load model;test=develop

* fix load model;test=develop
danleifeng authored Aug 4, 2022
1 parent 0610216 commit a59566c
Showing 1 changed file with 6 additions and 6 deletions.
12 changes: 6 additions & 6 deletions paddle/fluid/distributed/ps/table/memory_sparse_table.cc
@@ -137,7 +137,12 @@ int32_t MemorySparseTable::Load(const std::string& path,
   size_t feature_value_size =
       _value_accesor->GetAccessorInfo().size / sizeof(float);
 
+#ifdef PADDLE_WITH_HETERPS
+  int thread_num = _real_local_shard_num;
+#else
   int thread_num = _real_local_shard_num < 15 ? _real_local_shard_num : 15;
+#endif
+
   omp_set_num_threads(thread_num);
 #pragma omp parallel for schedule(dynamic)
   for (int i = 0; i < _real_local_shard_num; ++i) {
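
As a standalone illustration of the pattern this hunk introduces (a minimal sketch under assumptions, not Paddle's surrounding code; the helper name LoadShards is invented): use one OpenMP thread per shard when PADDLE_WITH_HETERPS is defined, cap the count at 15 otherwise, and drive the per-shard loop with a dynamic schedule.

#include <cstdio>
#include <omp.h>

// Sketch only: mirrors the thread-count choice and OpenMP loop shape shown in
// the hunk above; the body just reports which thread handled which shard.
void LoadShards(int real_local_shard_num) {
#ifdef PADDLE_WITH_HETERPS
  int thread_num = real_local_shard_num;  // one thread per shard on HeterPS
#else
  int thread_num = real_local_shard_num < 15 ? real_local_shard_num : 15;
#endif
  omp_set_num_threads(thread_num);
#pragma omp parallel for schedule(dynamic)
  for (int i = 0; i < real_local_shard_num; ++i) {
    std::printf("shard %d handled by thread %d\n", i, omp_get_thread_num());
  }
}
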
@@ -168,11 +173,6 @@ int32_t MemorySparseTable::Load(const std::string& path,
         int parse_size = _value_accesor->ParseFromString(++end, value.data());
         value.resize(parse_size);
 
-        // for debug
-        for (int ii = 0; ii < parse_size; ++ii) {
-          VLOG(2) << "MemorySparseTable::load key: " << key << " value " << ii
-                  << ": " << value.data()[ii] << " local_shard: " << i;
-        }
       }
       read_channel->close();
       if (err_no == -1) {
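
For readers unfamiliar with the context lines above: the ++end passed to ParseFromString suggests the numeric key has already been read off the front of the line, with end pointing just past it. A rough, fully hypothetical sketch of that key/value split (the record format and all names here are assumptions, not the table's actual I/O code):

#include <cstdint>
#include <cstdlib>
#include <string>
#include <vector>

// Hypothetical sketch: take the leading uint64 key off a text record and
// parse the remainder as space-separated floats.
bool ParseRecord(const std::string& line, uint64_t* key,
                 std::vector<float>* value) {
  char* end = nullptr;
  *key = std::strtoull(line.c_str(), &end, 10);
  if (end == line.c_str()) return false;  // no key at the start of the line
  value->clear();
  while (*end != '\0') {
    char* next = nullptr;
    float v = std::strtof(end, &next);
    if (next == end) break;  // no further parseable floats
    value->push_back(v);
    end = next;
  }
  return !value->empty();
}
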
@@ -340,7 +340,7 @@ int32_t MemorySparseTable::Save(const std::string& dirname,
 
   size_t file_start_idx = _avg_local_shard_num * _shard_idx;
 
-#ifdef PADDLE_WITH_GPU_GRAPH
+#ifdef PADDLE_WITH_HETERPS
   int thread_num = _real_local_shard_num;
 #else
   int thread_num = _real_local_shard_num < 20 ? _real_local_shard_num : 20;
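
A short standalone sketch of why the guard rename matters (assumptions: a HeterPS build defines PADDLE_WITH_HETERPS but not PADDLE_WITH_GPU_GRAPH; the helper name SaveThreadNum is invented): whichever macro is undefined sends compilation down the #else branch, so the old guard would have kept Save capped at 20 threads even on HeterPS, while the new guard matches the Load path.

// Sketch only: the macro is defined here to emulate a HeterPS build.
#define PADDLE_WITH_HETERPS

int SaveThreadNum(int real_local_shard_num) {
#ifdef PADDLE_WITH_HETERPS  // new guard: taken under this sketch's assumption
  return real_local_shard_num;
#else  // the old guard PADDLE_WITH_GPU_GRAPH would have landed here instead
  return real_local_shard_num < 20 ? real_local_shard_num : 20;
#endif
}
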
