Skip to content
This repository has been archived by the owner on Nov 17, 2023. It is now read-only.

Commit

Permalink
Get rid of warnings (#21099)
Browse files Browse the repository at this point in the history
* Get rid of warnings

* Fix clang formatting

* Fix problem with redefinition

* Fix 'uninitialized' warning

* Update CONTRIBUTORS.md
  • Loading branch information
hankaj committed Jul 19, 2022
1 parent 7b1daf9 commit 183e012
Show file tree
Hide file tree
Showing 7 changed files with 15 additions and 11 deletions.
1 change: 1 addition & 0 deletions CONTRIBUTORS.md
Original file line number Diff line number Diff line change
Expand Up @@ -302,6 +302,7 @@ List of Contributors
* [Dominika Jedynak](https://github.com/DominikaJedynak)
* [Adam Grabowski](https://github.com/agrabows)
* [Kacper Pietkun](https://github.com/Kacper-Pietkun)
* [Hanna Jarlaczyńska](https://github.com/hankaj)

Label Bot
---------
Expand Down
8 changes: 8 additions & 0 deletions src/operator/c_lapack_api.h
Original file line number Diff line number Diff line change
Expand Up @@ -1045,6 +1045,14 @@ MXNET_LAPACK_CWRAPPER12(dorgqr, double)
#undef MXNET_LAPACK_CWRAPPER2
#undef MXNET_LAPACK_CWRAPPER3
#undef MXNET_LAPACK_CWRAPPER4
#undef MXNET_LAPACK_CWRAPPER5
#undef MXNET_LAPACK_CWRAPPER6
#undef MXNET_LAPACK_CWRAPPER7
#undef MXNET_LAPACK_CWRAPPER8
#undef MXNET_LAPACK_CWRAPPER9
#undef MXNET_LAPACK_CWRAPPER10
#undef MXNET_LAPACK_CWRAPPER11
#undef MXNET_LAPACK_CWRAPPER12
#undef MXNET_LAPACK_UNAVAILABLE
#endif

Expand Down
2 changes: 0 additions & 2 deletions src/operator/quantization/dnnl/dnnl_quantized_rnn.cc
Original file line number Diff line number Diff line change
Expand Up @@ -279,8 +279,6 @@ void DNNLQuantizedRnnOp::Forward(const OpContext& op_ctx,
char* dst_state = nullptr; // Output state
char* src_state_cell = nullptr; // Used in LSTM for cell state
char* dst_state_cell = nullptr; // Used in LSTM for cell state
const size_t cell_bytes = (default_param.bidirectional + 1) * default_param.batch_size_ *
default_param.state_size * mshadow::mshadow_sizeof(data_dtype);

if (default_param.state_outputs && req[rnn_enum::kStateOut] != kNullOp) {
stateout_mem =
Expand Down
2 changes: 1 addition & 1 deletion src/operator/tensor/broadcast_reduce-inl.h
Original file line number Diff line number Diff line change
Expand Up @@ -327,7 +327,7 @@ MSHADOW_XINLINE std::pair<AType, AType> seq_reduce_assign_block(size_t start,
const Shape<ndim>& rshape,
const Shape<ndim>& rstride) {
Shape<ndim> coord;
AType val, residual;
AType val, residual{};
Reducer::SetInitValue(val, residual);
for (size_t k = start; k < start + len; ++k) {
coord = mxnet_op::unravel(k, rshape);
Expand Down
1 change: 0 additions & 1 deletion src/operator/tensor/index_update-inl.h
Original file line number Diff line number Diff line change
Expand Up @@ -254,7 +254,6 @@ inline void IndexUpdateOpBackwardA(const nnvm::NodeAttrs& attrs,
TBlob ograd = inputs[0];
TBlob ind = inputs[1];
const TBlob& grad_a = outputs[0];
mshadow::Stream<xpu>* s = ctx.get_stream<xpu>();
// get the number of 'ind' index
if (ind.shape_.ndim() == 0) {
ind.shape_ = Shape2(1, 1);
Expand Down
2 changes: 0 additions & 2 deletions tests/cpp/engine/threaded_engine_test.cc
Original file line number Diff line number Diff line change
Expand Up @@ -51,8 +51,6 @@ struct Workload {
int time;
};

static uint32_t seed_ = 0xdeadbeef;

/**
* generate a list of workloads
*/
Expand Down
10 changes: 5 additions & 5 deletions tests/cpp/include/test_util.h
Original file line number Diff line number Diff line change
Expand Up @@ -787,11 +787,11 @@ struct ScopeSet {
T saveValue_;
};

static void AssertEqual(const std::vector<NDArray*>& in_arrs,
const std::vector<NDArray*>& out_arrs,
float rtol = 1e-5,
float atol = 1e-8,
bool test_first_only = false) {
static inline void AssertEqual(const std::vector<NDArray*>& in_arrs,
const std::vector<NDArray*>& out_arrs,
float rtol = 1e-5,
float atol = 1e-8,
bool test_first_only = false) {
for (size_t j = 0; j < in_arrs.size(); ++j) {
// When test_all is fir
if (test_first_only && j == 1) {
Expand Down

0 comments on commit 183e012

Please sign in to comment.