[OpPerf] Implement remaining random sampling ops (apache#17502)
* Added support for remaining random sampling ops; removed the exception for sample_multinomial, as it does work with random data

* Dropped unused unique_ops variable

* Added affine transform, dropped parentheses, changed 19 to 18

* Dropped unique_ops condition - no longer in use

* Fixed indentation

* Dropped unique_ops
connorgoggins authored and Ubuntu committed Feb 19, 2020
1 parent 5daf529 commit 85b7250
Showing 3 changed files with 32 additions and 19 deletions.
4 changes: 2 additions & 2 deletions benchmark/opperf/nd_operations/random_sampling_operators.py
@@ -19,12 +19,12 @@
 1. Operators are automatically fetched from MXNet operator registry.
 2. Default Inputs are generated. See rules/default_params.py. You can override the default values.
-Below 16 random sampling Operators are covered:
+Below 18 random sampling Operators are covered:
 ['random_exponential', 'random_gamma', 'random_generalized_negative_binomial', 'random_negative_binomial',
 'random_normal', 'random_poisson', 'random_randint', 'random_uniform', 'sample_exponential', 'sample_gamma',
 'sample_generalized_negative_binomial', 'sample_multinomial', 'sample_negative_binomial', 'sample_normal',
-'sample_poisson', 'sample_uniform']
+'sample_poisson', 'sample_uniform', 'GridGenerator', 'BilinearSampler']
 """

16 changes: 14 additions & 2 deletions benchmark/opperf/rules/default_params.py
@@ -63,6 +63,12 @@
 DEFAULT_LAM = [[1.0, 8.5]]
 DEFAULT_K_ND = [[20, 49]]
 DEFAULT_P_ND = [[0.4, 0.77]]
+DEFAULT_GRID = [(32, 2, 256, 256)]
+DEFAULT_DATA_BILINEAR = [(32, 2, 256, 256)]
+DEFAULT_TRANSFORM_TYPE = ['warp', 'affine']
+DEFAULT_DATA_GRIDGEN = [(32, 2, 256, 256), (256, 6)]
+DEFAULT_TARGET_SHAPE = [(256, 6)]
+DEFAULT_DATA_SM = [(32, 32), (64, 64)]

 # For reduction operators
 # NOTE: Data used is DEFAULT_DATA
@@ -194,7 +200,13 @@
                    "data_3d": DEFAULT_DATA_3d,
                    "label_smce": DEFAULT_LABEL_SMCE,
                    "label": DEFAULT_LABEL,
-                   "index": DEFAULT_INDEX}
+                   "index": DEFAULT_INDEX,
+                   "grid": DEFAULT_GRID,
+                   "data_bilinearsampler": DEFAULT_DATA_BILINEAR,
+                   "transform_type": DEFAULT_TRANSFORM_TYPE,
+                   "data_gridgenerator": DEFAULT_DATA_GRIDGEN,
+                   "target_shape_gridgenerator": DEFAULT_TARGET_SHAPE,
+                   "data_sample_multinomial": DEFAULT_DATA_SM}


 # These are names of MXNet operator parameters that is of type NDArray.
@@ -207,4 +219,4 @@
 "low", "high", "weight", "bias", "moving_mean", "moving_var",
 "weight", "weight32", "grad", "mean", "var", "mom", "n", "d",
 "v", "z", "g", "delta", "args", "indices", "shape_like", "y",
-"x", "condition", "a", "index", "raveL_data", "label"]
+"x", "condition", "a", "index", "raveL_data", "label", "grid"]
31 changes: 16 additions & 15 deletions benchmark/opperf/utils/op_registry_utils.py
@@ -22,10 +22,6 @@

 from benchmark.opperf.rules.default_params import DEFAULTS_INPUTS, MX_OP_MODULE

-# Operators where parameter have special criteria that cannot be cleanly automated.
-# Example: sample_multinomial operator has a parameter 'data'. It expects values to sum up to 1.
-unique_ops = ("sample_multinomial",)
-

 def _select_ops(operator_names, filters=("_contrib", "_"), merge_op_forward_backward=True):
     """From a given list of operators, filter out all operator names starting with given filters and prepares
@@ -121,13 +117,16 @@ def prepare_op_inputs(op, arg_params):

     # 3d tensor is needed by following ops
     ops_3d = ['CTCLoss', 'ctc_loss']
+    custom_data = ['BilinearSampler', 'GridGenerator', 'sample_multinomial']

     # Prepare op to default input mapping
     arg_values = {}
     for arg_name, arg_type in zip(arg_params["params"]["arg_names"],
                                   arg_params["params"]["arg_types"]):
         if "NDArray" in arg_type and op == "ravel_multi_index":
             arg_values[arg_name] = DEFAULTS_INPUTS["ravel_data"]
+        elif op in custom_data and arg_name + "_" + op.lower() in DEFAULTS_INPUTS:
+            arg_values[arg_name] = DEFAULTS_INPUTS[arg_name + "_" + op.lower()]
         elif "NDArray" in arg_type and arg_name + "_nd" in DEFAULTS_INPUTS:
             arg_values[arg_name] = DEFAULTS_INPUTS[arg_name + "_nd"]
         elif "NDArray" in arg_type and op in ops_4d and arg_name + "_4d" in DEFAULTS_INPUTS:
@@ -254,13 +253,16 @@ def get_all_random_sampling_operators():
     -------
     {"operator_name": {"has_backward", "nd_op_handle", "params"}}
     """
+    # Additional Random Sampling ops which do not start with "random_" or "sample_"
+    additional_random_sampling_ops = ['GridGenerator', 'BilinearSampler']
+
     # Get all mxnet operators
     mx_operators = _get_all_mxnet_operators()

     # Filter for Random Sampling operators
     random_sampling_mx_operators = {}
     for op_name, _ in mx_operators.items():
-        if op_name.startswith(("random_", "sample_")) and op_name not in unique_ops:
+        if op_name.startswith(("random_", "sample_")) or op_name in additional_random_sampling_ops:
             random_sampling_mx_operators[op_name] = mx_operators[op_name]
     return random_sampling_mx_operators
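With unique_ops gone, the filter admits an operator either by name prefix or via the explicit allow-list; a small sketch of the predicate in isolation (the sample names are illustrative, not the full registry):

```python
additional_random_sampling_ops = ['GridGenerator', 'BilinearSampler']

def is_random_sampling_op(op_name):
    # Prefix match covers random_* / sample_*; the allow-list catches the
    # two ops whose names follow neither convention.
    return (op_name.startswith(("random_", "sample_"))
            or op_name in additional_random_sampling_ops)

for name in ("random_uniform", "sample_gamma", "BilinearSampler", "Convolution"):
    print(name, is_random_sampling_op(name))  # only Convolution is excluded
```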

@@ -279,8 +281,7 @@ def get_all_reduction_operators():
     reduction_mx_operators = {}
     for op_name, op_params in mx_operators.items():
         if op_params["params"]["narg"] == 4 and \
-                set(["data", "axis", "exclude", "keepdims"]).issubset(set(op_params["params"]["arg_names"])) \
-                and op_name not in unique_ops:
+                set(["data", "axis", "exclude", "keepdims"]).issubset(set(op_params["params"]["arg_names"])):
             reduction_mx_operators[op_name] = mx_operators[op_name]
     return reduction_mx_operators
@@ -301,8 +302,8 @@ def get_all_optimizer_operators():

     # Filter for Optimizer operators
     optimizer_mx_operators = {}
-    for op_name, _ in mx_operators.items():
-        if op_name in optimizer_ops and op_name not in unique_ops:
+    for op_name, op_params in mx_operators.items():
+        if op_name in optimizer_ops:
             optimizer_mx_operators[op_name] = mx_operators[op_name]
     return optimizer_mx_operators
@@ -320,8 +321,8 @@ def get_all_sorting_searching_operators():

     # Filter for Sort and search operators
     sort_search_mx_operators = {}
-    for op_name, _ in mx_operators.items():
-        if op_name in sort_search_ops and op_name not in unique_ops:
+    for op_name, op_params in mx_operators.items():
+        if op_name in sort_search_ops:
             sort_search_mx_operators[op_name] = mx_operators[op_name]
     return sort_search_mx_operators
@@ -340,8 +341,8 @@ def get_all_rearrange_operators():

     # Filter for Array Rearrange operators
     rearrange_mx_operators = {}
-    for op_name, _ in mx_operators.items():
-        if op_name in rearrange_ops and op_name not in unique_ops:
+    for op_name, op_params in mx_operators.items():
+        if op_name in rearrange_ops:
             rearrange_mx_operators[op_name] = mx_operators[op_name]
     return rearrange_mx_operators
@@ -366,7 +367,7 @@ def get_all_indexing_routines():
     # Filter for Indexing routines
     indexing_mx_routines = {}
     for op_name, _ in mx_operators.items():
-        if op_name in indexing_routines and op_name not in unique_ops:
+        if op_name in indexing_routines:
             indexing_mx_routines[op_name] = mx_operators[op_name]
     return indexing_mx_routines
@@ -386,7 +387,7 @@ def get_all_loss_operators():
     # Filter for NN Loss operators
     loss_mx_operators = {}
     for op_name, op_params in mx_operators.items():
-        if op_name in loss_ops and op_name not in unique_ops:
+        if op_name in loss_ops:
             loss_mx_operators[op_name] = mx_operators[op_name]
     return loss_mx_operators
