You cannot select more than 25 topics. Topics must start with a letter or number, may include dashes ('-'), and can be up to 35 characters long.

92 lines
3.2 KiB

6 years ago
  1. syntax = "proto2";
  2. package object_detection.protos;
  3. // Messages for configuring the optimizing strategy for training object
  4. // detection models.
  5. // Top level optimizer message.
  6. message Optimizer {
  7. oneof optimizer {
  8. RMSPropOptimizer rms_prop_optimizer = 1;
  9. MomentumOptimizer momentum_optimizer = 2;
  10. AdamOptimizer adam_optimizer = 3;
  11. }
  12. optional bool use_moving_average = 4 [default = true];
  13. optional float moving_average_decay = 5 [default = 0.9999];
  14. }
  15. // Configuration message for the RMSPropOptimizer
  16. // See: https://www.tensorflow.org/api_docs/python/tf/train/RMSPropOptimizer
  17. message RMSPropOptimizer {
  18. optional LearningRate learning_rate = 1;
  19. optional float momentum_optimizer_value = 2 [default = 0.9];
  20. optional float decay = 3 [default = 0.9];
  21. optional float epsilon = 4 [default = 1.0];
  22. }
  23. // Configuration message for the MomentumOptimizer
  24. // See: https://www.tensorflow.org/api_docs/python/tf/train/MomentumOptimizer
  25. message MomentumOptimizer {
  26. optional LearningRate learning_rate = 1;
  27. optional float momentum_optimizer_value = 2 [default = 0.9];
  28. }
  29. // Configuration message for the AdamOptimizer
  30. // See: https://www.tensorflow.org/api_docs/python/tf/train/AdamOptimizer
  31. message AdamOptimizer {
  32. optional LearningRate learning_rate = 1;
  33. }
  34. // Configuration message for optimizer learning rate.
  35. message LearningRate {
  36. oneof learning_rate {
  37. ConstantLearningRate constant_learning_rate = 1;
  38. ExponentialDecayLearningRate exponential_decay_learning_rate = 2;
  39. ManualStepLearningRate manual_step_learning_rate = 3;
  40. CosineDecayLearningRate cosine_decay_learning_rate = 4;
  41. }
  42. }
  43. // Configuration message for a constant learning rate.
  44. message ConstantLearningRate {
  45. optional float learning_rate = 1 [default = 0.002];
  46. }
  47. // Configuration message for an exponentially decaying learning rate.
  48. // See https://www.tensorflow.org/versions/master/api_docs/python/train/ \
  49. // decaying_the_learning_rate#exponential_decay
  50. message ExponentialDecayLearningRate {
  51. optional float initial_learning_rate = 1 [default = 0.002];
  52. optional uint32 decay_steps = 2 [default = 4000000];
  53. optional float decay_factor = 3 [default = 0.95];
  54. optional bool staircase = 4 [default = true];
  55. optional float burnin_learning_rate = 5 [default = 0.0];
  56. optional uint32 burnin_steps = 6 [default = 0];
  57. optional float min_learning_rate = 7 [default = 0.0];
  58. }
  59. // Configuration message for a manually defined learning rate schedule.
  60. message ManualStepLearningRate {
  61. optional float initial_learning_rate = 1 [default = 0.002];
  62. message LearningRateSchedule {
  63. optional uint32 step = 1;
  64. optional float learning_rate = 2 [default = 0.002];
  65. }
  66. repeated LearningRateSchedule schedule = 2;
  67. // Whether to linearly interpolate learning rates for steps in
  68. // [0, schedule[0].step].
  69. optional bool warmup = 3 [default = false];
  70. }
  71. // Configuration message for a cosine decaying learning rate as defined in
  72. // object_detection/utils/learning_schedules.py
  73. message CosineDecayLearningRate {
  74. optional float learning_rate_base = 1 [default = 0.002];
  75. optional uint32 total_steps = 2 [default = 4000000];
  76. optional float warmup_learning_rate = 3 [default = 0.0002];
  77. optional uint32 warmup_steps = 4 [default = 10000];
  78. optional uint32 hold_base_rate_steps = 5 [default = 0];
  79. }