---
# demo.yml — component-chaining demo pipeline configuration.
# The top-level keys `nodes`, `start`, and `end` are required.
comment: "组件串联demo"
# Global configuration.
conf:
  # Forecast horizon (number of periods to predict).
  forecast_periods: 10
  # Forecast granularity.
  forecast_freq:
    enumType: yibu.core_model.TimeDimensionEnum
    enumName: Daily  # Daily / Weekly / Monthly
  # Forecast start offset; e.g. if the forecast window is months 3-6, forecast_start=3.
  forecast_start: &forecast_start 0
  # Forecast date.
  # NOTE(review): unquoted ISO date — most YAML loaders (e.g. PyYAML) will load this
  # as a date object, not a string. Quote it ("2022-07-20") if a string is expected.
  forecast_day: 2022-07-20
  # Whether to run in test/debug mode.
  debug: true
  # Whether to run in local mode.
  run_local: false
  # Resource configuration.
  # NOTE(review): "excutors" is a pre-existing misspelling of "executors";
  # the keys are kept as-is because the consumer reads these exact names.
  excutors_nums: 1
  excutors_cores: 3
nodes:
  # Read order data from CSV.
  orderSrc:
    type: yibu.csv_io.CSVSource
    conf:
      path: "t_order_demo.csv"
  # Non-sale (slow-moving goods) filter.
  nonSale:
    type: yibu.filter.non_sale_jh3.NonSaleFilter
    conf:
      able: true  # whether the non-sale filter is enabled
      num_day: 90  # window length in days for the non-sale filter
    in:
      - orderSrc: 0
  # Aggregate orders into sales volume.
  orderToSale:
    type: yibu.filter.order_aggregation_jh3.OrderAggregationFilter
    conf:
      # Forecast granularity.
      forecast_freq:
        enumType: yibu.core_model.TimeDimensionEnum
        enumName: Daily
      repartition: true
      padding: true  # whether to pad dates with no sales with 0
    in:
      - nonSale: 0
  # Simple moving average forecast.
  simpleMovingAverage:
    type: yibu.filter.user_sma_zby.UserSmaFilter
    conf:
      moving_steps: 7  # first window length (days)
      moving_steps2: 14  # second window length (days)
      SMA1_weight: 0.6  # weight of the first moving average
      SMA2_weight: 0.4  # weight of the second moving average
      sum_weight: 1.0  # overall weight
    in:
      - orderToSale: 0
  # Write forecast results to a table (CSV).
  writeFcstData:
    type: yibu.csv_io.CSVDestination
    conf:
      path: "result_demo.csv"
    in:
      - simpleMovingAverage: 0
# Start and end nodes of the pipeline graph.
start: orderSrc
end: writeFcstData
# Spark-related settings.
spark:
  appName: jh3
  spark.speculation: false
  spark.sql.adaptive.skewedJoin.enabled: false
  spark.sql.hive.mergeFiles: true
  spark.sql.adaptive.enabled: false
  spark.sql.adaptive.join.enabled: false
  spark.sql.crossJoin.enabled: true
  hive.metastore.uris: "thrift://10.116.1.72:9083"