{
  "job": {
    "setting": {
      "speed": {
        "channel": 3
      },
      "errorLimit": {
        "record": 0,
        "percentage": 0.02
      }
    },
    "content": [
      {
        "reader": {
          "name": "hdfsreader",
          "parameter": {
            "path": "/home/sxkj/bigdata/apache-hive-2.3.9-bin/warehouse/test_p/ct=hangzhou/*",
            "defaultFS": "hdfs://192.168.199.27:9000",
            "fileType": "text",
            "fieldDelimiter": ",",
            "column": [
              {
                "index": "0",
                "type": "long"
              },
              {
                "index": "1",
                "type": "string"
              },
              {
                "value": "${ct}",
                "type": "string"
              }
            ]
          }
        },
        "writer": {
          "name": "hdfswriter",
          "parameter": {
            "defaultFS": "hdfs://192.168.199.27:9000",
            "fileType": "text",
            "path": "/home/sxkj/bigdata/apache-hive-2.3.9-bin/warehouse/my_test_p/",
            "fileName": "my_test_p",
            "writeMode": "append",
            "fieldDelimiter": ",",
            "column": [
              {
                "name": "id",
                "type": "int"
              },
              {
                "name": "name",
                "type": "string"
              },
              {
                "name": "ct",
                "type": "string"
              }
            ]
          }
        }
      }
    ]
  }
}