
Computer Science Graduation Project: a Hadoop + Spark + Hive Big Data Analysis Platform for Logistics and Express Delivery (logistics prediction system, logistics information crawler, logistics big data, machine learning, deep learning, knowledge graph, big data)


1. A Python crawler collects the logistics data and stores it in MySQL and in .csv files.
2. The dataset is cleaned with pandas + NumPy (or, alternatively, MapReduce) and the cleaned result is uploaded to HDFS (see the pandas/HDFS sketch after this list).
3. A Hive data warehouse is set up: the database and tables are created and the .csv dataset is loaded (see the Hive sketch after this list).
4. Hive SQL is used for offline computation, and Spark (Scala) is used for real-time computation.
5. The computed metrics are exported to MySQL with Sqoop (see the Sqoop sketch after this list).
6. Flask + ECharts implement the visualization dashboard and the data-query tables, including a prediction algorithm (a minimal prediction sketch follows the Flask code below).
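
Step 2 is only described at a high level above. As a minimal sketch of one possible implementation, the following Python script cleans a crawled CSV with pandas + NumPy and pushes the result to HDFS with the standard hdfs dfs -put command; the file names, column names, and HDFS target path are illustrative assumptions, not the project's actual schema.

# clean_and_upload.py -- minimal sketch of step 2 (cleaning + HDFS upload).
# File names, column names and the HDFS target path are assumed for illustration.
import subprocess

import numpy as np
import pandas as pd

# Load the raw crawled data (hypothetical file name).
df = pd.read_csv("logistics_raw.csv")

# Drop exact duplicates and rows missing key fields (hypothetical column names).
df = df.drop_duplicates()
df = df.dropna(subset=["order_id", "ship_time"])

# Normalize a numeric column: coerce bad values to NaN, then fill with the median.
df["weight_kg"] = pd.to_numeric(df["weight_kg"], errors="coerce")
df["weight_kg"] = df["weight_kg"].fillna(np.nanmedian(df["weight_kg"]))

# Write the cleaned dataset without header or index so Hive can load it as plain CSV.
df.to_csv("logistics_clean.csv", index=False, header=False)

# Push the cleaned file to HDFS (assumes the Hadoop client is on PATH).
subprocess.run(
    ["hdfs", "dfs", "-put", "-f", "logistics_clean.csv", "/data/logistics/"],
    check=True,
)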
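
For step 3 and the offline part of step 4, the sketch below drives the Hive CLI from Python with hive -e: it creates the database and a raw table, loads the cleaned CSV from HDFS, and materializes one sample metric table. The database, table, and column names are assumptions, except that the aggregated table06 (addr, num) mirrors the columns queried by the Flask code further down.

# hive_setup.py -- sketch of step 3: create the database and table, load the cleaned CSV,
# and compute one sample offline metric. Names and paths are illustrative assumptions.
import subprocess

HQL = """
CREATE DATABASE IF NOT EXISTS logistics;
USE logistics;

CREATE TABLE IF NOT EXISTS orders (
    order_id   STRING,
    addr       STRING,
    ship_time  STRING,
    weight_kg  DOUBLE
)
ROW FORMAT DELIMITED FIELDS TERMINATED BY ','
STORED AS TEXTFILE;

LOAD DATA INPATH '/data/logistics/logistics_clean.csv' INTO TABLE orders;

-- A sample offline metric for step 4: order volume per destination.
CREATE TABLE IF NOT EXISTS table06 AS
SELECT addr, COUNT(*) AS num
FROM orders
GROUP BY addr;
"""

# Run the script through the Hive CLI (assumes `hive` is on PATH).
subprocess.run(["hive", "-e", HQL], check=True)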
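
For step 5, here is a hedged sketch of the Sqoop export. The MySQL host, credentials, and database are taken from the Flask configuration shown below; the warehouse path and the \001 field delimiter are assumptions based on Hive defaults and would need to match the real cluster.

# sqoop_export.py -- sketch of step 5: export a computed metric table from HDFS to MySQL.
# The export directory and delimiter are assumptions (Hive defaults); the connection
# details come from the Flask configuration shown below.
import subprocess

subprocess.run(
    [
        "sqoop", "export",
        "--connect", "jdbc:mysql://bigdata:3306/2408_meituan?useUnicode=true&characterEncoding=utf-8",
        "--username", "root",
        "--password", "123456",
        "--table", "table06",                     # target MySQL table
        "--export-dir", "/user/hive/warehouse/logistics.db/table06",  # assumed warehouse path
        "--input-fields-terminated-by", "\\001",  # Hive's default field delimiter
        "--num-mappers", "1",
    ],
    check=True,
)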

The core application code (the Flask query API behind the dashboard) is shared below:

from flask import Flask, request
import json
from flask_mysqldb import MySQL

# Create the Flask application object
app = Flask(__name__)
app.config['MYSQL_HOST'] = 'bigdata'
app.config['MYSQL_USER'] = 'root'
app.config['MYSQL_PASSWORD'] = '123456'
app.config['MYSQL_DB'] = '2408_meituan'
mysql = MySQL(app)  # bind the flask_mysqldb extension to the app


# Each /tablesNN route reads one pre-computed result table from MySQL and
# returns it as a JSON array, with hard-coded keys matching the table's columns.
@app.route('/tables01')
def tables01():
    cur = mysql.connection.cursor()
    cur.execute('''SELECT * FROM table01''')
    # row_headers = [x[0] for x in cur.description]  # alternative: take headers from the cursor
    row_headers = ['name', 'goods', 'bads']  # JSON keys for the columns of table01
    rv = cur.fetchall()
    json_data = []
    for result in rv:
        json_data.append(dict(zip(row_headers, result)))
    return json.dumps(json_data, ensure_ascii=False)


@app.route('/tables02')
def tables02():
    cur = mysql.connection.cursor()
    cur.execute('''SELECT * FROM table02''')
    row_headers = ['name', 'price']
    rv = cur.fetchall()
    json_data = []
    for result in rv:
        json_data.append(dict(zip(row_headers, result)))
    return json.dumps(json_data, ensure_ascii=False)


@app.route('/tables03')
def tables03():
    cur = mysql.connection.cursor()
    cur.execute('''SELECT * FROM table03 ORDER BY goods DESC LIMIT 5''')
    row_headers = ['type', 'goods']
    rv = cur.fetchall()
    json_data = []
    for result in rv:
        json_data.append(dict(zip(row_headers, result)))
    return json.dumps(json_data, ensure_ascii=False)


@app.route('/tables04')
def tables04():
    cur = mysql.connection.cursor()
    cur.execute('''SELECT * FROM table04''')
    row_headers = ['shop_name', 'goods']
    rv = cur.fetchall()
    json_data = []
    for result in rv:
        json_data.append(dict(zip(row_headers, result)))
    return json.dumps(json_data, ensure_ascii=False)


@app.route('/tables05')
def tables05():
    cur = mysql.connection.cursor()
    cur.execute('''SELECT * FROM table05''')
    row_headers = ['shop_name', 'bads']
    rv = cur.fetchall()
    json_data = []
    for result in rv:
        json_data.append(dict(zip(row_headers, result)))
    return json.dumps(json_data, ensure_ascii=False)


@app.route('/tables06')
def tables06():
    cur = mysql.connection.cursor()
    cur.execute('''SELECT * FROM table06''')
    row_headers = ['addr', 'num']
    rv = cur.fetchall()
    json_data = []
    for result in rv:
        json_data.append(dict(zip(row_headers, result)))
    return json.dumps(json_data, ensure_ascii=False)


@app.route('/tables07')
def tables07():
    cur = mysql.connection.cursor()
    cur.execute('''SELECT * FROM table07''')
    row_headers = ['dish', 'num']
    rv = cur.fetchall()
    json_data = []
    for result in rv:
        json_data.append(dict(zip(row_headers, result)))
    return json.dumps(json_data, ensure_ascii=False)


@app.route('/tables08')
def tables08():
    cur = mysql.connection.cursor()
    cur.execute('''SELECT * FROM table08 ORDER BY serv_score DESC LIMIT 5''')
    row_headers = ['shop_name', 'serv_score']
    rv = cur.fetchall()
    json_data = []
    for result in rv:
        json_data.append(dict(zip(row_headers, result)))
    return json.dumps(json_data, ensure_ascii=False)


@app.route('/tables09')
def tables09():
    cur = mysql.connection.cursor()
    cur.execute('''SELECT * FROM table09''')
    row_headers = ['name', 'nums']
    rv = cur.fetchall()
    json_data = []
    for result in rv:
        json_data.append(dict(zip(row_headers, result)))
    return json.dumps(json_data, ensure_ascii=False)


if __name__ == "__main__":
    app.run(debug=True)
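
Step 6 mentions that the dashboard includes a prediction algorithm, but the code above only exposes query endpoints. As one possible shape for that piece, here is a minimal, hedged sketch of a /predict route that fits a linear trend to a daily order-volume table with NumPy and returns a short forecast; the table name, column layout (dt, num), and route path are assumptions, not the project's actual predictor. It would sit in the same module as the routes above, before the __main__ guard.

# A hypothetical /predict endpoint: fit a linear trend to daily order counts and
# forecast the next 7 days. The table name and columns (dt, num) are assumptions.
import numpy as np

@app.route('/predict')
def predict():
    cur = mysql.connection.cursor()
    cur.execute('''SELECT dt, num FROM table_daily ORDER BY dt''')
    rows = cur.fetchall()
    dates = [str(r[0]) for r in rows]
    counts = np.array([float(r[1]) for r in rows])

    if len(counts) < 2:
        return json.dumps({"error": "not enough history to fit a trend"}, ensure_ascii=False)

    # Least-squares linear fit over the day index, then extrapolate 7 steps ahead.
    x = np.arange(len(counts))
    slope, intercept = np.polyfit(x, counts, 1)
    future_x = np.arange(len(counts), len(counts) + 7)
    forecast = slope * future_x + intercept

    return json.dumps(
        {
            "history": dict(zip(dates, counts.tolist())),
            "forecast": [round(float(v), 2) for v in forecast],
        },
        ensure_ascii=False,
    )

An ECharts line chart on the dashboard can then request /predict the same way the /tablesNN endpoints are requested and plot the history and forecast series together.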
