Commit 5df339af by fangxingjun

no message

parent 765b08af
@@ -254,41 +254,6 @@ class DwtStMtChristmasInfo(Templates):
        self.df_save.show(10, truncate=False)
        print(f"self.df_save.columns: {self.df_save.columns}")
    # def save_data(self):
    #     pass
    #     df_save = self.df_st_handle.select(
    #         F.col('search_term'),
    #         F.col('mt_volume'),
    #         F.col('total_searched_products'),
    #         F.col('total_page3_products'),
    #         F.col('total_self_products'),
    #         F.round(F.col('total_self_products')/F.col('total_page3_products'), 4).alias('self_product_rate'),
    #         F.col('total_bsr_orders'),
    #         F.col('total_orders'),
    #         F.col('total_new_products'),
    #         F.round(F.col('total_new_products') / F.col('total_page3_products'), 4).alias('new_product_rate'),
    #         F.date_format(F.current_timestamp(), 'yyyy-MM-dd HH:mm:SS').alias('created_time'),
    #         F.date_format(F.current_timestamp(), 'yyyy-MM-dd HH:mm:SS').alias('updated_time'),
    #         F.lit(self.site_name).alias('site_name'),
    #         F.lit(self.date_type).alias('date_type'),
    #         F.lit(self.date_info).alias('date_info')
    #     )
    #     # CommonUtil.check_schema(self.spark, df_save, self.hive_tb)
    #
    #     print(f"Clearing HDFS directory: {self.hdfs_path}")
    #     HdfsUtils.delete_file_in_folder(self.hdfs_path)
    #
    #     df_save = df_save.repartition(10)
    #     partition_by = ["site_name", "date_type", "date_info"]
    #     print(f"Saving to Hive table: {self.hive_tb}, partitioned by {partition_by}")
    #     df_save.write.saveAsTable(name=self.hive_tb, format='hive', mode='append', partitionBy=partition_by)
    #     print("success")
if __name__ == '__main__':
    site_name = CommonUtil.get_sys_arg(1, None)
......
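For reference, below is a minimal sketch of what the commented-out save_data block removed above would do if it were re-enabled: build the output columns, clear the target HDFS directory, repartition, and append to the partitioned Hive table. It is only an illustration, not part of this commit; the HdfsUtils import path is an assumption about the repo layout, and the timestamp pattern uses 'ss' (seconds) where the original comment had 'SS' (fractional seconds).

# Illustrative sketch only, not part of this commit: a method that would live on DwtStMtChristmasInfo.
from pyspark.sql import functions as F

from utils.hdfs_utils import HdfsUtils  # assumed import path for the project's HdfsUtils helper


def save_data(self):
    # Build the output columns from the handled search-term DataFrame.
    df_save = self.df_st_handle.select(
        F.col('search_term'),
        F.col('mt_volume'),
        F.col('total_searched_products'),
        F.col('total_page3_products'),
        F.col('total_self_products'),
        F.round(F.col('total_self_products') / F.col('total_page3_products'), 4).alias('self_product_rate'),
        F.col('total_bsr_orders'),
        F.col('total_orders'),
        F.col('total_new_products'),
        F.round(F.col('total_new_products') / F.col('total_page3_products'), 4).alias('new_product_rate'),
        # 'ss' = seconds; the commented-out code used 'SS' (fractional seconds), which looks like a typo.
        F.date_format(F.current_timestamp(), 'yyyy-MM-dd HH:mm:ss').alias('created_time'),
        F.date_format(F.current_timestamp(), 'yyyy-MM-dd HH:mm:ss').alias('updated_time'),
        F.lit(self.site_name).alias('site_name'),
        F.lit(self.date_type).alias('date_type'),
        F.lit(self.date_info).alias('date_info'),
    )

    # Clear the HDFS target directory before writing.
    print(f"Clearing HDFS directory: {self.hdfs_path}")
    HdfsUtils.delete_file_in_folder(self.hdfs_path)

    # Limit the number of output files, then append to the partitioned Hive table.
    df_save = df_save.repartition(10)
    partition_by = ["site_name", "date_type", "date_info"]
    print(f"Saving to Hive table: {self.hive_tb}, partitioned by {partition_by}")
    df_save.write.saveAsTable(name=self.hive_tb, format='hive', mode='append', partitionBy=partition_by)
    print("success")

The delete-then-append pattern mirrors the commented-out code: clearing self.hdfs_path before writing presumably keeps reruns of the same site_name/date_type/date_info partition from duplicating data.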