【LeetCode 1635. Hopper Company Queries I】Generating the last day of every month in 2020 with WITH RECURSIVE

Problem link

https://leetcode.cn/problems/hopper-company-queries-i/description/

MySQL code

-- Recursive CTE that generates the last day of every month in 2020
WITH RECURSIVE months AS (
  SELECT LAST_DAY('2019-12-01') AS month_end -- anchor row: last day of the month before the start date
  UNION ALL
  SELECT LAST_DAY(month_end + INTERVAL 1 DAY) -- last day of the following month
  FROM months
  WHERE month_end < '2020-12-01' -- recursion stop condition
)
,
months2020 as (
    SELECT *, DATE_FORMAT(month_end, '%Y-%m') AS `month` -- format the date, keeping only year and month
    FROM months
    where DATE_FORMAT(month_end, '%Y')="2020"
)
,
answer_part1 as (
    SELECT  tmp1.`month`, count(distinct driver_id) as active_drivers from
    (SELECT months2020.*, d.driver_id 
    from months2020
    left join Drivers d 
    on d.join_date <= months2020.month_end) tmp1
    group by tmp1.`month`
)
,
answer_part2 as(
    SELECT months2020.`month`, count(distinct ar.ride_id) as accepted_rides
    from   months2020
    left join Rides r 
    on months2020.`month`=DATE_FORMAT(r.requested_at,"%Y-%m")
    join AcceptedRides ar
    on ar.ride_id=r.ride_id
    group by months2020.`month`
)


SELECT cast(right(m2.`month`,2) as unsigned int) as `month`
,ifnull(ap1.active_drivers ,0) as active_drivers
,ifnull(ap2.accepted_rides ,0) as accepted_rides
from months2020 m2
left join answer_part1 ap1  on ap1.`month`= m2.`month` 
left join answer_part2 ap2  on ap2.`month`= m2.`month` 
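
As a quick sanity check (a minimal Python sketch, not part of the submitted SQL), the recursion above can be replayed with the standard library: start from LAST_DAY('2019-12-01'), repeatedly step one day past the current month end and take the last day of that month again, and stop once the month end reaches December 2020.

import calendar
from datetime import date, timedelta

def last_day(d: date) -> date:
    # Last day of the month containing d (plays the role of MySQL's LAST_DAY)
    return date(d.year, d.month, calendar.monthrange(d.year, d.month)[1])

# Anchor row, then the same step as the recursive member of the CTE:
# jump one day past the current month end and take the last day again.
month_ends = [last_day(date(2019, 12, 1))]            # 2019-12-31
while month_ends[-1] < date(2020, 12, 1):             # recursion stop condition
    month_ends.append(last_day(month_ends[-1] + timedelta(days=1)))

# Keep only the 2020 rows, like the months2020 CTE
months_2020 = [d for d in month_ends if d.year == 2020]
print(months_2020)  # 2020-01-31, 2020-02-29, ..., 2020-12-31 (12 dates)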

Equivalent pandas code

import pandas as pd
pd.set_option('display.max_rows', None)

def hopper_company(drivers: pd.DataFrame, rides: pd.DataFrame, accepted_rides: pd.DataFrame) -> pd.DataFrame:
    # Generate the last day of each month for 2020
    month_ends = pd.date_range(start='2019-12-31', end='2020-12-31', freq='M')

    # Create a DataFrame from the month_ends
    months2020_df = pd.DataFrame({'month_end': month_ends})
    months2020_df['month'] = months2020_df['month_end'].dt.to_period('M').astype(str)

    # Filter for 2020
    months2020_df = months2020_df[months2020_df['month'].str.contains("2020")]
    # print(f"months2020_df=\n{months2020_df}") # no problem

    # Part 1: Count active drivers
    drivers['join_date'] = pd.to_datetime(drivers['join_date'])
    # Ensure 'month_end' is a datetime.
    months2020_df['month_end'] = pd.to_datetime(months2020_df['month_end'])
    # Collect one row per month, then build the DataFrame in a single pass
    # (avoids the private DataFrame._append; DataFrame.append was removed in pandas 2.0).
    active_driver_rows = []
    # For each month in 'months2020_df', count drivers who joined on or before that month's end.
    for _, month_row in months2020_df.iterrows():
        month = month_row['month']
        month_end = month_row['month_end']
        active_drivers = drivers[drivers['join_date'] <= month_end]
        unique_driver_count = active_drivers['driver_id'].nunique()
        active_driver_rows.append({'month': month, 'active_drivers': unique_driver_count})
    active_drivers_per_month = pd.DataFrame(active_driver_rows)
    # 'month' already comes from months2020_df as a string; keep the dtype explicit anyway.
    active_drivers_per_month['month'] = active_drivers_per_month['month'].astype(str)



    # Part 2: Count accepted rides
    # Mirror the join_date conversion; harmless if 'requested_at' is already datetime.
    rides['requested_at'] = pd.to_datetime(rides['requested_at'])
    rides['request_month'] = rides['requested_at'].dt.to_period('M').astype(str)
    accepted_rides_merged = accepted_rides.merge(rides, on='ride_id')
    accepted_rides_count = accepted_rides_merged.groupby('request_month').size().reset_index(name='accepted_rides')
    accepted_rides_count.columns = ['month', 'accepted_rides']

    # Merge the results
    final_df = months2020_df.merge(active_drivers_per_month, how='left', on='month').merge(accepted_rides_count, how='left', on='month')
    final_df['month'] = final_df['month_end'].dt.month  # Convert to integer month
    final_df['active_drivers'] = final_df['active_drivers'].fillna(0).astype(int)
    final_df['accepted_rides'] = final_df['accepted_rides'].fillna(0).astype(int)

    # Select only the required columns
    final_df = final_df[['month', 'active_drivers', 'accepted_rides']]

    return final_df
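
For local testing, the function can be exercised with hand-made DataFrames. The rows below are made-up sample data and contain only the columns the function actually uses (the real problem tables carry extra columns such as driver and user ids on the ride tables), so treat this as an illustrative assumption rather than the official test case.

if __name__ == "__main__":
    # Made-up sample rows, restricted to the columns hopper_company reads
    drivers = pd.DataFrame({
        'driver_id': [1, 2, 3],
        'join_date': ['2019-12-10', '2020-03-08', '2021-01-03'],
    })
    rides = pd.DataFrame({
        'ride_id': [10, 11, 12],
        'requested_at': pd.to_datetime(['2020-02-09', '2020-02-20', '2020-06-03']),
    })
    accepted_rides = pd.DataFrame({
        'ride_id': [10, 12],
    })
    # Expect 12 rows with columns month / active_drivers / accepted_rides
    print(hopper_company(drivers, rides, accepted_rides))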