8. Weekly Comparison Swap
    Updated 2023-03-10
    with sushi as (
        select
            'ARB-Sushi' as chain,
            trunc(block_timestamp, 'week') as weekly,
            count(distinct tx_hash) as total_tx,
            count(distinct origin_from_address) as total_user,
            sum(amount_in_usd) as volume,
            avg(amount_in_usd) as avg_usd,
            -- running totals per chain (each UNION ALL branch is windowed separately)
            sum(total_tx) over (order by weekly asc) as cum_tx,
            sum(volume) over (order by weekly asc) as cum_volume
        from arbitrum.sushi.ez_swaps
        where block_timestamp >= current_date - 90
            -- drop swaps whose two USD legs disagree by more than $1,000 (price-feed outliers)
            and amount_in_usd between amount_out_usd - 1000 and amount_out_usd + 1000
        group by 1, 2

        union all

        select
            'AVA-Sushi' as chain,
            trunc(block_timestamp, 'week') as weekly,
            count(distinct tx_hash) as total_tx,
            count(distinct origin_from_address) as total_user,
            sum(amount_in_usd) as volume,
            avg(amount_in_usd) as avg_usd,
            sum(total_tx) over (order by weekly asc) as cum_tx,
            sum(volume) over (order by weekly asc) as cum_volume
        from avalanche.sushi.ez_swaps
        where block_timestamp >= current_date - 90
            -- cap obviously mispriced swaps
            and amount_in_usd <= 100000000
        group by 1, 2

        union all

        select
            'OP-Sushi' as chain,
            trunc(block_timestamp, 'week') as weekly,
            count(distinct tx_hash) as total_tx,
            count(distinct origin_from_address) as total_user,
            sum(amount_in_usd) as volume,
            avg(amount_in_usd) as avg_usd,
            sum(total_tx) over (order by weekly asc) as cum_tx,
            sum(volume) over (order by weekly asc) as cum_volume
        from optimism.sushi.ez_swaps
        where block_timestamp >= current_date - 90
        -- note: no USD outlier filter is applied to this source
        group by 1, 2
    ),

    velodrome as (
        select
            'OP-Velodrome' as chain,
            trunc(block_timestamp, 'week') as weekly,
            count(distinct tx_hash) as total_tx,
            count(distinct origin_from_address) as total_user,
            sum(amount_in_usd) as volume,
            avg(amount_in_usd) as avg_usd,
            sum(total_tx) over (order by weekly asc) as cum_tx,
            sum(volume) over (order by weekly asc) as cum_volume
        from optimism.velodrome.ez_swaps
        where block_timestamp >= current_date - 90
            and amount_in_usd between amount_out_usd - 1000 and amount_out_usd + 1000
        group by 1, 2
    )

    -- combine both CTEs into one weekly comparison table
    select * from sushi
    union all
    select * from velodrome
    order by weekly, chain
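
    One design note: because each UNION ALL branch computes its own window functions, the cumulative columns are written out four times. A minimal sketch of an alternative structure (using the same Flipside ez_swaps tables, and assuming chain labels stay distinct) aggregates each source to plain weekly rows first, then computes the running totals once over the combined result with PARTITION BY chain. Only two of the four sources are shown; the others follow the same pattern, and the per-source outlier filters from the query above would slot into each branch's WHERE clause.

    with weekly_swaps as (
        select
            'ARB-Sushi' as chain,
            trunc(block_timestamp, 'week') as weekly,
            count(distinct tx_hash) as total_tx,
            count(distinct origin_from_address) as total_user,
            sum(amount_in_usd) as volume,
            avg(amount_in_usd) as avg_usd
        from arbitrum.sushi.ez_swaps
        where block_timestamp >= current_date - 90
        group by 1, 2

        union all

        select
            'OP-Velodrome',
            trunc(block_timestamp, 'week'),
            count(distinct tx_hash),
            count(distinct origin_from_address),
            sum(amount_in_usd),
            avg(amount_in_usd)
        from optimism.velodrome.ez_swaps
        where block_timestamp >= current_date - 90
        group by 1, 2
        -- remaining sources follow the same pattern
    )
    select
        *,
        -- running totals computed once, per chain, over the combined result
        sum(total_tx) over (partition by chain order by weekly) as cum_tx,
        sum(volume) over (partition by chain order by weekly) as cum_volume
    from weekly_swaps
    order by weekly, chain

    This keeps the window logic in one place, so adding a fifth source only means adding another plain weekly aggregate to the CTE.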