
Unable to query long periods

Open adradr opened this issue 2 years ago • 2 comments

I am also facing this issue with the v3-subgraph while trying to query multiple months of data with pagination. The error appears randomly after a few pages have been queried. Am I doing something wrong, or are these official subgraphs unstable?

import pandas as pd
from datetime import datetime
from gql import gql, Client
from gql.transport.requests import RequestsHTTPTransport


def create_query(address, fromdate, first_value=1000, skip_value=0):
    first = f'first: {first_value}' if first_value > 0 else ''
    skip = f'skip: {skip_value}' if skip_value > 0 else ''
    # Making the query for gql
    query_text = '''
        query ($fromdate: Int!)
        {
            poolHourDatas(
                orderBy:periodStartUnix,
                orderDirection:desc,
                %s,
                %s,
                where:{
                    pool:"%s",
                    periodStartUnix_gt:$fromdate},
            )
        {
            periodStartUnix
            liquidity
            high
            low
            close
            feeGrowthGlobal0X128
            feeGrowthGlobal1X128
            pool {
                totalValueLockedUSD
                totalValueLockedToken1
                totalValueLockedToken0
                token0{ decimals }
                token1{ decimals }
                }
            }
        }
        ''' % (first, skip, address)

    query = gql(query_text)
    params = {
        "fromdate": fromdate
    }
    # Return the compiled query and its variables for client.execute()
    return query, params

def graph(network: str, address: str, fromdate: int) -> pd.DataFrame:

    if network == 'ethereum':
        url = 'https://api.thegraph.com/subgraphs/name/uniswap/uniswap-v3'
    elif network == 'polygon':
        # 'https://api.thegraph.com/subgraphs/name/steegecs/uniswap-v3-polygon'
        url = 'https://api.thegraph.com/subgraphs/name/zephyrys/uniswap-polygon-but-it-works'
    elif network == 'arbitrum':
        url = 'https://api.thegraph.com/subgraphs/name/ianlapham/uniswap-arbitrum-one'
    elif network == 'optimism':
        url = 'https://api.thegraph.com/subgraphs/name/ianlapham/uniswap-optimism-dev'
    else:
        raise ValueError(f'Unsupported network: {network}')

    sample_transport = RequestsHTTPTransport(
        url=url,
        verify=True,
        retries=5,
    )
    client = Client(
        transport=sample_transport
    )

    # Printing out query date
    formatted_date = datetime.utcfromtimestamp(
        fromdate).strftime('%Y-%m-%d %H:%M:%S')
    now_time = datetime.now().timestamp()
    df = pd.DataFrame()
    end_date = fromdate

    # Printing query infos
    print("-------------------------------- GraphQL Query --------------------------------")
    print("Query information:")
    print("Endpoint:", url)
    print("Network:", network)
    print("Pool contract:", address)
    print("Uniswap pool info:",
          f"https://info.uniswap.org/#/{network}/pools/{address}")
    print('Querying from unix timestamp:', fromdate, '/', formatted_date)
    print('Querying GraphQL endpoint:', url)

    # Executing query and formatting returned value
    if fromdate > now_time:
        raise ValueError('fromdate is in the future')
    # 1000 hourly records cover 1000 * 3600 seconds, which fits in one page of 1000 results
    elif now_time - fromdate <= 1000 * 3600:
        print('Query can be done in one request')
        query, params = create_query(address, fromdate, 1000, 0)
        response = client.execute(query, variable_values=params)
        df = pd.json_normalize(response['poolHourDatas'])
    elif now_time - fromdate > 1000 * 3600:
        print('Warning: fromdate is too far in the past, need to query multiple times')
        skip_value = 0
        while True:
            if not df.empty:
                print(f'Queried {df.index.max() - df.index.min()} so far')
            query, params = create_query(address, fromdate, 0, skip_value)
            response = client.execute(query, variable_values=params)
            query_df = pd.json_normalize(response['poolHourDatas'])
            if len(query_df) == 0 or query_df['periodStartUnix'].min() < fromdate:
                print('No more data')
                df = df.reset_index(drop=True)
                break
            query_df.set_index(pd.to_datetime(query_df.periodStartUnix, unit='s'), inplace=True)
            # concatenate dataframes
            df = pd.concat([df, query_df])
            # Without a 'first' argument the subgraph returns 100 rows per page,
            # so advance the offset by that default page size
            skip_value += 100

    else:
        raise Exception('error in fromdate: %s - now_time: %s' % (fromdate, now_time))

    print("Query succeeded.")
    print("-------------------------------- GraphQL Query --------------------------------")

    df = df.astype(float)
    df = df.sort_index()

    return df
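
For what it's worth, here is a minimal sketch of the same poolHourDatas query paginated with a periodStartUnix cursor instead of skip. It is untested, and the helper name fetch_pool_hour_datas and the page_size default are placeholders I made up, not part of the code above. As far as I know the hosted service restricts how large skip can grow, so keeping first fixed and moving the where filter forward each page avoids offsets that grow with the queried range:

import pandas as pd
from gql import gql


def fetch_pool_hour_datas(client, address, fromdate, page_size=1000):
    # Cursor-based pagination: keep requesting rows newer than the last
    # periodStartUnix already seen, instead of increasing a skip offset.
    query = gql('''
        query ($pool: String!, $cursor: Int!, $first: Int!) {
            poolHourDatas(
                first: $first,
                orderBy: periodStartUnix,
                orderDirection: asc,
                where: { pool: $pool, periodStartUnix_gt: $cursor }
            ) {
                periodStartUnix
                liquidity
                high
                low
                close
                feeGrowthGlobal0X128
                feeGrowthGlobal1X128
            }
        }
        ''')

    rows = []
    cursor = fromdate
    while True:
        response = client.execute(query, variable_values={
            'pool': address, 'cursor': cursor, 'first': page_size})
        page = response['poolHourDatas']
        if not page:
            break
        rows.extend(page)
        # Advance the cursor to the newest timestamp returned so far
        cursor = int(page[-1]['periodStartUnix'])
        if len(page) < page_size:
            break

    df = pd.json_normalize(rows)
    if not df.empty:
        df = df.astype(float)
        df = df.set_index(pd.to_datetime(df.periodStartUnix, unit='s')).sort_index()
    return df

It can reuse the same Client / RequestsHTTPTransport setup as in graph() above and returns the same kind of DataFrame.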

adradr avatar Dec 06 '22 09:12 adradr

It seems they are super unstable; sometimes they work fine and sometimes they don't. Has anyone else run into these problems?

bullish-moonrock avatar May 13 '23 15:05 bullish-moonrock

Yeah, I get the same.

adradr avatar May 13 '23 16:05 adradr