chore(trading): cleanup paginated data solution, improve performance (#4036)
parent bf6c13f523
commit 2e7a6a6458
@@ -38,7 +38,6 @@ type Data = Item[];
type QueryData = {
  data: Data;
  pageInfo?: PageInfo;
  totalCount?: number;
};

type CombinedData = {
@@ -115,7 +114,6 @@ const paginatedSubscribe = makeDataProvider<
    first,
    append: defaultAppend,
    getPageInfo: (r) => r?.pageInfo ?? null,
    getTotalCount: (r) => r?.totalCount,
  },
});

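For orientation, this is roughly how a provider opts into pagination after the cleanup: the pagination block only needs first, append and getPageInfo, while the getTotalCount hook shown above is what this commit removes. A minimal sketch, assuming the fixture names above (Item, Data, QueryData), a hypothetical itemsQueryDocument and QueryVariables type, and not a published API surface; the exact generic parameters and required options of makeDataProvider may differ:

const itemsProvider = makeDataProvider<QueryData, Data, never, never, QueryVariables>({
  query: itemsQueryDocument, // hypothetical query document returning { data, pageInfo }
  getData: (r) => r?.data ?? [], // each item must carry a cursor for defaultAppend to merge on
  pagination: {
    first, // page size requested from the API
    append: defaultAppend, // cursor-based merge, rewritten later in this diff
    getPageInfo: (r) => r?.pageInfo ?? null,
  },
});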
@@ -373,32 +371,10 @@ describe('data provider', () => {
    subscription.unsubscribe();
  });

  it('fills data with nulls if pagination is enabled', async () => {
    const totalCount = 1000;
    const data: Item[] = new Array(first).fill(null).map((v, i) => ({
      cursor: i.toString(),
      node: {
        id: i.toString(),
      },
    }));
    const subscription = paginatedSubscribe(callback, client, variables);
    await resolveQuery({
      data,
      totalCount,
      pageInfo: {
        hasNextPage: true,
      },
    });
    expect(callback.mock.calls[1][0].data?.length).toBe(totalCount);
    subscription.unsubscribe();
  });

  it('loads requested data blocks and inserts data with total count', async () => {
    const totalCount = 1000;
  it('loads requested data blocks', async () => {
    const subscription = paginatedSubscribe(callback, client, variables);
    await resolveQuery({
      data: generateData(),
      totalCount,
      pageInfo: {
        hasNextPage: true,
        endCursor: '100',
@ -407,168 +383,25 @@ describe('data provider', () => {
|
||||
|
||||
// load next page
|
||||
subscription.load && subscription.load();
|
||||
let lastQueryArgs =
|
||||
const lastQueryArgs =
|
||||
clientQuery.mock.calls[clientQuery.mock.calls.length - 1][0];
|
||||
expect(lastQueryArgs?.variables?.pagination).toEqual({
|
||||
expect(lastQueryArgs?.variables?.['pagination']).toEqual({
|
||||
after: '100',
|
||||
first,
|
||||
});
|
||||
await resolveQuery({
|
||||
data: generateData(100),
|
||||
pageInfo: {
|
||||
hasNextPage: true,
|
||||
hasNextPage: false,
|
||||
endCursor: '200',
|
||||
},
|
||||
});
|
||||
|
||||
// load page with skip
|
||||
subscription.load && subscription.load(500, 600);
|
||||
lastQueryArgs =
|
||||
clientQuery.mock.calls[clientQuery.mock.calls.length - 1][0];
|
||||
expect(lastQueryArgs?.variables?.pagination).toEqual({
|
||||
after: '200',
|
||||
first,
|
||||
skip: 300,
|
||||
});
|
||||
await resolveQuery({
|
||||
data: generateData(500),
|
||||
pageInfo: {
|
||||
hasNextPage: true,
|
||||
endCursor: '600',
|
||||
},
|
||||
});
|
||||
|
||||
// load in the gap
|
||||
subscription.load && subscription.load(400, 500);
|
||||
lastQueryArgs =
|
||||
clientQuery.mock.calls[clientQuery.mock.calls.length - 1][0];
|
||||
expect(lastQueryArgs?.variables?.pagination).toEqual({
|
||||
after: '200',
|
||||
first,
|
||||
skip: 200,
|
||||
});
|
||||
await resolveQuery({
|
||||
data: generateData(400),
|
||||
pageInfo: {
|
||||
hasNextPage: true,
|
||||
endCursor: '500',
|
||||
},
|
||||
});
|
||||
|
||||
// load page after last block
|
||||
subscription.load && subscription.load(700, 800);
|
||||
lastQueryArgs =
|
||||
clientQuery.mock.calls[clientQuery.mock.calls.length - 1][0];
|
||||
expect(lastQueryArgs?.variables?.pagination).toEqual({
|
||||
after: '600',
|
||||
first,
|
||||
skip: 100,
|
||||
});
|
||||
await resolveQuery({
|
||||
data: generateData(700),
|
||||
pageInfo: {
|
||||
hasNextPage: true,
|
||||
endCursor: '800',
|
||||
},
|
||||
});
|
||||
|
||||
// load last page shorter than expected
|
||||
subscription.load && subscription.load(950, 1050);
|
||||
lastQueryArgs =
|
||||
clientQuery.mock.calls[clientQuery.mock.calls.length - 1][0];
|
||||
expect(lastQueryArgs?.variables?.pagination).toEqual({
|
||||
after: '800',
|
||||
first,
|
||||
skip: 150,
|
||||
});
|
||||
await resolveQuery({
|
||||
data: generateData(950, 20),
|
||||
pageInfo: {
|
||||
hasNextPage: false,
|
||||
endCursor: '970',
|
||||
},
|
||||
});
|
||||
let lastCallbackArgs = callback.mock.calls[callback.mock.calls.length - 1];
|
||||
expect(lastCallbackArgs[0].totalCount).toBe(970);
|
||||
|
||||
// load next page when pageInfo.hasNextPage === false
|
||||
const clientQueryCallsLength = clientQuery.mock.calls.length;
|
||||
subscription.load && subscription.load();
|
||||
expect(clientQuery.mock.calls.length).toBe(clientQueryCallsLength);
|
||||
|
||||
// load last page longer than expected
|
||||
subscription.load && subscription.load(960, 1000);
|
||||
lastQueryArgs =
|
||||
clientQuery.mock.calls[clientQuery.mock.calls.length - 1][0];
|
||||
expect(lastQueryArgs?.variables?.pagination).toEqual({
|
||||
after: '960',
|
||||
first,
|
||||
});
|
||||
await resolveQuery({
|
||||
data: generateData(960, 40),
|
||||
pageInfo: {
|
||||
hasNextPage: true,
|
||||
endCursor: '1000',
|
||||
},
|
||||
});
|
||||
lastCallbackArgs = callback.mock.calls[callback.mock.calls.length - 1];
|
||||
expect(lastCallbackArgs[0].totalCount).toBe(1000);
|
||||
|
||||
subscription.unsubscribe();
|
||||
});
|
||||
|
||||
it('loads requested data blocks and inserts data without totalCount', async () => {
|
||||
const totalCount = undefined;
|
||||
const subscription = paginatedSubscribe(callback, client, variables);
|
||||
await resolveQuery({
|
||||
data: generateData(),
|
||||
totalCount,
|
||||
pageInfo: {
|
||||
hasNextPage: true,
|
||||
endCursor: '100',
|
||||
},
|
||||
});
|
||||
let lastCallbackArgs = callback.mock.calls[callback.mock.calls.length - 1];
|
||||
expect(lastCallbackArgs[0].totalCount).toBe(undefined);
|
||||
|
||||
// load next page
|
||||
subscription.load && subscription.load();
|
||||
await resolveQuery({
|
||||
data: generateData(100),
|
||||
pageInfo: {
|
||||
hasNextPage: true,
|
||||
endCursor: '200',
|
||||
},
|
||||
});
|
||||
lastCallbackArgs = callback.mock.calls[callback.mock.calls.length - 1];
|
||||
expect(lastCallbackArgs[0].totalCount).toBe(undefined);
|
||||
|
||||
// load last page
|
||||
subscription.load && subscription.load();
|
||||
await resolveQuery({
|
||||
data: generateData(200, 50),
|
||||
pageInfo: {
|
||||
hasNextPage: false,
|
||||
endCursor: '250',
|
||||
},
|
||||
});
|
||||
lastCallbackArgs = callback.mock.calls[callback.mock.calls.length - 1];
|
||||
expect(lastCallbackArgs[0].totalCount).toBe(250);
|
||||
subscription.unsubscribe();
|
||||
});
|
||||
|
||||
it('sets total count when first page has no next page', async () => {
|
||||
const subscription = paginatedSubscribe(callback, client, variables);
|
||||
await resolveQuery({
|
||||
data: generateData(),
|
||||
pageInfo: {
|
||||
hasNextPage: false,
|
||||
endCursor: '100',
|
||||
},
|
||||
});
|
||||
const lastCallbackArgs =
|
||||
callback.mock.calls[callback.mock.calls.length - 1];
|
||||
expect(lastCallbackArgs[0].totalCount).toBe(100);
|
||||
subscription.unsubscribe();
|
||||
});
|
||||
|
||||
@ -752,7 +585,7 @@ describe('derived data provider', () => {
|
||||
subscription.load && subscription.load();
|
||||
const lastQueryArgs =
|
||||
clientQuery.mock.calls[clientQuery.mock.calls.length - 1][0];
|
||||
expect(lastQueryArgs?.variables?.pagination).toEqual({
|
||||
expect(lastQueryArgs?.variables?.['pagination']).toEqual({
|
||||
after: '100',
|
||||
first,
|
||||
});
|
||||
|
@@ -27,7 +27,6 @@ export interface UpdateCallback<Data, Delta> {
      loading: boolean;
      loaded: boolean;
      pageInfo: PageInfo | null;
      totalCount?: number;
    }
  ): void;
}
@@ -40,9 +39,7 @@ export interface Reload {
  (forceReset?: boolean): void;
}

type Pagination = Schema.Pagination & {
  skip?: number;
};
type Pagination = Schema.Pagination;

export interface PageInfo {
  startCursor?: string;
@@ -83,12 +80,8 @@ export interface Append<Data> {
    data: Data | null,
    insertionData: Data | null,
    insertionPageInfo: PageInfo | null,
    pagination?: Pagination,
    totalCount?: number
  ): {
    data: Data | null;
    totalCount?: number;
  };
    pagination?: Pagination
  ): Data | null;
}

interface GetData<QueryData, Data, Variables> {
@@ -99,10 +92,6 @@ interface GetPageInfo<QueryData> {
  (queryData: QueryData): PageInfo | null;
}

interface GetTotalCount<QueryData> {
  (queryData: QueryData): number | undefined;
}

interface GetDelta<SubscriptionData, Delta, Variables> {
  (
    subscriptionData: SubscriptionData,
@@ -119,44 +108,32 @@ export interface Edge<T extends Node> extends Cursor {
  node: T;
}

export function defaultAppend<Data>(
  data: Data | null,
  insertionData: Data | null,
export function defaultAppend<T extends Cursor>(
  data: T[] | null,
  insertionData: T[] | null,
  insertionPageInfo: PageInfo | null,
  pagination?: Pagination,
  totalCount?: number
  pagination?: Pagination
) {
  if (data && insertionData && insertionPageInfo) {
    if (!(data instanceof Array) || !(insertionData instanceof Array)) {
      throw new Error(
        'data needs to be instance of Edge[] when using pagination'
        'data needs to be instance of Array[] when using pagination'
      );
    }
    if (pagination?.after) {
      if (data[data.length - 1].cursor === pagination?.after) {
        return [...data, ...insertionData];
      }
      const cursors = data.map((item) => item && item.cursor);
      const startIndex = cursors.lastIndexOf(pagination.after);
      if (startIndex !== -1) {
        const start = startIndex + 1 + (pagination.skip ?? 0);
        const end = start + insertionData.length;
        let updatedData = [
          ...data.slice(0, start),
          ...insertionData,
          ...data.slice(end),
        ];
        if (!insertionPageInfo.hasNextPage && end !== (totalCount ?? 0)) {
          // adjust totalCount if last page is shorter or longer than expected
          totalCount = end;
          updatedData = updatedData.slice(0, end);
        }
        return {
          data: updatedData,
          // increase totalCount if last page is longer than expected
          totalCount: totalCount && Math.max(updatedData.length, totalCount),
        };
        const start = startIndex + 1;
        const updatedData = [...data.slice(0, start), ...insertionData];
        return updatedData;
      }
    }
  }
  return { data, totalCount };
  return data;
}
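The simplified defaultAppend drops all totalCount bookkeeping: it now returns the merged array (or the original data) instead of the old { data, totalCount } pair, and it only appends after the row whose cursor matches pagination.after. A small self-contained sketch of that behaviour, with illustrative row values and the assumption that Schema.Pagination and PageInfo accept the fields shown:

type Row = { cursor: string; id: string };
const loaded: Row[] = [
  { cursor: 'a', id: '1' },
  { cursor: 'b', id: '2' },
];
const nextPage: Row[] = [{ cursor: 'c', id: '3' }];
// pagination.after equals the last loaded cursor, so the pages are simply concatenated:
const merged = defaultAppend(
  loaded,
  nextPage,
  { hasNextPage: false },
  { first: 2, after: 'b' }
);
// merged now holds rows 1, 2, 3 as a plain array, not the old { data, totalCount } shape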

interface DataProviderParams<
@@ -175,7 +152,6 @@ interface DataProviderParams<
  getDelta?: GetDelta<SubscriptionData, Delta, Variables>;
  pagination?: {
    getPageInfo: GetPageInfo<QueryData>;
    getTotalCount?: GetTotalCount<QueryData>;
    append: Append<Data>;
    first: number;
  };
@@ -245,7 +221,6 @@ function makeDataProviderInternal<
  let client: ApolloClient<object>;
  let subscription: Subscription[] | undefined;
  let pageInfo: PageInfo | null = null;
  let totalCount: number | undefined;

  // notify a single callback about the current state; delta is passed optionally, only when notify was invoked onNext
  const notify = (
@@ -258,7 +233,6 @@ function makeDataProviderInternal<
      loading,
      loaded,
      pageInfo,
      totalCount,
      ...updateData,
    });
  };
@@ -301,59 +275,41 @@ function makeDataProviderInternal<
    }
  });

  const load = async (start?: number) => {
  const load = async () => {
    if (!pagination) {
      return Promise.reject();
    }
    if (!pageInfo?.hasNextPage) {
      return null;
    }
    const paginationVariables: Pagination = {
      first: pagination.first,
      after: pageInfo?.endCursor,
    };
    if (start !== undefined && data instanceof Array) {
      if (!start) {
        paginationVariables.after = undefined;
      } else if (data && data[start - 1]) {
        paginationVariables.after = (data[start - 1] as Cursor).cursor;
      } else {
        let skip = 1;
        while (!data[start - 1 - skip] && skip <= start) {
          skip += 1;
        }
        paginationVariables.skip = skip;
        if (skip === start) {
          paginationVariables.after = undefined;
        } else {
          paginationVariables.after = (data[start - 1 - skip] as Cursor).cursor;
        }
    if (data) {
      const endCursor = (data as Cursor[])[(data as Cursor[]).length - 1]
        .cursor;
      if (endCursor) {
        paginationVariables.after = endCursor;
      }
    } else if (!pageInfo?.hasNextPage) {
      return null;
    }

    const res = await call(paginationVariables);

    const insertionData = getData(res.data, variables);
    const insertionPageInfo = pagination.getPageInfo(res.data);
    ({ data, totalCount } = pagination.append(
    data = pagination.append(
      data,
      insertionData,
      insertionPageInfo,
      paginationVariables,
      totalCount
    ));
      paginationVariables
    );
    pageInfo = insertionPageInfo;
    totalCount =
      (pagination.getTotalCount && pagination.getTotalCount(res.data)) ??
      totalCount;
    notifyAll({ insertionData, isInsert: true });
    return insertionData;
  };

  const setData = (updatedData: Data | null) => {
    data = updatedData;
    if (totalCount !== undefined && data instanceof Array) {
      totalCount = data.length;
    }
  };

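With the start and skip handling gone, load() always requests the block that follows the last loaded cursor (falling back to pageInfo.endCursor when nothing is cached yet), and resolves with the inserted rows or null once hasNextPage is false. Consumer code from the spec earlier in this diff reduces to a sketch like this, assuming the same callback, client and variables fixtures:

const subscription = paginatedSubscribe(callback, client, variables);
// each call fetches the next block after the last known cursor
await subscription.load?.();
// once pageInfo.hasNextPage is false this resolves to null and issues no query
await subscription.load?.();
subscription.unsubscribe();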
  const subscriptionSubscribe = () => {
@@ -400,16 +356,6 @@ function makeDataProviderInternal<
        );
      }
      pageInfo = pagination.getPageInfo(res.data);
      if (pageInfo && !pageInfo.hasNextPage) {
        totalCount = data.length;
      } else {
        totalCount =
          pagination.getTotalCount && pagination.getTotalCount(res.data);
      }

      if (data && totalCount && data.length < totalCount) {
        data.push(...new Array(totalCount - data.length).fill(null));
      }
    }
    // if updates were received from the subscription during the initial query load, apply them to the data just received
    if (update && data && updateQueue && updateQueue.length > 0) {
@@ -417,9 +363,6 @@ function makeDataProviderInternal<
      const delta = updateQueue.shift();
      if (delta) {
        setData(update(data, delta, reload, variables));
        if (totalCount !== undefined && data instanceof Array) {
          totalCount = data.length;
        }
      }
    }
  }
@@ -590,7 +533,7 @@ const memoize = <
 * @param update Update<Data, Delta> function executed on each onNext; it should update data based on delta and can reload the data provider
 * @param getData transforms received query data to the format that will be stored in the data provider
 * @param getDelta transforms delta data to the format that will be stored in the data provider
 * @param pagination pagination related functions { getPageInfo, getTotalCount, append, first }
 * @param pagination pagination related functions { getPageInfo, append, first }
 * @returns Subscribe<Data, Delta> subscribe function
 * @example
 * const marketMidPriceProvider = makeDataProvider<QueryData, Data, SubscriptionData, Delta>({
@ -12,23 +12,13 @@ export interface useDataProviderParams<
|
||||
Variables extends OperationVariables | undefined = undefined
|
||||
> {
|
||||
dataProvider: Subscribe<Data, Delta, Variables>;
|
||||
update?: ({
|
||||
delta,
|
||||
data,
|
||||
totalCount,
|
||||
}: {
|
||||
delta?: Delta;
|
||||
data: Data | null;
|
||||
totalCount?: number;
|
||||
}) => boolean;
|
||||
update?: ({ delta, data }: { delta?: Delta; data: Data | null }) => boolean;
|
||||
insert?: ({
|
||||
insertionData,
|
||||
data,
|
||||
totalCount,
|
||||
}: {
|
||||
insertionData?: Data | null;
|
||||
data: Data | null;
|
||||
totalCount?: number;
|
||||
}) => boolean;
|
||||
variables: Variables;
|
||||
skipUpdates?: boolean;
|
||||
@ -56,7 +46,6 @@ export const useDataProvider = <
|
||||
}: useDataProviderParams<Data, Delta, Variables>) => {
|
||||
const client = useApolloClient();
|
||||
const [data, setData] = useState<Data | null>(null);
|
||||
const [totalCount, setTotalCount] = useState<number>();
|
||||
const [loading, setLoading] = useState<boolean>(!skip);
|
||||
const [error, setError] = useState<Error | undefined>(undefined);
|
||||
const flushRef = useRef<(() => void) | undefined>(undefined);
|
||||
@ -101,7 +90,6 @@ export const useDataProvider = <
|
||||
error,
|
||||
loading,
|
||||
insertionData,
|
||||
totalCount,
|
||||
isInsert,
|
||||
isUpdate,
|
||||
loaded,
|
||||
@ -116,19 +104,18 @@ export const useDataProvider = <
|
||||
(skipUpdatesRef.current ||
|
||||
(!skipUpdatesRef.current &&
|
||||
updateRef.current &&
|
||||
updateRef.current({ delta, data, totalCount })))
|
||||
updateRef.current({ delta, data })))
|
||||
) {
|
||||
return;
|
||||
}
|
||||
if (
|
||||
isInsert &&
|
||||
insertRef.current &&
|
||||
insertRef.current({ insertionData, data, totalCount })
|
||||
insertRef.current({ insertionData, data })
|
||||
) {
|
||||
return;
|
||||
}
|
||||
}
|
||||
setTotalCount(totalCount);
|
||||
setData(data);
|
||||
if (!loading && !isUpdate && updateRef.current) {
|
||||
updateRef.current({ data });
|
||||
@ -150,7 +137,6 @@ export const useDataProvider = <
|
||||
useEffect(() => {
|
||||
setData(null);
|
||||
setError(undefined);
|
||||
setTotalCount(undefined);
|
||||
if (updateRef.current) {
|
||||
updateRef.current({ data: null });
|
||||
}
|
||||
@ -184,7 +170,6 @@ export const useDataProvider = <
|
||||
flush,
|
||||
reload,
|
||||
load,
|
||||
totalCount,
|
||||
};
|
||||
};
|
||||
|
||||
|
@ -146,7 +146,7 @@ export const DealTicket = ({
|
||||
});
|
||||
const openVolume = useOpenVolume(pubKey, market.id) ?? '0';
|
||||
const orders = activeOrders
|
||||
? activeOrders.map<OrderInfo>(({ node: order }) => ({
|
||||
? activeOrders.map<OrderInfo>((order) => ({
|
||||
isMarketOrder: order.type === OrderType.TYPE_MARKET,
|
||||
price: order.price,
|
||||
remaining: order.remaining,
|
||||
|
@ -1,4 +1,3 @@
|
||||
export * from './lib/fills-container';
|
||||
export * from './lib/use-fills-list';
|
||||
export * from './lib/fills-data-provider';
|
||||
export * from './lib/__generated__/Fills';
|
||||
|
@ -48,28 +48,32 @@ query Fills($filter: TradesFilter, $pagination: Pagination) {
|
||||
}
|
||||
}
|
||||
|
||||
subscription FillsEvent($filter: TradesSubscriptionFilter!) {
|
||||
tradesStream(filter: $filter) {
|
||||
id
|
||||
marketId
|
||||
buyOrder
|
||||
sellOrder
|
||||
buyerId
|
||||
sellerId
|
||||
aggressor
|
||||
price
|
||||
size
|
||||
createdAt
|
||||
type
|
||||
buyerFee {
|
||||
makerFee
|
||||
infrastructureFee
|
||||
liquidityFee
|
||||
}
|
||||
sellerFee {
|
||||
makerFee
|
||||
infrastructureFee
|
||||
liquidityFee
|
||||
}
|
||||
fragment FillUpdateFields on TradeUpdate {
|
||||
id
|
||||
marketId
|
||||
buyOrder
|
||||
sellOrder
|
||||
buyerId
|
||||
sellerId
|
||||
aggressor
|
||||
price
|
||||
size
|
||||
createdAt
|
||||
type
|
||||
buyerFee {
|
||||
makerFee
|
||||
infrastructureFee
|
||||
liquidityFee
|
||||
}
|
||||
sellerFee {
|
||||
makerFee
|
||||
infrastructureFee
|
||||
liquidityFee
|
||||
}
|
||||
}
|
||||
|
||||
subscription FillsEvent($filter: TradesSubscriptionFilter!) {
|
||||
tradesStream(filter: $filter) {
|
||||
...FillUpdateFields
|
||||
}
|
||||
}
|
||||
|
51
libs/fills/src/lib/__generated__/Fills.ts
generated
51
libs/fills/src/lib/__generated__/Fills.ts
generated
@ -15,6 +15,8 @@ export type FillsQueryVariables = Types.Exact<{
|
||||
|
||||
export type FillsQuery = { __typename?: 'Query', trades?: { __typename?: 'TradeConnection', edges: Array<{ __typename?: 'TradeEdge', cursor: string, node: { __typename?: 'Trade', id: string, createdAt: any, price: string, size: string, buyOrder: string, sellOrder: string, aggressor: Types.Side, market: { __typename?: 'Market', id: string }, buyer: { __typename?: 'Party', id: string }, seller: { __typename?: 'Party', id: string }, buyerFee: { __typename?: 'TradeFee', makerFee: string, infrastructureFee: string, liquidityFee: string }, sellerFee: { __typename?: 'TradeFee', makerFee: string, infrastructureFee: string, liquidityFee: string } } }>, pageInfo: { __typename?: 'PageInfo', startCursor: string, endCursor: string, hasNextPage: boolean, hasPreviousPage: boolean } } | null };
|
||||
|
||||
export type FillUpdateFieldsFragment = { __typename?: 'TradeUpdate', id: string, marketId: string, buyOrder: string, sellOrder: string, buyerId: string, sellerId: string, aggressor: Types.Side, price: string, size: string, createdAt: any, type: Types.TradeType, buyerFee: { __typename?: 'TradeFee', makerFee: string, infrastructureFee: string, liquidityFee: string }, sellerFee: { __typename?: 'TradeFee', makerFee: string, infrastructureFee: string, liquidityFee: string } };
|
||||
|
||||
export type FillsEventSubscriptionVariables = Types.Exact<{
|
||||
filter: Types.TradesSubscriptionFilter;
|
||||
}>;
|
||||
@ -60,6 +62,31 @@ export const FillEdgeFragmentDoc = gql`
|
||||
cursor
|
||||
}
|
||||
${FillFieldsFragmentDoc}`;
|
||||
export const FillUpdateFieldsFragmentDoc = gql`
|
||||
fragment FillUpdateFields on TradeUpdate {
|
||||
id
|
||||
marketId
|
||||
buyOrder
|
||||
sellOrder
|
||||
buyerId
|
||||
sellerId
|
||||
aggressor
|
||||
price
|
||||
size
|
||||
createdAt
|
||||
type
|
||||
buyerFee {
|
||||
makerFee
|
||||
infrastructureFee
|
||||
liquidityFee
|
||||
}
|
||||
sellerFee {
|
||||
makerFee
|
||||
infrastructureFee
|
||||
liquidityFee
|
||||
}
|
||||
}
|
||||
`;
|
||||
export const FillsDocument = gql`
|
||||
query Fills($filter: TradesFilter, $pagination: Pagination) {
|
||||
trades(filter: $filter, pagination: $pagination) {
|
||||
@ -107,30 +134,10 @@ export type FillsQueryResult = Apollo.QueryResult<FillsQuery, FillsQueryVariable
|
||||
export const FillsEventDocument = gql`
|
||||
subscription FillsEvent($filter: TradesSubscriptionFilter!) {
|
||||
tradesStream(filter: $filter) {
|
||||
id
|
||||
marketId
|
||||
buyOrder
|
||||
sellOrder
|
||||
buyerId
|
||||
sellerId
|
||||
aggressor
|
||||
price
|
||||
size
|
||||
createdAt
|
||||
type
|
||||
buyerFee {
|
||||
makerFee
|
||||
infrastructureFee
|
||||
liquidityFee
|
||||
}
|
||||
sellerFee {
|
||||
makerFee
|
||||
infrastructureFee
|
||||
liquidityFee
|
||||
}
|
||||
...FillUpdateFields
|
||||
}
|
||||
}
|
||||
`;
|
||||
${FillUpdateFieldsFragmentDoc}`;
|
||||
|
||||
/**
|
||||
* __useFillsEventSubscription__
|
||||
|
@@ -1,74 +1,34 @@
import produce from 'immer';
import orderBy from 'lodash/orderBy';
import {} from '@vegaprotocol/utils';
import type { PageInfo, Edge } from '@vegaprotocol/data-provider';
import type { PageInfo, Cursor } from '@vegaprotocol/data-provider';
import {
  makeDataProvider,
  makeDerivedDataProvider,
  defaultAppend as append,
  paginatedCombineDelta as combineDelta,
  paginatedCombineInsertionData as combineInsertionData,
} from '@vegaprotocol/data-provider';
import type { Market } from '@vegaprotocol/markets';
import { marketsProvider } from '@vegaprotocol/markets';
import { marketsMapProvider } from '@vegaprotocol/markets';
import { FillsDocument, FillsEventDocument } from './__generated__/Fills';
import type {
  FillsQuery,
  FillsQueryVariables,
  FillFieldsFragment,
  FillEdgeFragment,
  FillsEventSubscription,
  FillUpdateFieldsFragment,
  FillsEventSubscriptionVariables,
} from './__generated__/Fills';

const update = (
  data: FillEdgeFragment[] | null,
  delta: FillsEventSubscription['tradesStream']
) => {
  return produce(data, (draft) => {
    orderBy(delta, 'createdAt').forEach((node) => {
      if (draft === null) {
        return;
      }
      const index = draft.findIndex((edge) => edge?.node.id === node.id);
      if (index !== -1) {
        if (draft[index]?.node) {
          Object.assign(draft[index]?.node as FillFieldsFragment, node);
        }
      } else {
        const firstNode = draft[0]?.node;
        if (
          (firstNode && node.createdAt >= firstNode.createdAt) ||
          !firstNode
        ) {
          const { buyerId, sellerId, marketId, ...trade } = node;
          draft.unshift({
            node: {
              ...trade,
              __typename: 'Trade',
              market: {
                __typename: 'Market',
                id: marketId,
              },
              buyer: { id: buyerId, __typename: 'Party' },
              seller: { id: buyerId, __typename: 'Party' },
            },
            cursor: '',
            __typename: 'TradeEdge',
          });
        }
      }
    });
  });
};

export type Trade = Omit<FillFieldsFragment, 'market'> & {
  market?: Market;
  isLastPlaceholder?: boolean;
};
export type TradeEdge = Edge<Trade>;

const getData = (responseData: FillsQuery | null): FillEdgeFragment[] =>
  responseData?.trades?.edges || [];
const getData = (
  responseData: FillsQuery | null
): (FillFieldsFragment & Cursor)[] =>
  responseData?.trades?.edges.map<FillFieldsFragment & Cursor>((edge) => ({
    ...edge.node,
    cursor: edge.cursor,
  })) || [];

const getPageInfo = (responseData: FillsQuery | null): PageInfo | null =>
  responseData?.trades?.pageInfo || null;
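The reworked getData flattens each connection edge into the fill record itself, carrying the cursor alongside the node fields instead of keeping the Edge wrapper. The transformation, restated generically as a self-contained sketch (types simplified, field values invented):

type GenericEdge<T> = { cursor: string; node: T };
const flattenEdges = <T>(
  edges: GenericEdge<T>[] | undefined
): (T & { cursor: string })[] =>
  edges?.map((edge) => ({ ...edge.node, cursor: edge.cursor })) ?? [];

// example:
const edges = [{ cursor: 'c1', node: { id: 'trade-1', price: '100' } }];
const rows = flattenEdges(edges); // [{ id: 'trade-1', price: '100', cursor: 'c1' }]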
@@ -76,16 +36,65 @@ const getPageInfo = (responseData: FillsQuery | null): PageInfo | null =>
const getDelta = (subscriptionData: FillsEventSubscription) =>
  subscriptionData.tradesStream || [];

const mapFillUpdateToFill = (
  fillUpdate: FillUpdateFieldsFragment
): FillFieldsFragment => {
  const { buyerId, sellerId, marketId, ...fill } = fillUpdate;
  return {
    ...fill,
    __typename: 'Trade',
    market: {
      __typename: 'Market',
      id: marketId,
    },
    buyer: { id: buyerId, __typename: 'Party' },
    seller: { id: buyerId, __typename: 'Party' },
  };
};

const mapFillUpdateToFillWithMarket =
  (markets: Record<string, Market>) =>
  (fillUpdate: FillUpdateFieldsFragment): Trade => {
    const { market, ...fill } = mapFillUpdateToFill(fillUpdate);
    return {
      ...fill,
      market: markets[market.id],
    };
  };

const update = <T extends Omit<FillFieldsFragment, 'market'> & Cursor>(
  data: T[] | null,
  delta: ReturnType<typeof getDelta>,
  variables: FillsQueryVariables,
  mapDeltaToData: (delta: FillUpdateFieldsFragment) => T
): T[] => {
  const updatedData = data ? [...data] : ([] as T[]);
  orderBy(delta, 'createdAt', 'desc').forEach((fillUpdate) => {
    const index = data?.findIndex((fill) => fill.id === fillUpdate.id) ?? -1;
    if (index !== -1) {
      updatedData[index] = {
        ...updatedData[index],
        ...mapDeltaToData(fillUpdate),
      };
    } else if (!data?.length || fillUpdate.createdAt >= data[0].createdAt) {
      updatedData.unshift(mapDeltaToData(fillUpdate));
    }
  });
  return updatedData;
};

export const fillsProvider = makeDataProvider<
  Parameters<typeof getData>['0'],
  ReturnType<typeof getData>,
  Parameters<typeof getDelta>['0'],
  ReturnType<typeof getDelta>,
  FillsQueryVariables
  FillsQueryVariables,
  FillsEventSubscriptionVariables
>({
  query: FillsDocument,
  subscriptionQuery: FillsEventDocument,
  update,
  update: (data, delta, reload, variables) =>
    update(data, delta, variables, mapFillUpdateToFill),
  getData,
  getDelta,
  pagination: {
@@ -93,30 +102,41 @@ export const fillsProvider = makeDataProvider<
    append,
    first: 100,
  },
  getSubscriptionVariables: ({ filter }) => {
    const variables: FillsEventSubscriptionVariables = { filter: {} };
    if (filter) {
      variables.filter = {
        partyIds: filter.partyIds,
        marketIds: filter.marketIds,
      };
    }
    return variables;
  },
});

export const fillsWithMarketProvider = makeDerivedDataProvider<
  (TradeEdge | null)[],
  Trade[],
  never,
  FillsQueryVariables
>(
  [
    fillsProvider,
    (callback, client) => marketsProvider(callback, client, undefined),
    (callback, client) => marketsMapProvider(callback, client, undefined),
  ],
  (partsData): (TradeEdge | null)[] =>
    (partsData[0] as ReturnType<typeof getData>)?.map(
      (edge) =>
        edge && {
          cursor: edge.cursor,
          node: {
            ...edge.node,
            market: (partsData[1] as Market[]).find(
              (market) => market.id === edge.node.market.id
            ),
          },
        }
    ) || null,
  combineDelta<Trade, ReturnType<typeof getDelta>['0']>,
  combineInsertionData<Trade>
  (partsData, variables, prevData, parts): Trade[] | null => {
    if (prevData && parts[0].isUpdate) {
      return update(
        prevData,
        parts[0].delta as ReturnType<typeof getDelta>,
        variables,
        mapFillUpdateToFillWithMarket(partsData[1] as Record<string, Market>)
      );
    }
    return ((partsData[0] as ReturnType<typeof getData>) || []).map(
      (trade) => ({
        ...trade,
        market: (partsData[1] as Record<string, Market>)[trade.market.id],
      })
    );
  }
);
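A hedged sketch of how a component now consumes the derived provider, based on the fills-manager change further down in this diff; partyId and marketId are assumed component props and the call must run inside a React component. The payoff of the cleanup is that data arrives as market-joined Trade rows rather than edges, so it can be handed to the grid directly:

const { data, error } = useDataProvider({
  dataProvider: fillsWithMarketProvider,
  variables: {
    filter: marketId
      ? { partyIds: [partyId], marketIds: [marketId] }
      : { partyIds: [partyId] },
  },
});
// data: Trade[] | null, each row already carrying its Market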
@ -1,10 +1,11 @@
|
||||
import compact from 'lodash/compact';
|
||||
import type { AgGridReact } from 'ag-grid-react';
|
||||
import { useRef } from 'react';
|
||||
import { t } from '@vegaprotocol/i18n';
|
||||
import { FillsTable } from './fills-table';
|
||||
import { useFillsList } from './use-fills-list';
|
||||
import { useBottomPlaceholder } from '@vegaprotocol/datagrid';
|
||||
import { useDataProvider } from '@vegaprotocol/data-provider';
|
||||
import type * as Schema from '@vegaprotocol/types';
|
||||
import { fillsWithMarketProvider } from './fills-data-provider';
|
||||
|
||||
interface FillsManagerProps {
|
||||
partyId: string;
|
||||
@ -20,31 +21,36 @@ export const FillsManager = ({
|
||||
storeKey,
|
||||
}: FillsManagerProps) => {
|
||||
const gridRef = useRef<AgGridReact | null>(null);
|
||||
const scrolledToTop = useRef(true);
|
||||
const { data, error } = useFillsList({
|
||||
partyId,
|
||||
marketId,
|
||||
gridRef,
|
||||
scrolledToTop,
|
||||
const filter: Schema.TradesFilter | Schema.TradesSubscriptionFilter = {
|
||||
partyIds: [partyId],
|
||||
};
|
||||
if (marketId) {
|
||||
filter.marketIds = [marketId];
|
||||
}
|
||||
const { data, error } = useDataProvider({
|
||||
dataProvider: fillsWithMarketProvider,
|
||||
update: ({ data }) => {
|
||||
if (data?.length && gridRef.current?.api) {
|
||||
gridRef.current?.api.setRowData(data);
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
},
|
||||
variables: { filter },
|
||||
});
|
||||
|
||||
const bottomPlaceholderProps = useBottomPlaceholder({
|
||||
gridRef,
|
||||
});
|
||||
|
||||
const fills = compact(data).map((e) => e.node);
|
||||
|
||||
return (
|
||||
<div className="h-full relative">
|
||||
<FillsTable
|
||||
ref={gridRef}
|
||||
rowData={fills}
|
||||
partyId={partyId}
|
||||
onMarketClick={onMarketClick}
|
||||
storeKey={storeKey}
|
||||
{...bottomPlaceholderProps}
|
||||
overlayNoRowsTemplate={error ? error.message : t('No fills')}
|
||||
/>
|
||||
</div>
|
||||
<FillsTable
|
||||
ref={gridRef}
|
||||
rowData={data}
|
||||
partyId={partyId}
|
||||
onMarketClick={onMarketClick}
|
||||
storeKey={storeKey}
|
||||
{...bottomPlaceholderProps}
|
||||
overlayNoRowsTemplate={error ? error.message : t('No fills')}
|
||||
/>
|
||||
);
|
||||
};
|
||||
|
@ -1,95 +0,0 @@
|
||||
import type { AgGridReact } from 'ag-grid-react';
|
||||
import { MockedProvider } from '@apollo/client/testing';
|
||||
import { renderHook } from '@testing-library/react';
|
||||
import { useFillsList } from './use-fills-list';
|
||||
import type { TradeEdge } from './fills-data-provider';
|
||||
|
||||
let mockData = null;
|
||||
let mockDataProviderData = {
|
||||
data: mockData as (TradeEdge | null)[] | null,
|
||||
error: undefined,
|
||||
loading: true,
|
||||
};
|
||||
|
||||
let updateMock: jest.Mock;
|
||||
const mockDataProvider = jest.fn((args) => {
|
||||
updateMock = args.update;
|
||||
return mockDataProviderData;
|
||||
});
|
||||
jest.mock('@vegaprotocol/data-provider', () => ({
|
||||
...jest.requireActual('@vegaprotocol/data-provider'),
|
||||
useDataProvider: jest.fn((args) => mockDataProvider(args)),
|
||||
}));
|
||||
|
||||
describe('useFillsList Hook', () => {
|
||||
const mockRefreshAgGridApi = jest.fn();
|
||||
const partyId = 'partyId';
|
||||
const gridRef = {
|
||||
current: {
|
||||
api: {
|
||||
refreshInfiniteCache: mockRefreshAgGridApi,
|
||||
getModel: () => ({ getType: () => 'infinite' }),
|
||||
},
|
||||
} as unknown as AgGridReact,
|
||||
};
|
||||
const scrolledToTop = {
|
||||
current: false,
|
||||
};
|
||||
|
||||
afterEach(() => {
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
|
||||
it('should return proper dataProvider results', () => {
|
||||
const { result } = renderHook(
|
||||
() => useFillsList({ partyId, gridRef, scrolledToTop }),
|
||||
{
|
||||
wrapper: MockedProvider,
|
||||
}
|
||||
);
|
||||
expect(result.current).toMatchObject({
|
||||
data: null,
|
||||
error: undefined,
|
||||
loading: true,
|
||||
addNewRows: expect.any(Function),
|
||||
getRows: expect.any(Function),
|
||||
});
|
||||
});
|
||||
|
||||
it('return proper mocked results', () => {
|
||||
mockData = [
|
||||
{
|
||||
node: {
|
||||
id: 'data_id_1',
|
||||
},
|
||||
} as unknown as TradeEdge,
|
||||
{
|
||||
node: {
|
||||
id: 'data_id_2',
|
||||
},
|
||||
} as unknown as TradeEdge,
|
||||
];
|
||||
mockDataProviderData = {
|
||||
...mockDataProviderData,
|
||||
data: mockData,
|
||||
loading: false,
|
||||
};
|
||||
const { result } = renderHook(
|
||||
() => useFillsList({ partyId, gridRef, scrolledToTop }),
|
||||
{
|
||||
wrapper: MockedProvider,
|
||||
}
|
||||
);
|
||||
expect(result.current).toMatchObject({
|
||||
data: mockData,
|
||||
error: undefined,
|
||||
loading: false,
|
||||
addNewRows: expect.any(Function),
|
||||
getRows: expect.any(Function),
|
||||
});
|
||||
updateMock({ data: mockData });
|
||||
expect(mockRefreshAgGridApi).not.toHaveBeenCalled();
|
||||
updateMock({ data: mockData });
|
||||
expect(mockRefreshAgGridApi).toHaveBeenCalled();
|
||||
});
|
||||
});
|
@ -1,123 +0,0 @@
|
||||
import type { RefObject } from 'react';
|
||||
import type { AgGridReact } from 'ag-grid-react';
|
||||
import { useCallback, useRef } from 'react';
|
||||
import { makeInfiniteScrollGetRows } from '@vegaprotocol/data-provider';
|
||||
import type * as Types from '@vegaprotocol/types';
|
||||
import { updateGridData } from '@vegaprotocol/datagrid';
|
||||
import { useDataProvider } from '@vegaprotocol/data-provider';
|
||||
import type { Trade, TradeEdge } from './fills-data-provider';
|
||||
import { fillsWithMarketProvider } from './fills-data-provider';
|
||||
|
||||
interface Props {
|
||||
partyId: string;
|
||||
marketId?: string;
|
||||
gridRef: RefObject<AgGridReact>;
|
||||
scrolledToTop: RefObject<boolean>;
|
||||
}
|
||||
|
||||
export const useFillsList = ({
|
||||
partyId,
|
||||
marketId,
|
||||
gridRef,
|
||||
scrolledToTop,
|
||||
}: Props) => {
|
||||
const dataRef = useRef<(TradeEdge | null)[] | null>(null);
|
||||
const totalCountRef = useRef<number | undefined>(undefined);
|
||||
const newRows = useRef(0);
|
||||
const placeholderAdded = useRef(-1);
|
||||
|
||||
const makeBottomPlaceholders = useCallback((trade?: Trade) => {
|
||||
if (!trade) {
|
||||
if (placeholderAdded.current >= 0) {
|
||||
dataRef.current?.splice(placeholderAdded.current, 1);
|
||||
}
|
||||
placeholderAdded.current = -1;
|
||||
} else if (placeholderAdded.current === -1) {
|
||||
dataRef.current?.push({
|
||||
node: { ...trade, id: `${trade?.id}-1`, isLastPlaceholder: true },
|
||||
});
|
||||
placeholderAdded.current = (dataRef.current?.length || 0) - 1;
|
||||
}
|
||||
}, []);
|
||||
|
||||
const addNewRows = useCallback(() => {
|
||||
if (newRows.current === 0) {
|
||||
return;
|
||||
}
|
||||
if (totalCountRef.current !== undefined) {
|
||||
totalCountRef.current += newRows.current;
|
||||
}
|
||||
newRows.current = 0;
|
||||
gridRef.current?.api?.refreshInfiniteCache();
|
||||
}, [gridRef]);
|
||||
|
||||
const update = useCallback(
|
||||
({
|
||||
data,
|
||||
delta,
|
||||
}: {
|
||||
data: (TradeEdge | null)[] | null;
|
||||
delta?: Trade[];
|
||||
}) => {
|
||||
if (dataRef.current?.length) {
|
||||
if (!scrolledToTop.current) {
|
||||
const createdAt = dataRef.current?.[0]?.node.createdAt;
|
||||
if (createdAt) {
|
||||
newRows.current += (delta || []).filter(
|
||||
(trade) => trade.createdAt > createdAt
|
||||
).length;
|
||||
}
|
||||
}
|
||||
return updateGridData(dataRef, data, gridRef);
|
||||
}
|
||||
dataRef.current = data;
|
||||
return false;
|
||||
},
|
||||
[gridRef, scrolledToTop]
|
||||
);
|
||||
|
||||
const insert = useCallback(
|
||||
({
|
||||
data,
|
||||
totalCount,
|
||||
}: {
|
||||
data: (TradeEdge | null)[] | null;
|
||||
totalCount?: number;
|
||||
}) => {
|
||||
totalCountRef.current = totalCount;
|
||||
return updateGridData(dataRef, data, gridRef);
|
||||
},
|
||||
[gridRef]
|
||||
);
|
||||
|
||||
const filter: Types.TradesFilter & Types.TradesSubscriptionFilter = {
|
||||
partyIds: [partyId],
|
||||
};
|
||||
if (marketId) {
|
||||
filter.marketIds = [marketId];
|
||||
}
|
||||
|
||||
const { data, error, loading, load, totalCount, reload } = useDataProvider({
|
||||
dataProvider: fillsWithMarketProvider,
|
||||
update,
|
||||
insert,
|
||||
variables: { filter },
|
||||
});
|
||||
totalCountRef.current = totalCount;
|
||||
|
||||
const getRows = makeInfiniteScrollGetRows<TradeEdge>(
|
||||
dataRef,
|
||||
totalCountRef,
|
||||
load,
|
||||
newRows
|
||||
);
|
||||
return {
|
||||
data,
|
||||
error,
|
||||
loading,
|
||||
addNewRows,
|
||||
getRows,
|
||||
reload,
|
||||
makeBottomPlaceholders,
|
||||
};
|
||||
};
|
@ -1,22 +1,12 @@
|
||||
import type { Asset } from '@vegaprotocol/assets';
|
||||
import { assetsProvider } from '@vegaprotocol/assets';
|
||||
import { assetsMapProvider } from '@vegaprotocol/assets';
|
||||
import type { Market } from '@vegaprotocol/markets';
|
||||
import { marketsProvider } from '@vegaprotocol/markets';
|
||||
import { makeInfiniteScrollGetRows } from '@vegaprotocol/data-provider';
|
||||
import { updateGridData } from '@vegaprotocol/datagrid';
|
||||
import { marketsMapProvider } from '@vegaprotocol/markets';
|
||||
import {
|
||||
makeDataProvider,
|
||||
makeDerivedDataProvider,
|
||||
useDataProvider,
|
||||
} from '@vegaprotocol/data-provider';
|
||||
import type * as Schema from '@vegaprotocol/types';
|
||||
import type { AgGridReact } from 'ag-grid-react';
|
||||
import produce from 'immer';
|
||||
import orderBy from 'lodash/orderBy';
|
||||
import uniqBy from 'lodash/uniqBy';
|
||||
import type { RefObject } from 'react';
|
||||
import { useCallback, useMemo, useRef } from 'react';
|
||||
import type { Filter } from './ledger-manager';
|
||||
|
||||
import type {
|
||||
LedgerEntriesQuery,
|
||||
LedgerEntriesQueryVariables,
|
||||
@ -30,171 +20,58 @@ export type LedgerEntry = LedgerEntryFragment & {
|
||||
marketReceiver: Market | null | undefined;
|
||||
};
|
||||
|
||||
export type AggregatedLedgerEntriesEdge = Schema.AggregatedLedgerEntriesEdge;
|
||||
export type AggregatedLedgerEntriesNode = Omit<
|
||||
AggregatedLedgerEntriesEdge,
|
||||
'node'
|
||||
> & {
|
||||
node: LedgerEntry;
|
||||
};
|
||||
type Edge = LedgerEntriesQuery['ledgerEntries']['edges'][number];
|
||||
|
||||
const isLedgerEntryEdge = (entry: Edge): entry is NonNullable<Edge> =>
|
||||
entry !== null;
|
||||
|
||||
const getData = (responseData: LedgerEntriesQuery | null) => {
|
||||
return responseData?.ledgerEntries?.edges || [];
|
||||
return (
|
||||
responseData?.ledgerEntries?.edges
|
||||
.filter(isLedgerEntryEdge)
|
||||
.map((edge) => edge.node) || []
|
||||
);
|
||||
};
|
||||
|
||||
export const update = (
|
||||
data: ReturnType<typeof getData> | null,
|
||||
delta: ReturnType<typeof getData>,
|
||||
reload: () => void,
|
||||
variables: LedgerEntriesQueryVariables
|
||||
) => {
|
||||
if (!data) {
|
||||
return data;
|
||||
}
|
||||
return produce(data, (draft) => {
|
||||
// A single update can contain the same order with multiple updates, so we need to find
|
||||
// the latest version of the order and only update using that
|
||||
const incoming = uniqBy(
|
||||
orderBy(delta, (entry) => entry?.node.vegaTime, 'desc'),
|
||||
'id'
|
||||
);
|
||||
|
||||
// Add or update incoming orders
|
||||
incoming.reverse().forEach((node) => {
|
||||
const index = draft.findIndex(
|
||||
(edge) => edge?.node.vegaTime === node?.node.vegaTime
|
||||
);
|
||||
const newer =
|
||||
draft.length === 0 || node?.node.vegaTime >= draft[0]?.node.vegaTime;
|
||||
let doesFilterPass = true;
|
||||
if (
|
||||
doesFilterPass &&
|
||||
variables?.dateRange?.start &&
|
||||
new Date(node?.node.vegaTime) <= new Date(variables?.dateRange?.start)
|
||||
) {
|
||||
doesFilterPass = false;
|
||||
}
|
||||
if (
|
||||
doesFilterPass &&
|
||||
variables?.dateRange?.end &&
|
||||
new Date(node?.node.vegaTime) >= new Date(variables?.dateRange?.end)
|
||||
) {
|
||||
doesFilterPass = false;
|
||||
}
|
||||
if (index !== -1) {
|
||||
if (doesFilterPass) {
|
||||
// Object.assign(draft[index]?.node, node?.node);
|
||||
if (newer) {
|
||||
draft.unshift(...draft.splice(index, 1));
|
||||
}
|
||||
} else {
|
||||
draft.splice(index, 1);
|
||||
}
|
||||
} else if (newer && doesFilterPass) {
|
||||
draft.unshift(node);
|
||||
}
|
||||
});
|
||||
});
|
||||
};
|
||||
|
||||
const ledgerEntriesOnlyProvider = makeDataProvider({
|
||||
const ledgerEntriesOnlyProvider = makeDataProvider<
|
||||
LedgerEntriesQuery,
|
||||
ReturnType<typeof getData>,
|
||||
never,
|
||||
never,
|
||||
LedgerEntriesQueryVariables
|
||||
>({
|
||||
query: LedgerEntriesDocument,
|
||||
getData,
|
||||
getDelta: getData,
|
||||
update,
|
||||
additionalContext: {
|
||||
isEnlargedTimeout: true,
|
||||
},
|
||||
});
|
||||
|
||||
export const ledgerEntriesProvider = makeDerivedDataProvider<
|
||||
AggregatedLedgerEntriesNode[],
|
||||
AggregatedLedgerEntriesNode[],
|
||||
LedgerEntry[],
|
||||
never,
|
||||
LedgerEntriesQueryVariables
|
||||
>(
|
||||
[
|
||||
ledgerEntriesOnlyProvider,
|
||||
(callback, client) => assetsProvider(callback, client, undefined),
|
||||
(callback, client) => marketsProvider(callback, client, undefined),
|
||||
(callback, client) => assetsMapProvider(callback, client, undefined),
|
||||
(callback, client) => marketsMapProvider(callback, client, undefined),
|
||||
],
|
||||
([entries, assets, markets]) => {
|
||||
return entries.map((edge: AggregatedLedgerEntriesEdge) => {
|
||||
const entry = edge.node;
|
||||
const asset = assets.find((asset: Asset) => asset.id === entry.assetId);
|
||||
const marketSender = markets.find(
|
||||
(market: Market) => market.id === entry.fromAccountMarketId
|
||||
);
|
||||
const marketReceiver = markets.find(
|
||||
(market: Market) => market.id === entry.toAccountMarketId
|
||||
);
|
||||
const cursor = edge?.cursor;
|
||||
return {
|
||||
node: { ...entry, asset, marketSender, marketReceiver },
|
||||
cursor,
|
||||
};
|
||||
(partsData) => {
|
||||
const entries = partsData[0] as ReturnType<typeof getData>;
|
||||
const assets = partsData[1] as Record<string, Asset>;
|
||||
const markets = partsData[1] as Record<string, Market>;
|
||||
return entries.map((entry) => {
|
||||
const asset = entry.assetId
|
||||
? (assets as Record<string, Asset>)[entry.assetId]
|
||||
: null;
|
||||
const marketSender = entry.fromAccountMarketId
|
||||
? markets[entry.fromAccountMarketId]
|
||||
: null;
|
||||
const marketReceiver = entry.toAccountMarketId
|
||||
? markets[entry.toAccountMarketId]
|
||||
: null;
|
||||
return { ...entry, asset, marketSender, marketReceiver };
|
||||
});
|
||||
}
|
||||
);
|
||||
|
||||
interface Props {
|
||||
partyId: string;
|
||||
filter?: Filter;
|
||||
gridRef: RefObject<AgGridReact>;
|
||||
}
|
||||
|
||||
export const useLedgerEntriesDataProvider = ({
|
||||
partyId,
|
||||
filter,
|
||||
gridRef,
|
||||
}: Props) => {
|
||||
const dataRef = useRef<AggregatedLedgerEntriesEdge[] | null>(null);
|
||||
const totalCountRef = useRef<number>();
|
||||
|
||||
const variables = useMemo<LedgerEntriesQueryVariables>(
|
||||
() => ({
|
||||
partyId,
|
||||
dateRange: filter?.vegaTime?.value,
|
||||
pagination: {
|
||||
first: 5000,
|
||||
},
|
||||
}),
|
||||
[partyId, filter?.vegaTime?.value]
|
||||
);
|
||||
|
||||
const update = useCallback(
|
||||
({ data }: { data: AggregatedLedgerEntriesEdge[] | null }) => {
|
||||
return updateGridData(dataRef, data, gridRef);
|
||||
},
|
||||
[gridRef]
|
||||
);
|
||||
|
||||
const insert = useCallback(
|
||||
({
|
||||
data,
|
||||
totalCount,
|
||||
}: {
|
||||
data: AggregatedLedgerEntriesEdge[] | null;
|
||||
totalCount?: number;
|
||||
}) => {
|
||||
totalCountRef.current = totalCount;
|
||||
return updateGridData(dataRef, data, gridRef);
|
||||
},
|
||||
[gridRef]
|
||||
);
|
||||
|
||||
const { data, error, loading, load, totalCount, reload } = useDataProvider({
|
||||
dataProvider: ledgerEntriesProvider,
|
||||
update,
|
||||
insert,
|
||||
variables,
|
||||
skip: !variables.partyId,
|
||||
});
|
||||
totalCountRef.current = totalCount;
|
||||
|
||||
const getRows = makeInfiniteScrollGetRows<AggregatedLedgerEntriesEdge>(
|
||||
dataRef,
|
||||
totalCountRef,
|
||||
load
|
||||
);
|
||||
return { loading, error, data, getRows, reload };
|
||||
};
|
||||
|
@ -2,10 +2,12 @@ import { t } from '@vegaprotocol/i18n';
|
||||
import type * as Schema from '@vegaprotocol/types';
|
||||
import type { FilterChangedEvent } from 'ag-grid-community';
|
||||
import type { AgGridReact } from 'ag-grid-react';
|
||||
import { useCallback, useRef, useState } from 'react';
|
||||
import { useCallback, useRef, useState, useMemo } from 'react';
|
||||
import { subDays, formatRFC3339 } from 'date-fns';
|
||||
import { useLedgerEntriesDataProvider } from './ledger-entries-data-provider';
|
||||
import { ledgerEntriesProvider } from './ledger-entries-data-provider';
|
||||
import type { LedgerEntriesQueryVariables } from './__generated__/LedgerEntries';
|
||||
import { LedgerTable } from './ledger-table';
|
||||
import { useDataProvider } from '@vegaprotocol/data-provider';
|
||||
import type * as Types from '@vegaprotocol/types';
|
||||
import { LedgerExportLink } from './ledger-export-link';
|
||||
|
||||
@ -26,10 +28,21 @@ export const LedgerManager = ({ partyId }: { partyId: string }) => {
|
||||
const gridRef = useRef<AgGridReact | null>(null);
|
||||
const [filter, setFilter] = useState<Filter>(defaultFilter);
|
||||
|
||||
const { data, error } = useLedgerEntriesDataProvider({
|
||||
partyId,
|
||||
filter,
|
||||
gridRef,
|
||||
const variables = useMemo<LedgerEntriesQueryVariables>(
|
||||
() => ({
|
||||
partyId,
|
||||
dateRange: filter?.vegaTime?.value,
|
||||
pagination: {
|
||||
first: 5000,
|
||||
},
|
||||
}),
|
||||
[partyId, filter?.vegaTime?.value]
|
||||
);
|
||||
|
||||
const { data, error } = useDataProvider({
|
||||
dataProvider: ledgerEntriesProvider,
|
||||
variables,
|
||||
skip: !variables.partyId,
|
||||
});
|
||||
|
||||
const onFilterChanged = useCallback((event: FilterChangedEvent) => {
|
||||
@ -37,20 +50,15 @@ export const LedgerManager = ({ partyId }: { partyId: string }) => {
|
||||
setFilter(updatedFilter);
|
||||
}, []);
|
||||
|
||||
// allow passing undefined to grid so that loading state is shown
|
||||
const extractedData = data?.map((item) => item.node);
|
||||
|
||||
return (
|
||||
<div className="h-full relative">
|
||||
<LedgerTable
|
||||
ref={gridRef}
|
||||
rowData={extractedData}
|
||||
rowData={data}
|
||||
onFilterChanged={onFilterChanged}
|
||||
overlayNoRowsTemplate={error ? error.message : t('No entries')}
|
||||
/>
|
||||
{extractedData && (
|
||||
<LedgerExportLink entries={extractedData} partyId={partyId} />
|
||||
)}
|
||||
{data && <LedgerExportLink entries={data} partyId={partyId} />}
|
||||
</div>
|
||||
);
|
||||
};
|
||||
|
@ -1,23 +1,23 @@
|
||||
import { update } from './order-data-provider';
|
||||
import {
|
||||
update,
|
||||
mapOrderUpdateToOrder,
|
||||
filterOrderUpdates,
|
||||
} from './order-data-provider';
|
||||
import type { OrderUpdateFieldsFragment, OrderFieldsFragment } from '../';
|
||||
import type { Edge } from '@vegaprotocol/data-provider';
|
||||
|
||||
describe('order data provider', () => {
|
||||
it('puts incoming data in proper place', () => {
|
||||
const data = [
|
||||
{
|
||||
node: {
|
||||
id: '2',
|
||||
createdAt: new Date('2022-01-29').toISOString(),
|
||||
},
|
||||
id: '2',
|
||||
createdAt: new Date('2022-01-29').toISOString(),
|
||||
},
|
||||
|
||||
{
|
||||
node: {
|
||||
id: '1',
|
||||
createdAt: new Date('2022-01-28').toISOString(),
|
||||
},
|
||||
id: '1',
|
||||
createdAt: new Date('2022-01-28').toISOString(),
|
||||
},
|
||||
] as Edge<OrderFieldsFragment>[];
|
||||
] as OrderFieldsFragment[];
|
||||
|
||||
const delta = [
|
||||
// this one should be dropped because id don't exits and it's older than newest
|
||||
@ -52,39 +52,41 @@ describe('order data provider', () => {
|
||||
createdAt: new Date('2022-02-05').toISOString(),
|
||||
},
|
||||
] as OrderUpdateFieldsFragment[];
|
||||
const updatedData = update(data, delta, () => null, { partyId: '0x123' });
|
||||
const updatedData = update(
|
||||
data,
|
||||
filterOrderUpdates(delta),
|
||||
{ partyId: '0x123' },
|
||||
mapOrderUpdateToOrder
|
||||
);
|
||||
expect(updatedData?.findIndex((node) => node.id === delta[0].id)).toEqual(
|
||||
-1
|
||||
);
|
||||
expect(updatedData && updatedData[3].id).toEqual(delta[2].id);
|
||||
expect(updatedData && updatedData[3].updatedAt).toEqual(delta[2].updatedAt);
|
||||
expect(updatedData && updatedData[0].id).toEqual(delta[5].id);
|
||||
expect(updatedData && updatedData[1].id).toEqual(delta[3].id);
|
||||
expect(updatedData && updatedData[2].id).toEqual(delta[4].id);
|
||||
expect(updatedData && updatedData[2].updatedAt).toEqual(delta[4].updatedAt);
|
||||
expect(
|
||||
updatedData?.findIndex((edge) => edge.node.id === delta[0].id)
|
||||
).toEqual(-1);
|
||||
expect(updatedData && updatedData[3].node.id).toEqual(delta[2].id);
|
||||
expect(updatedData && updatedData[3].node.updatedAt).toEqual(
|
||||
delta[2].updatedAt
|
||||
);
|
||||
expect(updatedData && updatedData[0].node.id).toEqual(delta[5].id);
|
||||
expect(updatedData && updatedData[1].node.id).toEqual(delta[3].id);
|
||||
expect(updatedData && updatedData[2].node.id).toEqual(delta[4].id);
|
||||
expect(updatedData && updatedData[2].node.updatedAt).toEqual(
|
||||
delta[4].updatedAt
|
||||
);
|
||||
expect(update([], delta, () => null, { partyId: '0x123' })?.length).toEqual(
|
||||
5
|
||||
);
|
||||
update(
|
||||
[],
|
||||
filterOrderUpdates(delta),
|
||||
{ partyId: '0x123' },
|
||||
mapOrderUpdateToOrder
|
||||
)?.length
|
||||
).toEqual(5);
|
||||
});
|
||||
it('add only data matching date range filter', () => {
|
||||
const data = [
|
||||
{
|
||||
node: {
|
||||
id: '1',
|
||||
createdAt: new Date('2022-01-29').toISOString(),
|
||||
},
|
||||
id: '1',
|
||||
createdAt: new Date('2022-01-29').toISOString(),
|
||||
},
|
||||
{
|
||||
node: {
|
||||
id: '2',
|
||||
createdAt: new Date('2022-01-30').toISOString(),
|
||||
},
|
||||
id: '2',
|
||||
createdAt: new Date('2022-01-30').toISOString(),
|
||||
},
|
||||
] as Edge<OrderFieldsFragment>[];
|
||||
] as OrderFieldsFragment[];
|
||||
|
||||
const delta = [
|
||||
// this one should be ignored because it does not match date range
|
||||
@ -105,22 +107,23 @@ describe('order data provider', () => {
|
||||
},
|
||||
] as OrderUpdateFieldsFragment[];
|
||||
|
||||
const updatedData = update(data, delta, () => null, {
|
||||
partyId: '0x123',
|
||||
filter: {
|
||||
dateRange: { end: new Date('2022-02-01').toISOString() },
|
||||
const updatedData = update(
|
||||
data,
|
||||
filterOrderUpdates(delta),
|
||||
{
|
||||
partyId: '0x123',
|
||||
filter: {
|
||||
dateRange: { end: new Date('2022-02-01').toISOString() },
|
||||
},
|
||||
},
|
||||
});
|
||||
expect(
|
||||
updatedData?.findIndex((edge) => edge.node.id === delta[0].id)
|
||||
).toEqual(-1);
|
||||
expect(updatedData && updatedData[0].node.id).toEqual(delta[2].id);
|
||||
expect(updatedData && updatedData[0].node.updatedAt).toEqual(
|
||||
delta[2].updatedAt
|
||||
mapOrderUpdateToOrder
|
||||
);
|
||||
expect(updatedData && updatedData[2].node.id).toEqual(delta[1].id);
|
||||
expect(updatedData && updatedData[2].node.updatedAt).toEqual(
|
||||
delta[1].updatedAt
|
||||
expect(updatedData?.findIndex((node) => node.id === delta[0].id)).toEqual(
|
||||
-1
|
||||
);
|
||||
expect(updatedData && updatedData[0].id).toEqual(delta[2].id);
|
||||
expect(updatedData && updatedData[0].updatedAt).toEqual(delta[2].updatedAt);
|
||||
expect(updatedData && updatedData[2].id).toEqual(delta[1].id);
|
||||
expect(updatedData && updatedData[2].updatedAt).toEqual(delta[1].updatedAt);
|
||||
});
|
||||
});
|
||||
|
@ -6,8 +6,8 @@ import {
defaultAppend as append,
} from '@vegaprotocol/data-provider';
import type { Market } from '@vegaprotocol/markets';
import { marketsProvider } from '@vegaprotocol/markets';
import type { PageInfo, Edge } from '@vegaprotocol/data-provider';
import { marketsMapProvider } from '@vegaprotocol/markets';
import type { PageInfo, Edge, Cursor } from '@vegaprotocol/data-provider';
import { OrderStatus } from '@vegaprotocol/types';
import type {
OrderFieldsFragment,
@ -15,6 +15,7 @@ import type {
OrdersQuery,
OrdersUpdateSubscription,
OrdersQueryVariables,
OrdersUpdateSubscriptionVariables,
} from './__generated__/Orders';
import { OrdersDocument, OrdersUpdateDocument } from './__generated__/Orders';
import type { ApolloClient } from '@apollo/client';
@ -37,18 +38,21 @@ const orderMatchFilters = (
if (!order) {
return true;
}

if (
variables?.filter?.status &&
!(order.status && variables.filter.status.includes(order.status))
) {
return false;
}

if (
variables?.filter?.liveOnly &&
!(order.status && liveOnlyOrderStatuses.includes(order.status))
) {
return false;
}

if (
variables?.filter?.types &&
!(order.type && variables.filter.types.includes(order.type))
@ -76,10 +80,11 @@ const orderMatchFilters = (
) {
return false;
}

return true;
};

const mapOrderUpdateToOrder = (
export const mapOrderUpdateToOrder = (
orderUpdate: OrderUpdateFieldsFragment
): OrderFieldsFragment => {
const { marketId, liquidityProvisionId, ...order } = orderUpdate;
@ -101,10 +106,36 @@ const mapOrderUpdateToOrder = (
};
};

const mapOrderUpdateToOrderWithMarket =
(markets: Record<string, Market>) =>
(orderUpdate: OrderUpdateFieldsFragment): Order => {
const { market, ...order } = mapOrderUpdateToOrder(orderUpdate);
return {
...order,
market: markets[market.id],
};
};

const getData = (
responseData: OrdersQuery | null
): Edge<OrderFieldsFragment>[] =>
responseData?.party?.ordersConnection?.edges || [];
): (OrderFieldsFragment & Cursor)[] =>
responseData?.party?.ordersConnection?.edges?.map<
OrderFieldsFragment & Cursor
>((edge) => ({ ...edge.node, cursor: edge.cursor })) || [];

export const filterOrderUpdates = (
orders: OrdersUpdateSubscription['orders']
) => {
// A single update can contain the same order with multiple updates, so we need to find
// the latest version of the order and only update using that
return orderBy(
uniqBy(
orderBy(orders, (order) => order.updatedAt || order.createdAt, 'desc'),
'id'
),
'createdAt'
);
};

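For context, a minimal self-contained sketch of the de-duplication that filterOrderUpdates performs, using only lodash. The order shape here is simplified and hypothetical, not the real fragment type:

import orderBy from 'lodash/orderBy';
import uniqBy from 'lodash/uniqBy';

type OrderLike = { id: string; createdAt: string; updatedAt?: string | null };

// Keep only the most recent version of each order id, then return the
// survivors sorted by creation time (the same shape as filterOrderUpdates).
const dedupe = (updates: OrderLike[]): OrderLike[] =>
  orderBy(
    uniqBy(
      orderBy(updates, (o) => o.updatedAt || o.createdAt, 'desc'),
      'id'
    ),
    'createdAt'
  );

// Two updates arrive for order "a": only the later one (02:00) survives.
console.log(
  dedupe([
    { id: 'a', createdAt: '2023-01-01T01:00:00Z', updatedAt: '2023-01-01T01:30:00Z' },
    { id: 'a', createdAt: '2023-01-01T01:00:00Z', updatedAt: '2023-01-01T02:00:00Z' },
    { id: 'b', createdAt: '2023-01-01T00:30:00Z' },
  ])
);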
const getDelta = (
subscriptionData: OrdersUpdateSubscription,
@ -114,49 +145,32 @@ const getDelta = (
if (!subscriptionData.orders) {
return [];
}
return subscriptionData.orders;
return filterOrderUpdates(subscriptionData.orders);
};

export const update = (
data: ReturnType<typeof getData> | null,
export const update = <T extends Omit<OrderFieldsFragment, 'market'> & Cursor>(
data: T[] | null,
delta: ReturnType<typeof getDelta>,
reload: () => void,
variables?: OrdersQueryVariables
) => {
if (!data) {
return data;
}
// A single update can contain the same order with multiple updates, so we need to find
// the latest version of the order and only update using that
const incoming = orderBy(
uniqBy(
orderBy(delta, (order) => order.updatedAt || order.createdAt, 'desc'),
'id'
),
'createdAt'
);

const updatedData = [...data];
incoming.forEach((orderUpdate) => {
const index = data.findIndex((edge) => edge.node.id === orderUpdate.id);
const newer =
data.length === 0 || orderUpdate.createdAt >= data[0].node.createdAt;
variables: OrdersQueryVariables,
mapDeltaToData: (delta: OrderUpdateFieldsFragment) => T
): T[] => {
const updatedData = data ? [...data] : ([] as T[]);
delta.forEach((orderUpdate) => {
const index = data?.findIndex((order) => order.id === orderUpdate.id) ?? -1;
const newer = !data?.length || orderUpdate.createdAt >= data[0].createdAt;
const doesFilterPass =
!variables || orderMatchFilters(orderUpdate, variables);
if (index !== -1) {
if (doesFilterPass) {
updatedData[index] = {
...updatedData[index],
node: mapOrderUpdateToOrder(orderUpdate),
...mapDeltaToData(orderUpdate),
};
} else {
updatedData.splice(index, 1);
}
} else if (newer && doesFilterPass) {
updatedData.unshift({
node: mapOrderUpdateToOrder(orderUpdate),
cursor: '',
});
updatedData.unshift(mapDeltaToData(orderUpdate));
}
});
return updatedData;
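To make the generic "update plus mapDeltaToData" refactor concrete, here is a hedged, self-contained sketch of the same pattern with simplified hypothetical Row/Delta types (not the real fragments, and no filter handling):

type Delta = { id: string; createdAt: string; price: string };
type Row = { id: string; createdAt: string; price: string; cursor: string };

// Same idea as update() above: patch rows that already exist, otherwise
// prepend newer rows; the caller-supplied mapper decides the row shape.
const applyDeltas = (
  rows: Row[] | null,
  deltas: Delta[],
  mapDeltaToRow: (d: Delta) => Row
): Row[] => {
  const next = rows ? [...rows] : [];
  deltas.forEach((d) => {
    const index = next.findIndex((row) => row.id === d.id);
    if (index !== -1) {
      next[index] = { ...next[index], ...mapDeltaToRow(d) };
    } else if (!next.length || d.createdAt >= next[0].createdAt) {
      next.unshift(mapDeltaToRow(d));
    }
  });
  return next;
};

// Usage: here the mapper just adds an empty cursor, mirroring how the real
// providers plug in mapOrderUpdateToOrder or mapOrderUpdateToOrderWithMarket.
const rows = applyDeltas(
  null,
  [{ id: '1', createdAt: '2023-01-01T00:00:00Z', price: '100' }],
  (d) => ({ ...d, cursor: '' })
);
console.log(rows.length); // 1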
@ -170,11 +184,13 @@ export const ordersProvider = makeDataProvider<
ReturnType<typeof getData>,
OrdersUpdateSubscription,
ReturnType<typeof getDelta>,
OrdersQueryVariables
OrdersQueryVariables,
OrdersUpdateSubscriptionVariables
>({
query: OrdersDocument,
subscriptionQuery: OrdersUpdateDocument,
update,
update: (data, delta, reload, variables) =>
update(data, delta, variables, mapOrderUpdateToOrder),
getData,
getDelta,
pagination: {
@ -185,6 +201,10 @@ export const ordersProvider = makeDataProvider<
resetDelay: 1000,
additionalContext: { isEnlargedTimeout: true },
fetchPolicy: 'no-cache',
getSubscriptionVariables: ({ partyId, marketIds }) => ({
partyId,
marketIds,
}),
});

export const activeOrdersProvider = makeDerivedDataProvider<
@ -208,27 +228,36 @@ export const activeOrdersProvider = makeDerivedDataProvider<
}
const orders = partsData[0] as ReturnType<typeof getData>;
return variables.marketId
? orders.filter((edge) => variables.marketId === edge.node.market.id)
? orders.filter((order) => variables.marketId === order.market.id)
: orders;
}
);

export const ordersWithMarketProvider = makeDerivedDataProvider<
(Order | null)[],
Order[],
(Order & Cursor)[],
never,
OrdersQueryVariables
>(
[
ordersProvider,
(callback, client) => marketsProvider(callback, client, undefined),
(callback, client) => marketsMapProvider(callback, client, undefined),
],
(partsData): Order[] =>
((partsData[0] as ReturnType<typeof getData>) || []).map((edge) => ({
...edge.node,
market: (partsData[1] as Market[]).find(
(market) => market.id === edge.node.market.id
),
}))
(partsData, variables, prevData, parts): Order[] => {
if (prevData && parts[0].isUpdate) {
return update(
prevData,
parts[0].delta,
variables,
mapOrderUpdateToOrderWithMarket(partsData[1] as Record<string, Market>)
);
}
return ((partsData[0] as ReturnType<typeof getData>) || []).map(
(order) => ({
...order,
market: (partsData[1] as Record<string, Market>)[order.market.id],
})
);
}
);

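One performance point behind swapping marketsProvider for marketsMapProvider above: joining each row against a Record<string, Market> is a constant-time lookup instead of an Array.prototype.find scan per row. A minimal sketch with hypothetical types:

type MarketLike = { id: string; code: string };
type RowLike = { id: string; market: { id: string } };

// Array join: every row scans the whole markets array (O(rows * markets)).
const joinWithFind = (rows: RowLike[], markets: MarketLike[]) =>
  rows.map((row) => ({
    ...row,
    market: markets.find((m) => m.id === row.market.id),
  }));

// Map join: the lookup table is built once (marketsMapProvider already
// supplies it), so the join is O(rows).
const joinWithMap = (rows: RowLike[], marketsById: Record<string, MarketLike>) =>
  rows.map((row) => ({ ...row, market: marketsById[row.market.id] }));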
export const hasActiveOrderProvider = makeDerivedDataProvider<
@ -244,7 +273,7 @@ export const hasAmendableOrderProvider = makeDerivedDataProvider<
>([activeOrdersProvider], (parts) => {
const activeOrders = parts[0] as ReturnType<typeof getData>;
const hasAmendableOrder = activeOrders.some(
(edge) => !(edge.node.liquidityProvision || edge.node.peggedOrder)
(order) => !(order.liquidityProvision || order.peggedOrder)
);
return hasAmendableOrder;
});

@ -32,7 +32,6 @@ describe('OrderListManager', () => {
flush: jest.fn(),
reload: jest.fn(),
load: jest.fn(),
totalCount: undefined,
});
await act(async () => {
render(generateJsx());

@ -87,7 +87,7 @@ export const OrderListManager = ({
gridRef.current.api.setRowData(data);
return true;
}
return true;
return false;
},
});

@ -10,32 +10,33 @@ fragment TradeFields on Trade {
}

query Trades($marketId: ID!, $pagination: Pagination) {
market(id: $marketId) {
id
tradesConnection(pagination: $pagination) {
edges {
node {
...TradeFields
}
cursor
}
pageInfo {
startCursor
endCursor
hasNextPage
hasPreviousPage
trades(filter: { marketIds: [$marketId] }, pagination: $pagination) {
edges {
node {
...TradeFields
}
cursor
}
pageInfo {
startCursor
endCursor
hasNextPage
hasPreviousPage
}
}
}

fragment TradeUpdateFields on TradeUpdate {
id
price
size
createdAt
marketId
aggressor
}

subscription TradesUpdate($marketId: ID!) {
trades(marketId: $marketId) {
id
price
size
createdAt
marketId
aggressor
tradesStream(filter: { marketIds: [$marketId] }) {
...TradeUpdateFields
}
}

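For reference, a hedged sketch of issuing the reshaped Trades query directly with Apollo Client. The endpoint, pagination values, and the relative import path are illustrative assumptions; TradesDocument and the types come from the generated file below:

import { ApolloClient, InMemoryCache } from '@apollo/client';
import type { TradesQuery, TradesQueryVariables } from './__generated__/Trades';
import { TradesDocument } from './__generated__/Trades';

// Hypothetical client purely for illustration.
const client = new ApolloClient({
  uri: 'https://example.com/graphql',
  cache: new InMemoryCache(),
});

// The market id now travels through the trades filter rather than market(id:).
const fetchTrades = (marketId: string) =>
  client.query<TradesQuery, TradesQueryVariables>({
    query: TradesDocument,
    variables: { marketId, pagination: { first: 100 } },
  });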
52 libs/trades/src/lib/__generated__/Trades.ts generated
@ -11,14 +11,16 @@ export type TradesQueryVariables = Types.Exact<{
}>;


export type TradesQuery = { __typename?: 'Query', market?: { __typename?: 'Market', id: string, tradesConnection?: { __typename?: 'TradeConnection', edges: Array<{ __typename?: 'TradeEdge', cursor: string, node: { __typename?: 'Trade', id: string, price: string, size: string, createdAt: any, aggressor: Types.Side, market: { __typename?: 'Market', id: string } } }>, pageInfo: { __typename?: 'PageInfo', startCursor: string, endCursor: string, hasNextPage: boolean, hasPreviousPage: boolean } } | null } | null };
export type TradesQuery = { __typename?: 'Query', trades?: { __typename?: 'TradeConnection', edges: Array<{ __typename?: 'TradeEdge', cursor: string, node: { __typename?: 'Trade', id: string, price: string, size: string, createdAt: any, aggressor: Types.Side, market: { __typename?: 'Market', id: string } } }>, pageInfo: { __typename?: 'PageInfo', startCursor: string, endCursor: string, hasNextPage: boolean, hasPreviousPage: boolean } } | null };

export type TradeUpdateFieldsFragment = { __typename?: 'TradeUpdate', id: string, price: string, size: string, createdAt: any, marketId: string, aggressor: Types.Side };

export type TradesUpdateSubscriptionVariables = Types.Exact<{
marketId: Types.Scalars['ID'];
}>;


export type TradesUpdateSubscription = { __typename?: 'Subscription', trades?: Array<{ __typename?: 'TradeUpdate', id: string, price: string, size: string, createdAt: any, marketId: string, aggressor: Types.Side }> | null };
export type TradesUpdateSubscription = { __typename?: 'Subscription', tradesStream?: Array<{ __typename?: 'TradeUpdate', id: string, price: string, size: string, createdAt: any, marketId: string, aggressor: Types.Side }> | null };

export const TradeFieldsFragmentDoc = gql`
fragment TradeFields on Trade {
@ -32,23 +34,30 @@ export const TradeFieldsFragmentDoc = gql`
}
}
`;
export const TradeUpdateFieldsFragmentDoc = gql`
fragment TradeUpdateFields on TradeUpdate {
id
price
size
createdAt
marketId
aggressor
}
`;
export const TradesDocument = gql`
query Trades($marketId: ID!, $pagination: Pagination) {
market(id: $marketId) {
id
tradesConnection(pagination: $pagination) {
edges {
node {
...TradeFields
}
cursor
}
pageInfo {
startCursor
endCursor
hasNextPage
hasPreviousPage
trades(filter: {marketIds: [$marketId]}, pagination: $pagination) {
edges {
node {
...TradeFields
}
cursor
}
pageInfo {
startCursor
endCursor
hasNextPage
hasPreviousPage
}
}
}
@ -84,16 +93,11 @@ export type TradesLazyQueryHookResult = ReturnType<typeof useTradesLazyQuery>;
export type TradesQueryResult = Apollo.QueryResult<TradesQuery, TradesQueryVariables>;
export const TradesUpdateDocument = gql`
subscription TradesUpdate($marketId: ID!) {
trades(marketId: $marketId) {
id
price
size
createdAt
marketId
aggressor
tradesStream(filter: {marketIds: [$marketId]}) {
...TradeUpdateFields
}
}
`;
${TradeUpdateFieldsFragmentDoc}`;

/**
* __useTradesUpdateSubscription__

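A hedged sketch of consuming the renamed stream through the generated subscription hook documented just above. The hook name follows the codegen docstring; the wrapper hook itself is a hypothetical example:

import { useTradesUpdateSubscription } from './__generated__/Trades';

// Subscribes to tradesStream for one market and returns the price of the
// latest TradeUpdateFields payload, if any.
export const useLastTradePrice = (marketId: string) => {
  const { data } = useTradesUpdateSubscription({ variables: { marketId } });
  return data?.tradesStream?.[0]?.price;
};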
@ -1,4 +1,3 @@
import compact from 'lodash/compact';
import { useDataProvider } from '@vegaprotocol/data-provider';
import type { AgGridReact } from 'ag-grid-react';
import { useRef } from 'react';
@ -19,12 +18,11 @@ export const TradesContainer = ({ marketId }: TradesContainerProps) => {
dataProvider: tradesWithMarketProvider,
variables: { marketId },
});
const trades = compact(data).map((d) => d.node);

return (
<TradesTable
ref={gridRef}
rowData={trades}
rowData={data}
onClick={(price?: string) => {
if (price) {
updateOrder(marketId, { price });

@ -3,75 +3,95 @@ import {
makeDerivedDataProvider,
defaultAppend as append,
} from '@vegaprotocol/data-provider';
import type { PageInfo, Edge } from '@vegaprotocol/data-provider';
import type { PageInfo, Cursor } from '@vegaprotocol/data-provider';
import type { Market } from '@vegaprotocol/markets';
import { marketsProvider } from '@vegaprotocol/markets';
import { marketsMapProvider } from '@vegaprotocol/markets';
import type {
TradesQuery,
TradesQueryVariables,
TradeFieldsFragment,
TradesUpdateSubscription,
TradeUpdateFieldsFragment,
TradesUpdateSubscriptionVariables,
} from './__generated__/Trades';
import { TradesDocument, TradesUpdateDocument } from './__generated__/Trades';
import orderBy from 'lodash/orderBy';
import produce from 'immer';

export const MAX_TRADES = 500;

const getData = (
responseData: TradesQuery | null
): ({
cursor: string;
node: TradeFieldsFragment;
} | null)[] => responseData?.market?.tradesConnection?.edges || [];
): (TradeFieldsFragment & Cursor)[] =>
responseData?.trades?.edges.map<TradeFieldsFragment & Cursor>((edge) => ({
...edge.node,
cursor: edge.cursor,
})) || [];

const getDelta = (subscriptionData: TradesUpdateSubscription) =>
subscriptionData?.trades || [];
subscriptionData?.tradesStream || [];

const update = (
data: ReturnType<typeof getData> | null,
delta: ReturnType<typeof getDelta>
) => {
if (!data) return data;
return produce(data, (draft) => {
// for each incoming trade add it to the beginning and remove oldest trade
orderBy(delta, 'createdAt', 'desc').forEach((node) => {
const { marketId, ...nodeData } = node;
draft.unshift({
node: {
...nodeData,
__typename: 'Trade',
market: {
__typename: 'Market',
id: marketId,
},
},
cursor: '',
});
const mapTradeUpdateToTrade = (
tradeUpdate: TradeUpdateFieldsFragment
): TradeFieldsFragment => {
const { marketId, ...trade } = tradeUpdate;
return {
...trade,
__typename: 'Trade',
market: {
__typename: 'Market',
id: marketId,
},
};
};

if (draft.length > MAX_TRADES) {
draft.pop();
}
});
const mapTradeUpdateToTradeWithMarket =
(markets: Record<string, Market>) =>
(tradeUpdate: TradeUpdateFieldsFragment): Trade => {
const { market, ...trade } = mapTradeUpdateToTrade(tradeUpdate);
return {
...trade,
market: markets[market.id],
};
};

const update = <T extends Omit<TradeFieldsFragment, 'market'> & Cursor>(
data: T[] | null,
delta: ReturnType<typeof getDelta>,
variables: TradesQueryVariables,
mapDeltaToData: (delta: TradeUpdateFieldsFragment) => T
): T[] => {
const updatedData = data ? [...data] : ([] as T[]);
orderBy(delta, 'createdAt', 'desc').forEach((tradeUpdate) => {
const index = data?.findIndex((trade) => trade.id === tradeUpdate.id) ?? -1;
if (index !== -1) {
updatedData[index] = {
...updatedData[index],
...mapDeltaToData(tradeUpdate),
};
} else if (!data?.length || tradeUpdate.createdAt >= data[0].createdAt) {
updatedData.unshift(mapDeltaToData(tradeUpdate));
}
});
return updatedData.slice(0, MAX_TRADES);
};

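A tiny, self-contained sketch of the capped-list behaviour the trades update above enforces. The types are simplified and hypothetical; MAX_TRADES mirrors the constant in this file:

const MAX_TRADES = 500;

type TradeLike = { id: string; createdAt: string };

// Newer trades are prepended, then the array is trimmed so the grid never
// holds more than MAX_TRADES rows, keeping per-update cost bounded.
const prependAndCap = (rows: TradeLike[], incoming: TradeLike[]): TradeLike[] =>
  [...incoming, ...rows].slice(0, MAX_TRADES);

// Example: with a full buffer, adding one trade drops the oldest one.
const full = Array.from({ length: MAX_TRADES }, (_, i) => ({
  id: String(i),
  createdAt: new Date(i).toISOString(),
}));
const next = prependAndCap(full, [{ id: 'new', createdAt: new Date().toISOString() }]);
console.log(next.length); // 500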
export type Trade = Omit<TradeFieldsFragment, 'market'> & { market?: Market };
export type TradeEdge = Edge<Trade>;

const getPageInfo = (responseData: TradesQuery | null): PageInfo | null =>
responseData?.market?.tradesConnection?.pageInfo || null;
responseData?.trades?.pageInfo || null;

export const tradesProvider = makeDataProvider<
Parameters<typeof getData>['0'],
ReturnType<typeof getData>,
Parameters<typeof getDelta>['0'],
ReturnType<typeof getDelta>,
TradesQueryVariables
TradesQueryVariables,
TradesUpdateSubscriptionVariables
>({
query: TradesDocument,
subscriptionQuery: TradesUpdateDocument,
update,
update: (data, delta, reload, variables) =>
update(data, delta, variables, mapTradeUpdateToTrade),
getData,
getDelta,
pagination: {
@ -79,34 +99,32 @@ export const tradesProvider = makeDataProvider<
append,
first: MAX_TRADES,
},
getSubscriptionVariables: ({ marketId }) => ({ marketId }),
});

export const tradesWithMarketProvider = makeDerivedDataProvider<
(TradeEdge | null)[],
Trade[],
(Trade & Cursor)[],
never,
TradesQueryVariables
>(
[
tradesProvider,
(callback, client) => marketsProvider(callback, client, undefined),
(callback, client) => marketsMapProvider(callback, client, undefined),
],
(partsData): (TradeEdge | null)[] | null => {
const edges = partsData[0] as ReturnType<typeof getData>;
return edges.map((edge) => {
if (edge === null) {
return null;
}
const node = {
...edge.node,
market: (partsData[1] as Market[]).find(
(market) => market.id === edge.node.market.id
),
};
const cursor = edge?.cursor || '';
return {
cursor,
node,
};
});
(partsData, variables, prevData, parts): Trade[] | null => {
if (prevData && parts[0].isUpdate) {
return update(
prevData,
parts[0].delta as ReturnType<typeof getDelta>,
variables,
mapTradeUpdateToTradeWithMarket(partsData[1] as Record<string, Market>)
);
}
return ((partsData[0] as ReturnType<typeof getData>) || []).map(
(trade) => ({
...trade,
market: (partsData[1] as Record<string, Market>)[trade.market.id],
})
);
}
);

@ -11,24 +11,20 @@ export const tradesQuery = (
override?: PartialDeep<TradesQuery>
): TradesQuery => {
const defaultResult: TradesQuery = {
market: {
id: 'market-0',
tradesConnection: {
__typename: 'TradeConnection',
edges: trades.map((node, i) => ({
__typename: 'TradeEdge',
node,
cursor: (i + 1).toString(),
})),
pageInfo: {
__typename: 'PageInfo',
startCursor: '0',
endCursor: trades.length.toString(),
hasNextPage: false,
hasPreviousPage: false,
},
trades: {
__typename: 'TradeConnection',
edges: trades.map((node, i) => ({
__typename: 'TradeEdge',
node,
cursor: (i + 1).toString(),
})),
pageInfo: {
__typename: 'PageInfo',
startCursor: '0',
endCursor: trades.length.toString(),
hasNextPage: false,
hasPreviousPage: false,
},
__typename: 'Market',
},
};

@ -40,7 +36,7 @@ export const tradesUpdateSubscription = (
): TradesUpdateSubscription => {
const defaultResult: TradesUpdateSubscription = {
__typename: 'Subscription',
trades: [
tradesStream: [
{
__typename: 'TradeUpdate',
id: '1234567890',
