Full/incremental log updates.
parent 44fd5a0830
commit 22f2c7c600
@@ -7,32 +7,40 @@ import { useQuery } from '@apollo/react-hooks';
 
 import SIGNAL_LOG from '../../../gql/signal_log.graphql';
 
-import { ConsoleContext, useQueryStatusReducer } from '../../../hooks';
+import { ConsoleContext } from '../../../hooks';
 
 import Log from '../../../components/Log';
 
 const MAX_LINES = 1000;
-const oldLines = [];
+const logBuffer = [];
 
 const SignalLog = () => {
   const { config } = useContext(ConsoleContext);
-  const data = useQueryStatusReducer(useQuery(SIGNAL_LOG, {
-    pollInterval: config.api.intervalLog,
-    variables: { first: oldLines.length === 0 }
-  }));
+  const { data, refetch, startPolling, stopPolling } = useQuery(SIGNAL_LOG, {
+    variables: { incremental: false }
+  });
 
   if (!data) {
     return null;
   }
 
-  const newLines = JSON.parse(data.signal_log.json);
-  oldLines.push(...newLines);
-  if (oldLines.length > MAX_LINES) {
-    oldLines.splice(0, oldLines.length - MAX_LINES);
+  const { incremental, lines } = JSON.parse(data.signal_log.json);
+
+  if (!incremental) {
+    stopPolling();
+    refetch({ incremental: true });
+    startPolling(config.api.intervalLog);
+  }
+
+  logBuffer.push(...lines);
+  if (logBuffer.length > MAX_LINES) {
+    logBuffer.splice(0, logBuffer.length - MAX_LINES);
   }
 
   return (
-    <Log log={oldLines.slice(0)}/>
+    <Log log={logBuffer.slice(0)} />
   );
 };
 
 export default SignalLog;
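Note: the panel above now drives a single Apollo query in two phases. The first request asks for the full log (incremental: false); once that snapshot arrives it stops polling, refetches with incremental: true, and restarts polling at config.api.intervalLog, so later responses only append new lines to the capped module-level buffer. WNSLog below repeats the same steps, so the logic could plausibly be factored into a shared hook; the sketch below is illustrative only (the useLogBuffer name, the selectJson callback, and the caller-owned buffer are assumptions, not part of this commit).

// Sketch only: shared full/incremental log polling (names and location are assumptions).
import { useContext } from 'react';
import { useQuery } from '@apollo/react-hooks';

import { ConsoleContext } from '../../../hooks';

const MAX_LINES = 1000;

export const useLogBuffer = (query, selectJson, buffer) => {
  const { config } = useContext(ConsoleContext);
  const { data, refetch, startPolling, stopPolling } = useQuery(query, {
    variables: { incremental: false }
  });

  if (!data) {
    return null;
  }

  const { incremental, lines } = JSON.parse(selectJson(data));

  // The first (full) response switches the same query over to incremental polling.
  if (!incremental) {
    stopPolling();
    refetch({ incremental: true });
    startPolling(config.api.intervalLog);
  }

  // Append the new lines and cap the caller-owned buffer.
  buffer.push(...lines);
  if (buffer.length > MAX_LINES) {
    buffer.splice(0, buffer.length - MAX_LINES);
  }

  return buffer.slice(0);
};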
@@ -39,7 +39,6 @@ const useStyles = makeStyles(() => ({
 const Signal = () => {
   const classes = useStyles();
   const [tab, setTab] = useState(TAB_STATUS);
-  const [type, setType] = useState();
 
   return (
     <Panel
@@ -7,29 +7,40 @@ import { useQuery } from '@apollo/react-hooks';
 
 import WNS_LOG from '../../../gql/wns_log.graphql';
 
-import { ConsoleContext, useQueryStatusReducer } from '../../../hooks';
+import { ConsoleContext } from '../../../hooks';
 
 import Log from '../../../components/Log';
 
 const MAX_LINES = 1000;
-const oldLines = [];
+const logBuffer = [];
 
 const WNSLog = () => {
   const { config } = useContext(ConsoleContext);
-  const data = useQueryStatusReducer(useQuery(WNS_LOG, { pollInterval: config.api.intervalLog }));
+  const { data, refetch, startPolling, stopPolling } = useQuery(WNS_LOG, {
+    variables: { incremental: false }
+  });
 
   if (!data) {
     return null;
   }
 
-  const newLines = JSON.parse(data.wns_log.json);
-  oldLines.push(...newLines);
-  if (oldLines.length > MAX_LINES) {
-    oldLines.splice(0, oldLines.length - MAX_LINES);
+  const { incremental, lines } = JSON.parse(data.wns_log.json);
+
+  if (!incremental) {
+    stopPolling();
+    refetch({ incremental: true });
+    startPolling(config.api.intervalLog);
+  }
+
+  logBuffer.push(...lines);
+  if (logBuffer.length > MAX_LINES) {
+    logBuffer.splice(0, logBuffer.length - MAX_LINES);
   }
 
   return (
-    <Log log={oldLines.slice(0)} />
+    <Log log={logBuffer.slice(0)} />
   );
 };
 
 export default WNSLog;
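Note: if the hook sketched above were adopted, WNSLog would shrink to roughly the following (purely illustrative; useLogBuffer and its import path are assumptions, not part of this commit).

// Sketch only: WNSLog on top of the hypothetical useLogBuffer hook.
import React from 'react';

import WNS_LOG from '../../../gql/wns_log.graphql';

import { useLogBuffer } from '../../../hooks'; // assumed location of the sketch above

import Log from '../../../components/Log';

const logBuffer = [];

const WNSLog = () => {
  const lines = useLogBuffer(WNS_LOG, data => data.wns_log.json, logBuffer);
  if (!lines) {
    return null;
  }

  return (
    <Log log={lines} />
  );
};

export default WNSLog;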
@@ -105,7 +105,7 @@ export const createResolvers = config => {
          timestamp: timestamp(),
          log: []
        };
-      },
+      }
     }
   };
 };
@@ -7,8 +7,9 @@ import defaultsDeep from 'lodash.defaultsdeep';
 
 import { ipfsResolvers } from './ipfs';
 import { systemResolvers } from './system';
-import { logResolvers } from "./log";
+import { logResolvers } from './log';
 
 // eslint-disable-next-line
 const log = debug('dxos:console:server:resolvers');
 
 /**
@@ -5,13 +5,13 @@
 
 import { spawnSync } from 'child_process';
 
 class LogCache {
-  constructor(maxLines = 500) {
+  constructor (maxLines = 500) {
     // Sets in JS iterate in insertion order.
     this.buffer = new Set();
     this.maxLines = maxLines;
   }
 
-  append(lines) {
+  append (lines) {
     const added = [];
     for (const line of lines) {
       if (!this.buffer.has(line)) {
@@ -37,7 +37,7 @@ const getLogCache = (name) => {
     _caches.set(name, cache);
   }
   return cache;
-}
+};
 
 const getLogs = async (name, incremental = false, lines = 100) => {
   const command = 'wire';
@@ -46,7 +46,7 @@ const getLogs = async (name, incremental = false, lines = 100) => {
   const child = spawnSync(command, args, { encoding: 'utf8' });
   const logLines = child.stdout.split(/\n/);
   const cache = getLogCache(name);
-  const added = cache.append(logLines);
+  const added = cache.append(logLines);
 
   return incremental ? added : Array.from(cache.buffer);
 };
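Note: getLogs shells out to the wire CLI, splits stdout into lines, and routes them through a per-service LogCache backed by a Set, so lines already seen are skipped; incremental callers get only the newly added lines, while full requests get the whole buffer. The append body is truncated in the hunk above, so the sketch below fills it in for illustration (only the Set-based dedup appears in the diff; the eviction loop is an assumption).

// Sketch: Set-backed dedup behind getLogs (eviction details are assumptions).
class LogCache {
  constructor (maxLines = 500) {
    // Sets in JS iterate in insertion order.
    this.buffer = new Set();
    this.maxLines = maxLines;
  }

  append (lines) {
    const added = [];
    for (const line of lines) {
      if (!this.buffer.has(line)) {
        this.buffer.add(line);
        added.push(line);
      }
    }

    // Drop the oldest entries once the cap is exceeded (oldest = first inserted).
    while (this.buffer.size > this.maxLines) {
      this.buffer.delete(this.buffer.values().next().value);
    }

    return added;
  }
}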
@@ -54,38 +54,38 @@ const getLogs = async (name, incremental = false, lines = 100) => {
 export const logResolvers = {
   Query: {
     wns_log: async (_, { incremental }) => {
-      const logs = await getLogs('wns-lite', incremental);
+      const lines = await getLogs('wns-lite', incremental);
       return {
         timestamp: new Date().toUTCString(),
-        json: JSON.stringify(logs)
+        json: JSON.stringify({ incremental, lines })
       };
     },
     signal_log: async (_, { incremental }) => {
-      const logs = await getLogs('signal', incremental);
+      const lines = await getLogs('signal', incremental);
       return {
         timestamp: new Date().toUTCString(),
-        json: JSON.stringify(logs)
+        json: JSON.stringify({ incremental, lines })
       };
     },
     ipfs_log: async (_, { incremental }) => {
-      const logs = await getLogs('ipfs', incremental);
+      const lines = await getLogs('ipfs', incremental);
       return {
         timestamp: new Date().toUTCString(),
-        json: JSON.stringify(logs)
+        json: JSON.stringify({ incremental, lines })
       };
     },
     ipfs_swarm_log: async (_, { incremental }) => {
-      const logs = await getLogs('ipfs-swarm-connect', incremental);
+      const lines = await getLogs('ipfs-swarm-connect', incremental);
       return {
         timestamp: new Date().toUTCString(),
-        json: JSON.stringify(logs)
+        json: JSON.stringify({ incremental, lines })
       };
     },
     app_log: async (_, { incremental }) => {
-      const logs = await getLogs('app', incremental);
+      const lines = await getLogs('app', incremental);
       return {
         timestamp: new Date().toUTCString(),
-        json: JSON.stringify(logs)
+        json: JSON.stringify({ incremental, lines })
       };
     }
   }
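Note: every log resolver now echoes the incremental flag next to the line array inside the serialized json field, which is what lets the client components distinguish the initial full snapshot from later deltas. An illustrative wns_log result is shown below (the timestamp and log lines are invented for the example).

// Illustrative resolver result (field values are made up for the example).
const example = {
  timestamp: 'Tue, 05 May 2020 12:00:00 GMT',
  json: JSON.stringify({
    incremental: true,
    lines: [
      'wns-lite | block synced',
      'wns-lite | tx applied'
    ]
  })
};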