refactor: use Array.prototype.at in some places for readability (#11274)

* refactor: use `Array.prototype.at`
* fixup! refactor: use `Array.prototype.at`
This commit is contained in:
parent c0dbc3b53f
commit 2b6dbd4fcb
19 changed files with 46 additions and 44 deletions
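For readers skimming the diff, the whole commit is one mechanical substitution: `xs[xs.length - 1]` becomes `xs.at(-1)`. A minimal sketch of the semantics this relies on (variable names are illustrative only):

```ts
const xs: number[] = [10, 20, 30];

// The classic last-element idiom and its ES2022 replacement.
console.log(xs[xs.length - 1]); // 30
console.log(xs.at(-1));         // 30

// Both yield undefined on an empty array, but `at` surfaces that in the
// type system: it returns `number | undefined`, forcing each call site to
// choose between `!`, `?.`, or a `??` fallback.
const empty: number[] = [];
console.log(empty.at(-1)); // undefined
```

The hunks below differ only in which of those three handling strategies each call site picks.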
@@ -627,7 +627,7 @@ export default abstract class Chart<T extends Schema> {
 		}
 	}

 	// If no log existed at the oldest point of the requested range,
-	} else if (!isTimeSame(new Date(logs[logs.length - 1].date * 1000), gt)) {
+	} else if (!isTimeSame(new Date(logs.at(-1)!.date * 1000), gt)) {
 		// fetch the newest log as of that oldest point and append it to the end
 		// (because the gap cannot be filled otherwise)
 		const outdatedLog = await repository.findOne({

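Where the surrounding logic already guarantees a non-empty array, the diff uses a non-null assertion. A hedged sketch of that pattern (the function below is hypothetical, not the actual Chart code):

```ts
// `logs.at(-1)` has type `{ date: number } | undefined`; the `!` narrows it
// back, on the assumption carried over from the original indexing code
// that `logs` is non-empty at this point.
function newestLogDate(logs: { date: number }[]): Date {
	return new Date(logs.at(-1)!.date * 1000);
}
```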
@@ -67,8 +67,9 @@ export function maximum(xs: number[]): number {
 export function groupBy<T>(f: EndoRelation<T>, xs: T[]): T[][] {
 	const groups = [] as T[][];
 	for (const x of xs) {
-		if (groups.length !== 0 && f(groups[groups.length - 1][0], x)) {
-			groups[groups.length - 1].push(x);
+		const lastGroup = groups.at(-1);
+		if (lastGroup !== undefined && f(lastGroup[0], x)) {
+			lastGroup.push(x);
 		} else {
 			groups.push([x]);
 		}

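This hunk is the one place where the rewrite is more than cosmetic: binding `groups.at(-1)` once lets TypeScript narrow away `undefined`, which subsumes the old `groups.length !== 0` check and removes the repeated indexing. A self-contained sketch, assuming `EndoRelation<T>` is `(a: T, b: T) => boolean` as the signature suggests:

```ts
type EndoRelation<T> = (a: T, b: T) => boolean;

function groupBy<T>(f: EndoRelation<T>, xs: T[]): T[][] {
	const groups = [] as T[][];
	for (const x of xs) {
		const lastGroup = groups.at(-1);
		if (lastGroup !== undefined && f(lastGroup[0], x)) {
			lastGroup.push(x); // x relates to the current run's head: extend it
		} else {
			groups.push([x]); // otherwise start a new run
		}
	}
	return groups;
}

console.log(groupBy((a, b) => a === b, [1, 1, 2, 1])); // [[1, 1], [2], [1]]
```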
@@ -1,7 +1,7 @@
 import { Inject, Injectable } from '@nestjs/common';
 import { IsNull, MoreThan, Not } from 'typeorm';
 import { DI } from '@/di-symbols.js';
-import type { DriveFilesRepository } from '@/models/index.js';
+import type { DriveFile, DriveFilesRepository } from '@/models/index.js';
 import type { Config } from '@/config.js';
 import type Logger from '@/logger.js';
 import { DriveService } from '@/core/DriveService.js';

@@ -31,7 +31,7 @@ export class CleanRemoteFilesProcessorService {
 		this.logger.info('Deleting cached remote files...');

 		let deletedCount = 0;
-		let cursor: any = null;
+		let cursor: DriveFile['id'] | null = null;

 		while (true) {
 			const files = await this.driveFilesRepository.find({

@@ -51,7 +51,7 @@ export class CleanRemoteFilesProcessorService {
 				break;
 			}

-			cursor = files[files.length - 1].id;
+			cursor = files.at(-1)?.id ?? null;

 			await Promise.all(files.map(file => this.driveService.deleteFileSync(file, true)));

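All of the processor-service hunks that follow share one keyset-pagination loop, and the change is the same in each: the cursor's `any` becomes a proper id-or-null type, and the last-element read becomes `.at(-1)?.id ?? null` so the cursor stays well-typed without an assertion. A minimal sketch of the loop, with a hypothetical page fetcher standing in for the TypeORM repository:

```ts
type DriveFileLike = { id: string };

// Hypothetical: returns up to `take` rows with id > cursor, ascending by id.
type FetchPage = (cursor: string | null, take: number) => Promise<DriveFileLike[]>;

async function forEachFile(
	fetchPage: FetchPage,
	handle: (file: DriveFileLike) => Promise<void>,
): Promise<void> {
	// Typed cursor instead of `any`: a file id, or null for "from the start".
	let cursor: DriveFileLike['id'] | null = null;

	while (true) {
		const files = await fetchPage(cursor, 8);
		if (files.length === 0) break;

		// `files.at(-1)` is `DriveFileLike | undefined`; `?? null` folds the
		// empty case (already unreachable thanks to the break) into the
		// cursor type instead of asserting with `!`.
		cursor = files.at(-1)?.id ?? null;

		await Promise.all(files.map(file => handle(file)));
	}
}
```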
@@ -70,7 +70,7 @@ export class DeleteAccountProcessorService {
 				break;
 			}

-			cursor = notes[notes.length - 1].id;
+			cursor = notes.at(-1)?.id ?? null;

 			await this.notesRepository.delete(notes.map(note => note.id));

@@ -101,7 +101,7 @@ export class DeleteAccountProcessorService {
 				break;
 			}

-			cursor = files[files.length - 1].id;
+			cursor = files.at(-1)?.id ?? null;

 			for (const file of files) {
 				await this.driveService.deleteFileSync(file);

@@ -1,7 +1,7 @@
 import { Inject, Injectable } from '@nestjs/common';
 import { MoreThan } from 'typeorm';
 import { DI } from '@/di-symbols.js';
-import type { UsersRepository, DriveFilesRepository } from '@/models/index.js';
+import type { UsersRepository, DriveFilesRepository, DriveFile } from '@/models/index.js';
 import type { Config } from '@/config.js';
 import type Logger from '@/logger.js';
 import { DriveService } from '@/core/DriveService.js';

@@ -40,7 +40,7 @@ export class DeleteDriveFilesProcessorService {
 		}

 		let deletedCount = 0;
-		let cursor: any = null;
+		let cursor: DriveFile['id'] | null = null;

 		while (true) {
 			const files = await this.driveFilesRepository.find({

@@ -59,7 +59,7 @@ export class DeleteDriveFilesProcessorService {
 				break;
 			}

-			cursor = files[files.length - 1].id;
+			cursor = files.at(-1)?.id ?? null;

 			for (const file of files) {
 				await this.driveService.deleteFileSync(file);

@@ -3,7 +3,7 @@ import { Inject, Injectable } from '@nestjs/common';
 import { MoreThan } from 'typeorm';
 import { format as dateFormat } from 'date-fns';
 import { DI } from '@/di-symbols.js';
-import type { UsersRepository, BlockingsRepository } from '@/models/index.js';
+import type { UsersRepository, BlockingsRepository, Blocking } from '@/models/index.js';
 import type { Config } from '@/config.js';
 import type Logger from '@/logger.js';
 import { DriveService } from '@/core/DriveService.js';

@@ -53,7 +53,7 @@ export class ExportBlockingProcessorService {
 		const stream = fs.createWriteStream(path, { flags: 'a' });

 		let exportedCount = 0;
-		let cursor: any = null;
+		let cursor: Blocking['id'] | null = null;

 		while (true) {
 			const blockings = await this.blockingsRepository.find({

@@ -72,7 +72,7 @@ export class ExportBlockingProcessorService {
 				break;
 			}

-			cursor = blockings[blockings.length - 1].id;
+			cursor = blockings.at(-1)?.id ?? null;

 			for (const block of blockings) {
 				const u = await this.usersRepository.findOneBy({ id: block.blockeeId });

@@ -94,7 +94,7 @@ export class ExportFavoritesProcessorService {
 				break;
 			}

-			cursor = favorites[favorites.length - 1].id;
+			cursor = favorites.at(-1)?.id ?? null;

 			for (const favorite of favorites) {
 				let poll: Poll | undefined;

@@ -79,7 +79,7 @@ export class ExportFollowingProcessorService {
 				break;
 			}

-			cursor = followings[followings.length - 1].id;
+			cursor = followings.at(-1)?.id ?? null;

 			for (const following of followings) {
 				const u = await this.usersRepository.findOneBy({ id: following.followeeId });

@@ -3,7 +3,7 @@ import { Inject, Injectable } from '@nestjs/common';
 import { IsNull, MoreThan } from 'typeorm';
 import { format as dateFormat } from 'date-fns';
 import { DI } from '@/di-symbols.js';
-import type { MutingsRepository, UsersRepository, BlockingsRepository } from '@/models/index.js';
+import type { MutingsRepository, UsersRepository, BlockingsRepository, Muting } from '@/models/index.js';
 import type { Config } from '@/config.js';
 import type Logger from '@/logger.js';
 import { DriveService } from '@/core/DriveService.js';

@@ -56,7 +56,7 @@ export class ExportMutingProcessorService {
 		const stream = fs.createWriteStream(path, { flags: 'a' });

 		let exportedCount = 0;
-		let cursor: any = null;
+		let cursor: Muting['id'] | null = null;

 		while (true) {
 			const mutes = await this.mutingsRepository.find({

@@ -76,7 +76,7 @@ export class ExportMutingProcessorService {
 				break;
 			}

-			cursor = mutes[mutes.length - 1].id;
+			cursor = mutes.at(-1)?.id ?? null;

 			for (const mute of mutes) {
 				const u = await this.usersRepository.findOneBy({ id: mute.muteeId });

@@ -90,7 +90,7 @@ export class ExportNotesProcessorService {
 				break;
 			}

-			cursor = notes[notes.length - 1].id;
+			cursor = notes.at(-1)?.id ?? null;

 			for (const note of notes) {
 				let poll: Poll | undefined;

@@ -181,7 +181,7 @@ export class ActivityPubServerService {
 			undefined,
 			inStock ? `${partOf}?${url.query({
 				page: 'true',
-				cursor: followings[followings.length - 1].id,
+				cursor: followings.at(-1)!.id,
 			})}` : undefined,
 		);

@@ -273,7 +273,7 @@ export class ActivityPubServerService {
 			undefined,
 			inStock ? `${partOf}?${url.query({
 				page: 'true',
-				cursor: followings[followings.length - 1].id,
+				cursor: followings.at(-1)!.id,
 			})}` : undefined,
 		);

@@ -398,7 +398,7 @@ export class ActivityPubServerService {
 			})}` : undefined,
 			notes.length ? `${partOf}?${url.query({
 				page: 'true',
-				until_id: notes[notes.length - 1].id,
+				until_id: notes.at(-1)!.id,
 			})}` : undefined,
 		);

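In these ActivityPub collection hunks the `!` assertion is justified by the enclosing ternary: the query string is only built when the array is known to be non-empty. A simplified sketch of that coupling, without Misskey's `url.query` helper:

```ts
// The guard (`notes.length`) and the assertion (`notes.at(-1)!`) must agree:
// the URL branch is only reached when at least one note exists.
function nextPageUrl(partOf: string, notes: { id: string }[]): string | undefined {
	return notes.length
		? `${partOf}?page=true&until_id=${notes.at(-1)!.id}`
		: undefined;
}
```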
@@ -447,12 +447,12 @@ export async function testPaginationConsistency<Entity extends { id: string, cre
 	for (const limit of [1, 5, 10, 100, undefined]) {
 		// 1. Fetching with both ends fixed via sinceId/Date and untilId/Date should produce the expected result
 		if (ordering === 'desc') {
-			const end = expected[expected.length - 1];
+			const end = expected.at(-1)!;
 			let last = await fetchEntities(rangeToParam({ limit, since: end }));
 			const actual: Entity[] = [];
 			while (last.length !== 0) {
 				actual.push(...last);
-				last = await fetchEntities(rangeToParam({ limit, until: last[last.length - 1], since: end }));
+				last = await fetchEntities(rangeToParam({ limit, until: last.at(-1), since: end }));
 			}
 			actual.push(end);
 			assert.deepStrictEqual(

@@ -467,7 +467,7 @@ export async function testPaginationConsistency<Entity extends { id: string, cre
 			const actual: Entity[] = [];
 			while (last.length !== 0) {
 				actual.push(...last);
-				last = await fetchEntities(rangeToParam({ limit, since: last[last.length - 1] }));
+				last = await fetchEntities(rangeToParam({ limit, since: last.at(-1) }));
 			}
 			assert.deepStrictEqual(
 				actual.map(({ id, createdAt }) => id + ':' + createdAt),

@@ -480,7 +480,7 @@ export async function testPaginationConsistency<Entity extends { id: string, cre
 			const actual: Entity[] = [];
 			while (last.length !== 0) {
 				actual.push(...last);
-				last = await fetchEntities(rangeToParam({ limit, until: last[last.length - 1] }));
+				last = await fetchEntities(rangeToParam({ limit, until: last.at(-1) }));
 			}
 			assert.deepStrictEqual(
 				actual.map(({ id, createdAt }) => id + ':' + createdAt),

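The test loops take the third option: they pass `last.at(-1)` with neither `!` nor `??`, which works because the `while (last.length !== 0)` guard makes the value defined in practice and `rangeToParam` evidently accepts `undefined` in its signature. A sketch of that invariant, with a hypothetical fetcher:

```ts
// Drains a paginated source by always asking for items older than the last
// one seen. Inside the loop body `last` is non-empty, so `last.at(-1)` is
// defined even though its static type is `T | undefined`.
async function drain<T>(fetch: (until?: T) => Promise<T[]>): Promise<T[]> {
	const actual: T[] = [];
	let last = await fetch();
	while (last.length !== 0) {
		actual.push(...last);
		last = await fetch(last.at(-1));
	}
	return actual;
}
```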
@@ -568,7 +568,7 @@ function fetchMoreFolders() {
 	os.api('drive/folders', {
 		folderId: folder.value ? folder.value.id : null,
 		type: props.type,
-		untilId: folders.value[folders.value.length - 1].id,
+		untilId: folders.value.at(-1)?.id,
 		limit: max + 1,
 	}).then(folders => {
 		if (folders.length === max + 1) {

@@ -591,7 +591,7 @@ function fetchMoreFiles() {
 	os.api('drive/files', {
 		folderId: folder.value ? folder.value.id : null,
 		type: props.type,
-		untilId: files.value[files.value.length - 1].id,
+		untilId: files.value.at(-1)?.id,
 		limit: max + 1,
 	}).then(files => {
 		if (files.length === max + 1) {

@@ -59,8 +59,8 @@ function draw(): void {

 	polygonPoints = `0,${ viewBoxY } ${ polylinePoints } ${ viewBoxX },${ viewBoxY }`;

-	headX = _polylinePoints[_polylinePoints.length - 1][0];
-	headY = _polylinePoints[_polylinePoints.length - 1][1];
+	headX = _polylinePoints.at(-1)![0];
+	headY = _polylinePoints.at(-1)![1];
 }

 watch(() => props.src, draw, { immediate: true });

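The chart components read the head of a polyline, i.e. the last `[x, y]` pair. A minimal sketch of that access pattern (the `!` again assumes the draw code only runs once points exist):

```ts
const points: [number, number][] = [[0, 5], [1, 7], [2, 6]];

// `points.at(-1)` is `[number, number] | undefined`; assert, then index the tuple.
const headX = points.at(-1)![0]; // 2
const headY = points.at(-1)![1]; // 6
console.log(headX, headY);
```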
@@ -120,7 +120,7 @@ const contextmenu = $computed(() => ([{

 function back() {
 	history.pop();
-	router.replace(history[history.length - 1].path, history[history.length - 1].key);
+	router.replace(history.at(-1)!.path, history.at(-1)!.key);
 }

 function reload() {

@@ -233,7 +233,7 @@ const fetchMore = async (): Promise<void> => {
 		...(props.pagination.offsetMode ? {
 			offset: offset.value,
 		} : {
-			untilId: Array.from(items.value.keys())[items.value.size - 1],
+			untilId: Array.from(items.value.keys()).at(-1),
 		}),
 	}).then(res => {
 		for (let i = 0; i < res.length; i++) {

@@ -297,7 +297,7 @@ const fetchMoreAhead = async (): Promise<void> => {
 		...(props.pagination.offsetMode ? {
 			offset: offset.value,
 		} : {
-			sinceId: Array.from(items.value.keys())[items.value.size - 1],
+			sinceId: Array.from(items.value.keys()).at(-1),
 		}),
 	}).then(res => {
 		if (res.length === 0) {

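Here the cursor lives in a `Map`, which iterates its keys in insertion order, so the newest id is the last key. A small sketch of the idiom:

```ts
const items = new Map<string, { text: string }>([
	['note1', { text: 'a' }],
	['note2', { text: 'b' }],
]);

// Materialize the keys and take the last one; `at` returns
// `string | undefined`, and undefined is acceptable for an optional
// untilId/sinceId parameter when the map is empty.
const untilId = Array.from(items.keys()).at(-1);
console.log(untilId); // 'note2'
```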
@@ -78,8 +78,9 @@ export function maximum(xs: number[]): number {
 export function groupBy<T>(f: EndoRelation<T>, xs: T[]): T[][] {
 	const groups = [] as T[][];
 	for (const x of xs) {
-		if (groups.length !== 0 && f(groups[groups.length - 1][0], x)) {
-			groups[groups.length - 1].push(x);
+		const lastGroup = groups.at(-1);
+		if (lastGroup !== undefined && f(lastGroup[0], x)) {
+			lastGroup.push(x);
 		} else {
 			groups.push([x]);
 		}

@@ -121,10 +121,10 @@ function onStats(connStats) {
 	cpuPolygonPoints = `${viewBoxX - (stats.length - 1)},${viewBoxY} ${cpuPolylinePoints} ${viewBoxX},${viewBoxY}`;
 	memPolygonPoints = `${viewBoxX - (stats.length - 1)},${viewBoxY} ${memPolylinePoints} ${viewBoxX},${viewBoxY}`;

-	cpuHeadX = cpuPolylinePointsStats[cpuPolylinePointsStats.length - 1][0];
-	cpuHeadY = cpuPolylinePointsStats[cpuPolylinePointsStats.length - 1][1];
-	memHeadX = memPolylinePointsStats[memPolylinePointsStats.length - 1][0];
-	memHeadY = memPolylinePointsStats[memPolylinePointsStats.length - 1][1];
+	cpuHeadX = cpuPolylinePointsStats.at(-1)![0];
+	cpuHeadY = cpuPolylinePointsStats.at(-1)![1];
+	memHeadX = memPolylinePointsStats.at(-1)![0];
+	memHeadY = memPolylinePointsStats.at(-1)![1];

 	cpuP = (connStats.cpu * 100).toFixed(0);
 	memP = (connStats.mem.active / props.meta.mem.total * 100).toFixed(0);

@@ -94,10 +94,10 @@ function onStats(connStats) {
 	inPolygonPoints = `${viewBoxX - (stats.length - 1)},${viewBoxY} ${inPolylinePoints} ${viewBoxX},${viewBoxY}`;
 	outPolygonPoints = `${viewBoxX - (stats.length - 1)},${viewBoxY} ${outPolylinePoints} ${viewBoxX},${viewBoxY}`;

-	inHeadX = inPolylinePointsStats[inPolylinePointsStats.length - 1][0];
-	inHeadY = inPolylinePointsStats[inPolylinePointsStats.length - 1][1];
-	outHeadX = outPolylinePointsStats[outPolylinePointsStats.length - 1][0];
-	outHeadY = outPolylinePointsStats[outPolylinePointsStats.length - 1][1];
+	inHeadX = inPolylinePointsStats.at(-1)![0];
+	inHeadY = inPolylinePointsStats.at(-1)![1];
+	outHeadX = outPolylinePointsStats.at(-1)![0];
+	outHeadY = outPolylinePointsStats.at(-1)![1];

 	inRecent = connStats.net.rx;
 	outRecent = connStats.net.tx;