From 1c32ef5a2f11295871755e98a344a770bfca01f7 Mon Sep 17 00:00:00 2001 From: Isla <5048549+islathehut@users.noreply.github.com> Date: Thu, 2 Jan 2025 18:57:48 -0500 Subject: [PATCH] fix: Fixes download issues on cancellation, download status logs, connection stability (#2687) * Fix download issues and improve connections * Update e2e-linux.yml * Some mild updates to help with initial connections and connection stability * Mild dependency update * Mild abort improvements and make the file component more legible * Fix snapshots --- .github/workflows/e2e-linux.yml | 2 +- packages/backend/package-lock.json | 20 ++ packages/backend/package.json | 1 + packages/backend/src/nest/common/utils.ts | 1 - .../ipfs-file-manager.service.ts | 177 +++++++++++------- .../ipfs-file-manager.types.ts | 6 + .../backend/src/nest/ipfs/ipfs.service.ts | 19 +- .../backend/src/nest/libp2p/libp2p.const.ts | 2 + .../backend/src/nest/libp2p/libp2p.service.ts | 17 +- .../backend/src/nest/storage/storage.types.ts | 1 + .../File/FileComponent/FileComponent.test.tsx | 4 +- .../File/FileComponent/FileComponent.tsx | 8 +- .../channels/NestedMessageContent.test.tsx | 6 +- tsconfig.build.json | 1 - 14 files changed, 170 insertions(+), 95 deletions(-) create mode 100644 packages/backend/src/nest/libp2p/libp2p.const.ts diff --git a/.github/workflows/e2e-linux.yml b/.github/workflows/e2e-linux.yml index c4b87dfc07..e94d2861f8 100644 --- a/.github/workflows/e2e-linux.yml +++ b/.github/workflows/e2e-linux.yml @@ -16,7 +16,7 @@ jobs: DISPLAY: ":99.0" TEST_MODE: true IS_CI: true - SKIP_BACK_COMPAT_TEST_BRANCHES: '["update-orbitdb", "chore/upgrade-orbitdb-2_4_3"]' + SKIP_BACK_COMPAT_TEST_BRANCHES: '["update-orbitdb", "chore/upgrade-orbitdb-2_4_3", "fix/2679-2680-2682-3_0-fixes"]' steps: - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 diff --git a/packages/backend/package-lock.json b/packages/backend/package-lock.json index cf0927bd88..f92f42d1c9 100644 --- a/packages/backend/package-lock.json +++ b/packages/backend/package-lock.json @@ -97,6 +97,7 @@ "@types/cors": "2.8.17", "@types/crypto-js": "^4.0.2", "@types/express": "^4.17.9", + "@types/get-port": "4.2.0", "@types/jest": "28.1.8", "@types/luxon": "^3.4.2", "@types/mock-fs": "^4.13.1", @@ -10285,6 +10286,16 @@ "@types/send": "*" } }, + "node_modules/@types/get-port": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/@types/get-port/-/get-port-4.2.0.tgz", + "integrity": "sha512-Iv2FAb5RnIk/eFO2CTu8k+0VMmIR15pKbcqRWi+s3ydW+aKXlN2yemP92SrO++ERyJx+p6Ie1ggbLBMbU1SjiQ==", + "deprecated": "This is a stub types definition. 
get-port provides its own type definitions, so you do not need this installed.", + "dev": true, + "dependencies": { + "get-port": "*" + } + }, "node_modules/@types/jest": { "version": "28.1.8", "resolved": "https://registry.npmjs.org/@types/jest/-/jest-28.1.8.tgz", @@ -30162,6 +30173,15 @@ } } }, + "@types/get-port": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/@types/get-port/-/get-port-4.2.0.tgz", + "integrity": "sha512-Iv2FAb5RnIk/eFO2CTu8k+0VMmIR15pKbcqRWi+s3ydW+aKXlN2yemP92SrO++ERyJx+p6Ie1ggbLBMbU1SjiQ==", + "dev": true, + "requires": { + "get-port": "*" + } + }, "@types/jest": { "version": "28.1.8", "resolved": "https://registry.npmjs.org/@types/jest/-/jest-28.1.8.tgz", diff --git a/packages/backend/package.json b/packages/backend/package.json index 382e7abcf5..bd70c5324a 100644 --- a/packages/backend/package.json +++ b/packages/backend/package.json @@ -63,6 +63,7 @@ "@types/cors": "2.8.17", "@types/crypto-js": "^4.0.2", "@types/express": "^4.17.9", + "@types/get-port": "4.2.0", "@types/jest": "28.1.8", "@types/luxon": "^3.4.2", "@types/mock-fs": "^4.13.1", diff --git a/packages/backend/src/nest/common/utils.ts b/packages/backend/src/nest/common/utils.ts index 58336ffe8a..0733b1b7ff 100644 --- a/packages/backend/src/nest/common/utils.ts +++ b/packages/backend/src/nest/common/utils.ts @@ -6,7 +6,6 @@ import { UserData } from '@quiet/types' import { HttpsProxyAgent } from 'https-proxy-agent' import { generateKeyPair } from '@libp2p/crypto/keys' import { peerIdFromPrivateKey } from '@libp2p/peer-id' -import { type PeerId } from '@libp2p/interface' import tmp from 'tmp' import crypto from 'crypto' import { type PermsData } from '@quiet/types' diff --git a/packages/backend/src/nest/ipfs-file-manager/ipfs-file-manager.service.ts b/packages/backend/src/nest/ipfs-file-manager/ipfs-file-manager.service.ts index 13420312c1..0d56b98ef5 100644 --- a/packages/backend/src/nest/ipfs-file-manager/ipfs-file-manager.service.ts +++ b/packages/backend/src/nest/ipfs-file-manager/ipfs-file-manager.service.ts @@ -10,7 +10,7 @@ import sizeOf from 'image-size' import { CID } from 'multiformats/cid' import { DownloadProgress, DownloadState, DownloadStatus, FileMetadata, imagesExtensions } from '@quiet/types' import { QUIET_DIR } from '../const' -import { ExportProgress, FilesData, IpfsFilesManagerEvents } from './ipfs-file-manager.types' +import { ExportProgress, ExportWalk, FilesData, IpfsFilesManagerEvents } from './ipfs-file-manager.types' import { StorageEvents, UnixFSEvents } from '../storage/storage.types' import { MAX_EVENT_LISTENERS, TRANSFER_SPEED_SPAN, UPDATE_STATUS_INTERVAL } from './ipfs-file-manager.const' import { sleep } from '../common/sleep' @@ -238,27 +238,26 @@ export class IpfsFileManagerService extends EventEmitter { private async cancelDownload(cid: string) { const _logger = createLogger(`${IpfsFileManagerService.name}:cancel:${cid}`) - const abortController = this.controllers.get(cid) + let abortController = this.controllers.get(cid) const downloadInProgress = this.files.get(cid) if (!downloadInProgress) return // In case download is cancelled right after start and queue is not yet initialized. 
- if (!abortController) { + while (abortController == null) { _logger.info(`Waiting for abort controller to be created...`) await sleep(1000) - await this.cancelDownload(cid) - } else { - _logger.info(`Aborting download`) - const controller = abortController.controller - this.cancelledDownloads.add(cid) - controller.abort() + abortController = this.controllers.get(cid) } + + _logger.info(`Aborting download`) + const controller = abortController.controller + controller.abort() } public async downloadFile(fileMetadata: FileMetadata) { const _logger = createLogger(`${IpfsFileManagerService.name}:download:${fileMetadata.cid.toString()}`) const fileCid: CID = CID.parse(fileMetadata.cid) - const downloadedBlocks: Set = new Set() + let downloadedBlocks: number = 0 const pendingBlocks: Set = new Set() const controller = new AbortController() @@ -267,7 +266,7 @@ export class IpfsFileManagerService extends EventEmitter { this.controllers.set(fileMetadata.cid, { controller }) // Add try catch and return downloadBlocks with timeout - const initialStat = await this.ufs.stat(fileCid) + const initialStat = await this.ufs.stat(fileCid, { signal: controller.signal }) const fileSize = initialStat.fileSize const localSize = initialStat.localFileSize if (fileMetadata.size && !compare(fileMetadata.size, fileSize, 0.05)) { @@ -304,7 +303,9 @@ export class IpfsFileManagerService extends EventEmitter { // Transfer speed const blocksStats: BlockStat[] = [] - const handleDownloadProgressEvents = async (event: GetEvents | CustomProgressEvent) => { + const handleDownloadProgressEvents = async ( + event: GetEvents | GetBlockProgressEvents | CustomProgressEvent + ) => { // if we don't have an event type there's nothing useful to do if (event.type === null) { return @@ -324,19 +325,13 @@ export class IpfsFileManagerService extends EventEmitter { } _logger.info(`Getting block ${cidStr} from local blockstore`) - if (downloadedBlocks.has(cidStr)) { - _logger.info(`Already downloaded block ${cidStr}`) - return - } - - downloadedBlocks.add(cidStr) } // handler for events where we are walking the file to get all child blocks // NOTE: this happens at the beginning of the download process AND when we have all of the blocks are we are walking through them to get the contents - const handleWalkFile = async (cid: CID) => { - const cidStr = cid.toString() - if (downloadedBlocks.size === 0 && pendingBlocks.size === 0) { + const handleWalkFile = async (event: CustomProgressEvent) => { + const cidStr = event.detail.cid.toString() + if (downloadedBlocks === 0 && pendingBlocks.size === 0) { // this is the first time we've seen this event so it means we are just starting the download process _logger.info(`Download started, walking`) await this.updateStatus(cidStr, DownloadState.Downloading) @@ -356,6 +351,7 @@ export class IpfsFileManagerService extends EventEmitter { byteLength: Number(totalBytes) - Number(bytesRead), } blocksStats.push(blockStat) + downloadedBlocks += 1 } // handler for events where we are asking for the block on the network because we don't have it stored locally @@ -370,10 +366,20 @@ export class IpfsFileManagerService extends EventEmitter { pendingBlocks.add(cidStr) } + const handlePutBlock = async (event: GetBlockProgressEvents) => { + const cidStr = event.detail.toString() + if (pendingBlocks.has(cidStr)) { + pendingBlocks.delete(cidStr) + } + + _logger.info(`Putting block ${cidStr} into local blockstore`) + } + + this.logger.info(`Event with type`, event.type) switch (event.type) { case UnixFSEvents.WALK_FILE: // 
this event has a different format for how it stores the CID on the detail - await handleWalkFile((event as any).detail.cid as CID) + await handleWalkFile(event as CustomProgressEvent) break case UnixFSEvents.GET_BLOCK_PROVIDERS: case UnixFSEvents.WANT_BLOCK: @@ -385,6 +391,9 @@ export class IpfsFileManagerService extends EventEmitter { case UnixFSEvents.DOWNLOAD_BLOCK: await handleDownloadBlock(event as CustomProgressEvent) break + case UnixFSEvents.PUT_BLOCK: + await handlePutBlock(event as GetBlockProgressEvents) + break default: break } @@ -392,43 +401,67 @@ export class IpfsFileManagerService extends EventEmitter { return } - const updateDownloadStatusWithTransferSpeed = setInterval(async () => { - const totalDownloadedBytes = Number((await this.ufs.stat(fileCid)).localFileSize) - let recentlyDownloadedBytes = 0 - const thresholdTimestamp = Math.floor(Date.now() / 1000) - TRANSFER_SPEED_SPAN - blocksStats.forEach((blockStat: BlockStat) => { - if (blockStat.fetchTime >= thresholdTimestamp) { - recentlyDownloadedBytes += blockStat.byteLength + const updateDownloadStatusWithTransferSpeed = setInterval( + async () => { + if (controller.signal.aborted) { + _logger.warn(`Cancelling update status interval due to cancellation`) + clearInterval(updateDownloadStatusWithTransferSpeed) + return } - }) - this.logger.info(`Current downloaded bytes`, recentlyDownloadedBytes, totalDownloadedBytes) - const transferSpeed = recentlyDownloadedBytes === 0 ? 0 : recentlyDownloadedBytes / TRANSFER_SPEED_SPAN - const fileState = this.files.get(fileMetadata.cid) - if (!fileState) { - this.logger.error(`No saved data for file cid ${fileMetadata.cid}`) - return - } - this.files.set(fileMetadata.cid, { - ...fileState, - transferSpeed: transferSpeed, - downloadedBytes: totalDownloadedBytes, - }) - await this.updateStatus(fileMetadata.cid, DownloadState.Downloading) - }, UPDATE_STATUS_INTERVAL * 1000) + const totalDownloadedBytes = Number((await this.ufs.stat(fileCid)).localFileSize) + let recentlyDownloadedBytes = 0 + const thresholdTimestamp = Math.floor(Date.now() / 1000) - TRANSFER_SPEED_SPAN + blocksStats.forEach((blockStat: BlockStat) => { + if (blockStat.fetchTime >= thresholdTimestamp) { + recentlyDownloadedBytes += blockStat.byteLength + } + }) + this.logger.info(`Current downloaded bytes`, recentlyDownloadedBytes, totalDownloadedBytes) - const downloadCompletedOrCanceled = new Promise((resolve, reject) => { - const interval = setInterval(() => { + const transferSpeed = recentlyDownloadedBytes === 0 ? 
0 : recentlyDownloadedBytes / TRANSFER_SPEED_SPAN const fileState = this.files.get(fileMetadata.cid) - this.ufs.stat(fileCid).then(({ fileSize, localFileSize }) => { - if (this.cancelledDownloads.has(fileMetadata.cid) || !fileState || localFileSize === fileSize) { - clearInterval(interval) - resolve('No more blocks to fetch, download is completed or canceled') - } else { - _logger.info(`Downloaded ${downloadedBlocks.size} blocks (${pendingBlocks.size} blocks pending)`) - } + if (!fileState) { + this.logger.error(`No saved data for file cid ${fileMetadata.cid}`) + return + } + this.files.set(fileMetadata.cid, { + ...fileState, + transferSpeed: transferSpeed, + downloadedBytes: totalDownloadedBytes, }) - }, 1000) + await this.updateStatus(fileMetadata.cid, DownloadState.Downloading) + }, + UPDATE_STATUS_INTERVAL * 1000, + controller + ) + + const downloadCompletedOrCanceled = new Promise((resolve, reject) => { + const interval = setInterval( + () => { + const fileState = this.files.get(fileMetadata.cid) + this.ufs + .stat(fileCid) + .then(({ fileSize, localFileSize }) => { + if (controller.signal.aborted || !fileState || localFileSize === fileSize) { + clearInterval(interval) + resolve('No more blocks to fetch, download is completed or canceled') + } else { + _logger.info(`Downloaded ${downloadedBlocks} blocks (${pendingBlocks.size} blocks pending)`) + } + }) + .catch(e => { + clearInterval(interval) + if (controller.signal.aborted) { + resolve('No more blocks to fetch, download is completed or canceled') + } else { + reject(e) + } + }) + }, + 1000, + controller + ) }) let downloading = fileSize !== initialStat.localFileSize @@ -438,7 +471,7 @@ export class IpfsFileManagerService extends EventEmitter { signal: controller.signal, } - while (downloading) { + while (downloading && !controller.signal.aborted) { const stat = await this.ufs.stat(fileCid) const totalSize = Number(stat.fileSize) const downloadedSize = Number(stat.localFileSize) @@ -465,7 +498,7 @@ export class IpfsFileManagerService extends EventEmitter { _logger.info(`Got block with size (in bytes)`, entry.byteLength) } } catch (e) { - if (this.cancelledDownloads.has(fileCid.toString())) { + if (controller.signal.aborted) { _logger.warn(`Cancelling download`) downloading = false break @@ -476,7 +509,7 @@ export class IpfsFileManagerService extends EventEmitter { // I don't love that I'm doing this but just writing the files straight from the cat operation above ends up giving you a corrupt final file // This gives us all blocks as they are - if (!this.cancelledDownloads.has(fileCid.toString())) { + if (!controller.signal.aborted) { try { const entries = this.ufs.cat(fileCid, baseCatOptions) for await (const entry of entries) { @@ -493,7 +526,7 @@ export class IpfsFileManagerService extends EventEmitter { }) } } catch (e) { - if (this.cancelledDownloads.has(fileCid.toString())) { + if (controller.signal.aborted) { _logger.warn(`Cancelling download`) } } @@ -501,31 +534,37 @@ export class IpfsFileManagerService extends EventEmitter { writeStream.end() - await downloadCompletedOrCanceled + try { + await downloadCompletedOrCanceled + } catch (e) { + this.logger.error(`Error while waiting for download to be completed or canceled`, e) + } clearInterval(updateDownloadStatusWithTransferSpeed) const fileState = this.files.get(fileMetadata.cid) - if (!fileState) { + if (!fileState && !controller.signal.aborted) { this.logger.error(`No saved data for file cid ${fileMetadata.cid}`) return } - if (this.cancelledDownloads.has(fileMetadata.cid)) 
{ - this.files.set(fileMetadata.cid, { - ...fileState, - downloadedBytes: 0, - transferSpeed: 0, - }) + if (controller.signal.aborted) { + if (fileState != null) { + this.files.set(fileMetadata.cid, { + ...fileState, + downloadedBytes: 0, + transferSpeed: 0, + }) + } + await this.updateStatus(fileMetadata.cid, DownloadState.Canceled) - this.cancelledDownloads.delete(fileMetadata.cid) - this.controllers.delete(fileMetadata.cid) this.files.delete(fileMetadata.cid) + this.controllers.delete(fileMetadata.cid) return } this.files.set(fileMetadata.cid, { - ...fileState, + ...fileState!, transferSpeed: 0, downloadedBytes: Number((await this.ufs.stat(fileCid)).localFileSize), }) diff --git a/packages/backend/src/nest/ipfs-file-manager/ipfs-file-manager.types.ts b/packages/backend/src/nest/ipfs-file-manager/ipfs-file-manager.types.ts index 1ebb8b7214..fdeccd5e63 100644 --- a/packages/backend/src/nest/ipfs-file-manager/ipfs-file-manager.types.ts +++ b/packages/backend/src/nest/ipfs-file-manager/ipfs-file-manager.types.ts @@ -1,3 +1,5 @@ +import { CID } from 'multiformats' + export enum IpfsFilesManagerEvents { // Incoming evetns DOWNLOAD_FILE = 'downloadFile', @@ -38,3 +40,7 @@ export interface ExportProgress { */ fileSize: bigint } + +export interface ExportWalk { + cid: CID +} diff --git a/packages/backend/src/nest/ipfs/ipfs.service.ts b/packages/backend/src/nest/ipfs/ipfs.service.ts index 00857b202f..2a3272453c 100644 --- a/packages/backend/src/nest/ipfs/ipfs.service.ts +++ b/packages/backend/src/nest/ipfs/ipfs.service.ts @@ -7,6 +7,7 @@ import { LevelDatastore } from 'datastore-level' import { LevelBlockstore, LevelBlockstoreInit } from 'blockstore-level' import { Libp2pService } from '../libp2p/libp2p.service' import { DatabaseOptions, Level } from 'level' +import { BITSWAP_PROTOCOL } from '../libp2p/libp2p.const' type StoreInit = { blockstore?: Omit @@ -43,20 +44,20 @@ export class IpfsService { await this.initializeStores() this.logger.info(`Creating Helia instance`) + const bitstwapInstance = bitswap({ + incomingStreamTimeout: 60_000, + sendBlocksTimeout: 30_000, + sendBlocksDebounce: 100, + // @ts-expect-error This is part of the config interface but it isn't typed that way + messageReceiveTimeout: 30_000, + protocol: BITSWAP_PROTOCOL, + }) ipfs = await createHelia({ start: false, libp2p: libp2pInstance, blockstore: this.blockstore!, datastore: this.datastore!, - blockBrokers: [ - bitswap({ - incomingStreamTimeout: 60_000, - sendBlocksTimeout: 30_000, - sendBlocksDebounce: 100, - // @ts-expect-error This is part of the config interface but it isn't typed that way - messageReceiveTimeout: 30_000, - }), - ], + blockBrokers: [bitstwapInstance], }) this.ipfsInstance = ipfs } catch (error) { diff --git a/packages/backend/src/nest/libp2p/libp2p.const.ts b/packages/backend/src/nest/libp2p/libp2p.const.ts new file mode 100644 index 0000000000..629b60eb75 --- /dev/null +++ b/packages/backend/src/nest/libp2p/libp2p.const.ts @@ -0,0 +1,2 @@ +export const BITSWAP_PROTOCOL = '/quiet/ipfs/bitswap/1.2.0' +export const WEBSOCKET_CIPHER_SUITE = 'TLS_AES_256_GCM_SHA384' diff --git a/packages/backend/src/nest/libp2p/libp2p.service.ts b/packages/backend/src/nest/libp2p/libp2p.service.ts index 3f65637a89..16a6be343a 100644 --- a/packages/backend/src/nest/libp2p/libp2p.service.ts +++ b/packages/backend/src/nest/libp2p/libp2p.service.ts @@ -1,10 +1,9 @@ import { gossipsub } from '@chainsafe/libp2p-gossipsub' import { noise, pureJsCrypto } from '@chainsafe/libp2p-noise' -import { plaintext } from 
'@libp2p/plaintext' import { yamux } from '@chainsafe/libp2p-yamux' import { identify, identifyPush } from '@libp2p/identify' -import { PeerId, type Libp2p } from '@libp2p/interface' +import { Stream, type Libp2p } from '@libp2p/interface' import { kadDHT } from '@libp2p/kad-dht' import { keychain } from '@libp2p/keychain' import { peerIdFromString } from '@libp2p/peer-id' @@ -39,6 +38,7 @@ import { } from './libp2p.types' import { createLogger } from '../common/logger' import { Libp2pDatastore } from './libp2p.datastore' +import { WEBSOCKET_CIPHER_SUITE, BITSWAP_PROTOCOL } from './libp2p.const' const KEY_LENGTH = 32 export const LIBP2P_PSK_METADATA = '/key/swarm/psk/1.0.0/\n/base16/\n' @@ -256,8 +256,10 @@ export class Libp2pService extends EventEmitter { maxConnections: 20, // TODO: increase? dialTimeout: 120_000, maxParallelDials: 10, - inboundUpgradeTimeout: 60_000, + inboundUpgradeTimeout: 30_000, + outboundUpgradeTimeout: 30_000, protocolNegotiationTimeout: 10_000, + maxDialQueueLength: 500, }, privateKey: params.peerId.privKey, addresses: { listen: params.listenAddresses }, @@ -280,17 +282,21 @@ export class Libp2pService extends EventEmitter { filter: filters.all, websocket: { agent: params.agent, + handshakeTimeout: 15_000, + ciphers: WEBSOCKET_CIPHER_SUITE, + followRedirects: true, }, localAddress: params.localAddress, targetPort: params.targetPort, + closeOnEnd: true, }), ], services: { - ping: ping(), + ping: ping({ timeout: 30_000 }), pubsub: gossipsub({ // neccessary to run a single peer allowPublishToZeroTopicPeers: true, - fallbackToFloodsub: true, + fallbackToFloodsub: false, emitSelf: true, debugName: params.peerId.peerId.toString(), doPX: true, @@ -301,6 +307,7 @@ export class Libp2pService extends EventEmitter { dht: kadDHT({ allowQueryWithZeroPeers: true, clientMode: false, + initialQuerySelfInterval: 500, }), }, }) diff --git a/packages/backend/src/nest/storage/storage.types.ts b/packages/backend/src/nest/storage/storage.types.ts index db8f3a0879..20bbf4039b 100644 --- a/packages/backend/src/nest/storage/storage.types.ts +++ b/packages/backend/src/nest/storage/storage.types.ts @@ -31,6 +31,7 @@ export enum UnixFSEvents { GET_BLOCK = 'blocks:get:blockstore:get', WANT_BLOCK = 'bitswap:want-block:block', DOWNLOAD_BLOCK = 'unixfs:exporter:progress:unixfs:file', + PUT_BLOCK = 'blocks:get:blockstore:put', } export interface CsrReplicatedPromiseValues { diff --git a/packages/desktop/src/renderer/components/Channel/File/FileComponent/FileComponent.test.tsx b/packages/desktop/src/renderer/components/Channel/File/FileComponent/FileComponent.test.tsx index a98e46e4dd..3a00634f70 100644 --- a/packages/desktop/src/renderer/components/Channel/File/FileComponent/FileComponent.test.tsx +++ b/packages/desktop/src/renderer/components/Channel/File/FileComponent/FileComponent.test.tsx @@ -44,7 +44,7 @@ describe('FileComponent', () => {
@@ -67,7 +67,7 @@ describe('FileComponent', () => { >
my-file-name-goes-here-an-isnt-truncated .zip diff --git a/packages/desktop/src/renderer/components/Channel/File/FileComponent/FileComponent.tsx b/packages/desktop/src/renderer/components/Channel/File/FileComponent/FileComponent.tsx index d9c7c14015..b90e261ffa 100644 --- a/packages/desktop/src/renderer/components/Channel/File/FileComponent/FileComponent.tsx +++ b/packages/desktop/src/renderer/components/Channel/File/FileComponent/FileComponent.tsx @@ -30,7 +30,7 @@ const FileComponentStyled = styled('div')(({ theme }) => ({ padding: '16px', backgroundColor: theme.palette.colors.white, borderRadius: '8px', - border: `1px solid ${theme.palette.colors.border01}`, + border: `1px solid ${theme.palette.colors.border02}`, [`& .${classes.icon}`]: { minWidth: '40px', @@ -156,7 +156,7 @@ export const FileComponent: React.FC = ({ variant='indeterminate' size={18} thickness={4} - style={{ position: 'absolute', color: theme.palette.colors.lightGray }} + style={{ position: 'absolute', color: theme.palette.colors.purple }} /> ) case DownloadState.Downloading: @@ -167,7 +167,7 @@ export const FileComponent: React.FC = ({ size={18} thickness={4} value={100} - style={{ position: 'absolute', color: theme.palette.colors.gray }} + style={{ position: 'absolute', color: theme.palette.colors.purple }} /> = ({
            {renderIcon()}
- + {name} {ext}
diff --git a/packages/desktop/src/renderer/components/widgets/channels/NestedMessageContent.test.tsx b/packages/desktop/src/renderer/components/widgets/channels/NestedMessageContent.test.tsx
index 153b25b029..e1ad564b85 100644
--- a/packages/desktop/src/renderer/components/widgets/channels/NestedMessageContent.test.tsx
+++ b/packages/desktop/src/renderer/components/widgets/channels/NestedMessageContent.test.tsx
@@ -219,7 +219,7 @@ describe('NestedMessageContent', () => {
            data-testid="messagesGroupContent-0"
          >
@@ -235,7 +235,7 @@ describe('NestedMessageContent', () => {
             aria-valuenow="100"
             class="MuiCircularProgress-root MuiCircularProgress-determinate MuiCircularProgress-colorPrimary css-1036n7b-MuiCircularProgress-root"
             role="progressbar"
-            style="width: 18px; height: 18px; transform: rotate(-90deg); position: absolute; color: rgb(231, 231, 231);"
+            style="width: 18px; height: 18px; transform: rotate(-90deg); position: absolute; color: rgb(82, 28, 116);"
           > { >
             test
             .png
diff --git a/tsconfig.build.json b/tsconfig.build.json
index 1d01576826..514ed63105 100644
--- a/tsconfig.build.json
+++ b/tsconfig.build.json
@@ -12,7 +12,6 @@
     "./packages/@types",
     "./node_modules/@types",
     "./3rd-party/auth/packages/**/src",
-    "./3rd-party/js-libp2p-noise/src/**/*",
   ],
   "lib": [
     "ES2020",