Commit eb6b379

Merge pull request #27 from dimitrov-d/feature/cli-updates
CLI file upload updates
2 parents (6d7af47 + 38d5d9c), commit eb6b379

8 files changed: +121 -75 lines changed

packages/cli/package.json

+1 -1

@@ -1,7 +1,7 @@
{
  "name": "@apillon/cli",
  "description": "▶◀ Apillon CLI tools ▶◀",
-  "version": "1.2.1",
+  "version": "1.2.2",
  "author": "Apillon",
  "license": "MIT",
  "main": "./dist/index.js",

packages/cli/src/modules/storage/storage.commands.ts

+1
@@ -68,6 +68,7 @@ export function createStorageCommands(cli: Command) {
    .option('-w, --wrap', 'Wrap uploaded files to an IPFS directory')
    .option('-p, --path <string>', 'Path to upload files to')
    .option('--await', 'await file CIDs to be resolved')
+    .option('--ignore', 'ignore files from .gitignore file')
    .action(async function (path: string) {
      await uploadFromFolder(path, this.optsWithGlobals());
    });

packages/cli/src/modules/storage/storage.service.ts

+1
@@ -58,6 +58,7 @@ export async function uploadFromFolder(
      wrapWithDirectory: !!optsWithGlobals.wrap,
      directoryPath: optsWithGlobals.path,
      awaitCid: !!optsWithGlobals.await,
+      ignoreFiles: !!optsWithGlobals.ignore,
    });
    console.log(files);
  });
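
The new `--ignore` flag is forwarded to the SDK as the `ignoreFiles` upload parameter. The sketch below is a minimal illustration of that mapping, assuming only the option and parameter names visible in this diff; the helper itself is not part of the codebase.

// Hypothetical helper illustrating how the CLI options map onto the SDK's
// upload parameters (names taken from this diff; the function is illustrative only).
interface CliUploadOptions {
  wrap?: boolean;
  path?: string;
  await?: boolean;
  ignore?: boolean; // new in this PR
}

function toUploadParams(opts: CliUploadOptions) {
  return {
    wrapWithDirectory: !!opts.wrap,
    directoryPath: opts.path,
    awaitCid: !!opts.await,
    ignoreFiles: !!opts.ignore, // --ignore becomes ignoreFiles
  };
}

// e.g. passing --ignore yields { ..., ignoreFiles: true }
console.log(toUploadParams({ ignore: true }));

Note that `!!opts.ignore` turns an omitted flag into an explicit `false`, so from the CLI the ignore behaviour is opt-in even though the SDK's `ignoreFiles` option defaults to `true`.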

packages/sdk/src/modules/storage/storage-bucket.ts

+12 -10

@@ -130,7 +130,7 @@ export class StorageBucket extends ApillonModel {
    );

    if (!params?.awaitCid) {
-      return uploadedFiles;
+      return this.getUploadedFiles(sessionUuid, uploadedFiles.length);
    }

    return await this.resolveFileCIDs(sessionUuid, uploadedFiles.length);
@@ -153,7 +153,7 @@ export class StorageBucket extends ApillonModel {
    );

    if (!params?.awaitCid) {
-      return uploadedFiles;
+      return this.getUploadedFiles(sessionUuid, uploadedFiles.length);
    }

    return await this.resolveFileCIDs(sessionUuid, uploadedFiles.length);
@@ -187,14 +187,7 @@ export class StorageBucket extends ApillonModel {
    let retryTimes = 0;
    ApillonLogger.log('Resolving file CIDs...');
    while (resolvedFiles.length === 0 || !resolvedFiles.every((f) => !!f.CID)) {
-      resolvedFiles = (await this.listFiles({ sessionUuid, limit })).items.map(
-        (file) => ({
-          fileName: file.name,
-          fileUuid: file.uuid,
-          CID: file.CID,
-          CIDv1: file.CIDv1,
-        }),
-      );
+      resolvedFiles = await this.getUploadedFiles(sessionUuid, limit);

      await new Promise((resolve) => setTimeout(resolve, 1000));
      if (++retryTimes >= 15) {
@@ -205,6 +198,15 @@ export class StorageBucket extends ApillonModel {
    return resolvedFiles;
  }

+  private async getUploadedFiles(sessionUuid: string, limit: number) {
+    return (await this.listFiles({ sessionUuid, limit })).items.map((file) => ({
+      fileName: file.name,
+      fileUuid: file.uuid,
+      CID: file.CID,
+      // CIDv1: file.CIDv1,
+    }));
+  }
+
  //#region IPNS methods

  /**
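
With this change, uploads that do not set `awaitCid` now resolve with file metadata from `getUploadedFiles` instead of the raw upload list. A hedged usage sketch follows, assuming the SDK's `Storage` entry point and constructor config shape; the credentials, bucket UUID, and folder path are placeholders.

// Usage sketch only: the import and constructor config shape are assumptions,
// and all quoted identifiers are placeholders.
import { Storage } from '@apillon/sdk';

const storage = new Storage({ key: 'your-api-key', secret: 'your-api-secret' });

async function main() {
  // Without awaitCid, the call now resolves with { fileName, fileUuid, CID }
  // entries for the upload session; CIDs may still be unresolved at this point.
  const files = await storage
    .bucket('your-bucket-uuid')
    .uploadFromFolder('./public', { awaitCid: false });

  console.log(files.map((f) => `${f.fileName}: ${f.CID ?? 'pending'}`));
}

main().catch(console.error);
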
@@ -0,0 +1 @@
+index.html

packages/sdk/src/tests/storage.test.ts

+12
@@ -107,6 +107,18 @@ describe('Storage tests', () => {
    expect(files.every((f) => !!f.CID)).toBeTruthy();
  });

+  test('upload files from folder with ignoreFiles = false', async () => {
+    const uploadDir = resolve(__dirname, './helpers/website/');
+
+    console.time('File upload complete');
+    const files = await storage
+      .bucket(bucketUuid)
+      .uploadFromFolder(uploadDir, { ignoreFiles: false });
+    expect(files.length).toEqual(3); // .gitignore and index.html are not ignored
+
+    console.timeEnd('File upload complete');
+  });
+
  test('upload files from buffer', async () => {
    const html = fs.readFileSync(
      resolve(__dirname, './helpers/website/index.html'),

packages/sdk/src/types/storage.ts

+7
@@ -74,8 +74,15 @@ export interface IFileUploadRequest {

  /**
   * If set to true, the upload action will wait until files receive a CID from IPFS before returning a result
+   * @default false
   */
  awaitCid?: boolean;
+
+  /**
+   * If set to true, will ignore all the files inside the .gitignore file, including .git and .gitignore itself
+   * @default true
+   */
+  ignoreFiles?: boolean;
}

export interface IFileUploadResponse {
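
The two `@default` annotations interact with the CLI in a subtle way: `ignoreFiles` falls back to `true` only when it is left `undefined`, while an explicit `false` (which the CLI sends whenever `--ignore` is omitted) disables the filtering. Below is a minimal sketch of that resolution rule, mirroring the `ignoreFiles = true` default parameter in `readFilesFromFolder`; the helper name is illustrative.

// Illustrative only: mirrors readFilesFromFolder(folderPath, params?.ignoreFiles)
// with its `ignoreFiles = true` default parameter.
function resolveIgnoreFiles(ignoreFiles?: boolean): boolean {
  // A default parameter only applies to `undefined`; an explicit `false` wins.
  return ignoreFiles ?? true;
}

console.log(resolveIgnoreFiles());          // true  -> .gitignore rules applied
console.log(resolveIgnoreFiles(false));     // false -> nothing is skipped
console.log(resolveIgnoreFiles(undefined)); // true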

packages/sdk/src/util/file-utils.ts

+86 -64

@@ -12,69 +12,6 @@ import {
import { LogLevel } from '../types/apillon';
import { randomBytes } from 'crypto';

-function listFilesRecursive(
-  folderPath: string,
-  fileList = [],
-  relativePath = '',
-) {
-  const gitignorePath = path.join(folderPath, '.gitignore');
-  const gitignorePatterns = fs.existsSync(gitignorePath)
-    ? fs.readFileSync(gitignorePath, 'utf-8').split('\n')
-    : [];
-  gitignorePatterns.push('.git'); // Always ignore .git folder.
-
-  const files = fs.readdirSync(folderPath);
-  for (const file of files) {
-    const fullPath = path.join(folderPath, file);
-    const relativeFilePath = path.join(relativePath, file);
-
-    // Skip file if it matches .gitignore patterns
-    if (
-      gitignorePatterns.some((pattern) =>
-        new RegExp(pattern).test(relativeFilePath),
-      )
-    ) {
-      continue;
-    }
-
-    if (fs.statSync(fullPath).isDirectory()) {
-      listFilesRecursive(fullPath, fileList, `${relativeFilePath}/`);
-    } else {
-      fileList.push({ fileName: file, path: relativePath, index: fullPath });
-    }
-  }
-  return fileList.sort((a, b) => a.fileName.localeCompare(b.fileName));
-}
-
-async function uploadFilesToS3(
-  uploadLinks: (FileMetadata & { url?: string })[],
-  files: (FileMetadata & { index?: string })[],
-) {
-  const s3Api = axios.create();
-  const uploadWorkers = [];
-
-  for (const link of uploadLinks) {
-    // console.log(link.url);
-    const file = files.find(
-      (x) => x.fileName === link.fileName && (!x.path || x.path === link.path),
-    );
-    if (!file) {
-      throw new Error(`Can't find file ${link.path}${link.fileName}!`);
-    }
-    uploadWorkers.push(
-      new Promise<void>(async (resolve, _reject) => {
-        // If uploading from local folder then read file, otherwise directly upload content
-        const content = file.index ? fs.readFileSync(file.index) : file.content;
-        await s3Api.put(link.url, content);
-        ApillonLogger.log(`File uploaded: ${file.fileName}`);
-        resolve();
-      }),
-    );
-  }
-
-  await Promise.all(uploadWorkers);
-}
-
export async function uploadFiles(
  folderPath: string,
  apiPrefix: string,
@@ -88,10 +25,11 @@ export async function uploadFiles(
  } else {
    throw new Error('Invalid upload parameters received');
  }
+
  // If folderPath param passed, read files from local storage
  if (folderPath && !files?.length) {
    try {
-      files = listFilesRecursive(folderPath);
+      files = readFilesFromFolder(folderPath, params?.ignoreFiles);
    } catch (err) {
      ApillonLogger.log(err.message, LogLevel.ERROR);
      throw new Error(`Error reading files in ${folderPath}`);
@@ -124,6 +62,90 @@ export async function uploadFiles(
  return { sessionUuid, files: uploadedFiles.flatMap((f) => f) };
}

+function readFilesFromFolder(
+  folderPath: string,
+  ignoreFiles = true,
+): FileMetadata[] {
+  const gitignorePatterns = [];
+  if (ignoreFiles) {
+    ApillonLogger.log('Ignoring files from .gitignore during upload.');
+
+    const gitignorePath = path.join(folderPath, '.gitignore');
+    if (fs.existsSync(gitignorePath)) {
+      gitignorePatterns.push(
+        ...fs.readFileSync(gitignorePath, 'utf-8').split('\n'),
+      );
+    }
+    // Ignore the following files by default when ignoreFiles = true
+    gitignorePatterns.push(
+      '\\.git/?$',
+      '\\.gitignore$',
+      'node_modules/?',
+      '\\.env$',
+    );
+  }
+
+  const folderFiles = listFilesRecursive(folderPath);
+  return folderFiles.filter(
+    (file) =>
+      // Skip files that match .gitignore patterns
+      !gitignorePatterns.some(
+        (pattern) =>
+          new RegExp(pattern).test(file.fileName) ||
+          new RegExp(pattern).test(file.path),
+      ),
+  );
+}
+
+function listFilesRecursive(
+  folderPath: string,
+  fileList = [],
+  relativePath = '',
+): FileMetadata[] {
+  const files = fs.readdirSync(folderPath);
+
+  for (const file of files) {
+    const fullPath = path.join(folderPath, file);
+    const relativeFilePath = path.join(relativePath, file);
+
+    if (fs.statSync(fullPath).isDirectory()) {
+      listFilesRecursive(fullPath, fileList, `${relativeFilePath}/`);
+    } else {
+      fileList.push({ fileName: file, path: relativePath, index: fullPath });
+    }
+  }
+  return fileList.sort((a, b) => a.fileName.localeCompare(b.fileName));
+}
+
+async function uploadFilesToS3(
+  uploadLinks: (FileMetadata & { url?: string })[],
+  files: (FileMetadata & { index?: string })[],
+) {
+  const s3Api = axios.create();
+  const uploadWorkers = [];
+
+  for (const link of uploadLinks) {
+    // console.log(link.url);
+    const file = files.find(
+      (x) => x.fileName === link.fileName && (!x.path || x.path === link.path),
+    );
+    if (!file) {
+      throw new Error(`Can't find file ${link.path}${link.fileName}!`);
+    }
+    uploadWorkers.push(
+      new Promise<void>(async (resolve, _reject) => {
+        // If uploading from local folder then read file, otherwise directly upload content
+        const content = file.index ? fs.readFileSync(file.index) : file.content;
+        await s3Api.put(link.url, content);
+        ApillonLogger.log(`File uploaded: ${file.fileName}`);
+        resolve();
+      }),
+    );
+  }
+
+  await Promise.all(uploadWorkers);
+}
+
function chunkify(files: FileMetadata[], chunkSize = 10): FileMetadata[][] {
  // Divide files into chunks for parallel processing and uploading
  const fileChunks: FileMetadata[][] = [];
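
The new `readFilesFromFolder` treats each `.gitignore` line as a regular expression and tests it against both the file name and its relative path, alongside the default patterns added above. The standalone snippet below illustrates that filter against a made-up file list; it is not code from the repository.

// Standalone illustration (not repository code): the pattern list is copied from
// readFilesFromFolder's defaults; the sample entries are invented.
const defaultPatterns = ['\\.git/?$', '\\.gitignore$', 'node_modules/?', '\\.env$'];

const sampleFiles = [
  { fileName: '.gitignore', path: '' },
  { fileName: 'index.html', path: '' },
  { fileName: 'style.css', path: 'assets/' },
  { fileName: '.env', path: '' },
  { fileName: 'lib.js', path: 'node_modules/' },
];

const kept = sampleFiles.filter(
  (file) =>
    !defaultPatterns.some(
      (pattern) =>
        new RegExp(pattern).test(file.fileName) ||
        new RegExp(pattern).test(file.path),
    ),
);

console.log(kept.map((f) => f.path + f.fileName)); // [ 'index.html', 'assets/style.css' ]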
