// index.ts
import * as fs from 'fs';
import * as request from 'superagent';
import * as yargs from 'yargs';

const DEFAULT_BATCH_SIZE = 200;
const VALID_TYPES = ['school', 'section', 'student', 'teacher', 'term'];
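
// Illustrative invocation (filenames and values are placeholders; the exact
// entry point depends on how this module is built/run):
//
//   node index.js --creds creds.json --file students.json --type student --batch-size 200
//
// creds.json is expected to match the Creds interface at the bottom of this
// module, e.g.:
//
//   {"url": "https://api.example.com", "subId": "123", "token": "secret"}
//
// The data file is a JSON array of records for the chosen type; when the batch
// size is 1, each record must include an `id` field, which is used to build a
// per-record URL.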

/**
 * Output progress indicator.
 */
function displayProgress() {
  process.stdout.write('.');
}

/**
 * Output error message and terminate process.
 * @param msg message to print before terminating process
 */
function fatal(msg: string) {
  console.error(msg);
  process.exit(1);
}

/**
 * Loads a JSON file and returns its parsed contents.
 * @param filename file to load
 */
function loadFile(filename: string) {
  try {
    if (!filename.endsWith('.json')) {
      throw new Error(`Invalid filename: '${filename}'. Must end in '.json'.`);
    }
    const stats = fs.statSync(filename);
    if (!stats.isFile()) {
      throw new Error(`Not a regular file: '${filename}'`);
    }
    return JSON.parse(fs.readFileSync(filename).toString());
  } catch (e) {
    fatal(e.message);
  }
}

/**
 * Loads the credentials and data files, then returns the requisite data from
 * each.
 * @param opts sync options
 */
function loadFiles(opts: SyncOpts) {
  const creds: Creds = loadFile(opts.creds);
  const data: any[] = loadFile(opts.data);
  const url = `${creds.url}/api/subscriptions/${creds.subId}/${opts.type}s`;
  return {
    data,
    subId: creds.subId,
    token: creds.token,
    url,
  };
}

/**
 * Submit some data to GradeCam for processing.
 * @param data data to submit
 * @param token API token
 * @param url API endpoint
 */
async function submit(data: any, token: string, url: string) {
  const response = await (
    request
      .post(url)
      .send(data)
      .auth('insight', token)
      .set('Accept', 'application/json')
  );
  if (!response.ok) {
    throw new Error(JSON.stringify(response.body));
  }
  return response.body;
}
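
// For reference, a batched submit() call above is roughly equivalent to the
// following request (placeholder host, subscription id, token, and body; the
// real values come from the credentials and data files):
//
//   curl -u insight:TOKEN \
//        -H 'Content-Type: application/json' -H 'Accept: application/json' \
//        -d '[{"id": "..."}, ...]' \
//        https://api.example.com/api/subscriptions/SUB_ID/students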

/**
 * Submit the data in batches.
 * @param opts sync options
 */
async function submitBatches(opts: SyncOpts) {
  const {data, token, url} = loadFiles(opts);
  let index = 0;
  let numBatches = 0;
  let batch: any;
  let results: any = {};
  let processed = 0;
  const total = data.length;
  console.log('Total objects:', total, 'sending in batches of:', opts.batchSize);
  try {
    if (opts.batchSize === 1) {
      // Batch size 1: submit each record individually to its own endpoint.
      for (const obj of data) {
        batch = obj;
        await submit(obj, token, `${url}/${encodeURIComponent(obj.id)}`);
        displayProgress();
        index++;
        numBatches++;
        processed++;
      }
      return '';
    } else {
      // Otherwise slice the data into batches and accumulate per-key results.
      while ((batch = data.slice(index, index + opts.batchSize)).length) {
        const batchResult = await submit(batch, token, url);
        let batchTotal = 0;
        Object.keys(batchResult).forEach(key => {
          results[key] = (results[key] || 0) + batchResult[key];
          batchTotal += batchResult[key];
        });
        displayProgress();
        index += opts.batchSize;
        numBatches++;
        processed += batchTotal;
      }
      return results;
    }
  } catch (e) {
    console.error('\n\nERROR:', e.message);
    throw new Error(`failed at index ${index}, data: ${JSON.stringify(batch, null, ' ')}`);
  } finally {
    console.log(`\nProcessed ${processed}/${total} in ${numBatches} batches.`);
  }
}

/**
 * Submit data in a single POST.
 * @param opts sync options
 */
async function submitData(opts: SyncOpts) {
  const {data, token, url} = loadFiles(opts);
  const total = data.length;
  console.log('Submitting', total, 'objects all at once.');
  return await submit(data, token, url);
}
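
// Note on batch-size semantics, as implemented by the functions above and
// selected in syncData() below: a batch size of 0 sends the entire data file
// in one POST, a batch size of 1 POSTs each record to its own `${url}/${id}`
// endpoint, and any larger value slices the file into POSTs of that many
// records each.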

/**
 * Sync data to remote system.
 * @param opts sync opts
 */
async function syncData(opts: SyncOpts) {
  if (!opts.batchSize) {
    return await submitData(opts);
  } else {
    return await submitBatches(opts);
  }
}

export async function main() {
  const parsed = yargs
    .option('batch-size', {
      alias: 'b',
      default: DEFAULT_BATCH_SIZE,
      describe: 'batch size (0: entire file)',
      type: 'number',
    })
    .option('creds', {
      alias: 'c',
      demandOption: true,
      description: 'credentials file',
      type: 'string',
    })
    .option('file', {
      alias: 'f',
      demandOption: true,
      description: 'data file',
      type: 'string',
    })
    .option('type', {
      alias: 't',
      choices: VALID_TYPES,
      demandOption: true,
      description: 'object type',
      type: 'string',
    })
    .usage(`Usage: $0 [-b number] -c credFile -f dataFile -t type`)
    .help()
    .argv;
  const opts: SyncOpts = {
    batchSize: parsed.batchSize,
    creds: parsed.creds,
    data: parsed.file,
    type: parsed.type.toLowerCase(),
  };
  const start = Date.now();
  try {
    const results = await syncData(opts);
    if (results) {
      console.log(results);
    }
    console.log(`elapsed: ${(Date.now() - start) / 1000}s`);
  } catch (e) {
    console.log(`elapsed: ${(Date.now() - start) / 1000}s`);
    fatal(e.message);
  }
}

interface Creds {
  subId: string;
  token: string;
  url: string;
}

interface SyncOpts {
  creds: string;
  data: string;
  type: string;
  batchSize: number;
}

if (!module.parent) {
  main();
}