本文整理汇总了TypeScript中JSONStream.parse函数的典型用法代码示例。如果您正苦于以下问题:TypeScript parse函数的具体用法?TypeScript parse怎么用?TypeScript parse使用的例子?那么恭喜您, 这里精选的函数代码示例或许可以为您提供帮助。
在下文中一共展示了parse函数的5个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的TypeScript代码示例。
示例1: request
// Recursively fetches successive pages of an HTTP resource, following the
// next-page link extracted by nextLink() from each response, and pushes
// results into the enclosing Rx observer. Recursion stops when there is no
// next URI or the subscription has been disposed.
// NOTE(review): relies on `observer`, `disposed`, `initialUri`, `nextLink`,
// `expr`, `assign`, `request` and `JSONStream` from the enclosing scope,
// which is not visible in this excerpt.
let go = (options: request.Options) => {
  // Next page URI, filled in once the 'response' event fires; stays
  // undefined on the last page, which terminates the recursion above.
  let nextUri: string = undefined;
  if (options.uri === undefined || disposed) {
    observer.onCompleted();
  } else {
    console.log('fetching', options.uri);
    let incoming = request(options)
      .on('error', (e: any) => observer.onError(e))
      .on('response', (resp: IncomingMessage) => {
        nextUri = nextLink(resp);
        // NOTE: Only store the etag from the request to the initial URI
        if (options.uri === initialUri) {
          // NOTE(review): `etag` is assigned but never used — the caching
          // logic the comment above refers to appears to be missing here;
          // confirm whether it was removed or lives outside this excerpt.
          let etag = resp.headers['etag'];
        }
      });
    if (expr) {
      // With a JSONPath expression: stream-parse the body and emit each
      // matching item individually; once the page ends, recurse with the
      // next URI (undefined => completion on the next call).
      incoming.pipe(JSONStream.parse(expr))
        .on('error', (e: any) => observer.onError(e))
        .on('data', (data: any) => {
          // NOTE: Always use the initial URI as the key for cache
          observer.onNext(data);
        })
        .on('end', () => go(assign({}, options, { uri: nextUri })))
    } else {
      // Without an expression: buffer the whole body, parse it once, emit
      // a single value and complete. NOTE(review): this branch does not
      // follow nextUri — confirm pagination is intentionally skipped here.
      let chunks: any[] = [];
      incoming
        .on('data', (data: string) => chunks.push(data))
        .on('end', () => {
          observer.onNext(JSON.parse(chunks.join("")))
          observer.onCompleted();
        })
    }
  }
}
}
示例2: exportSchema
/**
 * Streams an export of one database schema as stringified JSON.
 *
 * Pipeline: read rows for the schema -> parse each top-level JSON element ->
 * group rows into per-table chunks -> stringify back to a JSON stream.
 * Every stage reports failures on the pubsub channel as 'export-error'
 * events rather than throwing.
 *
 * @param db Connection to read from; dialect is chosen via isInMemory().
 * @param options schema name to export plus the pubsub error channel.
 * @returns the readable end of the export pipeline.
 */
export function exportSchema(
  db: Connection,
  { schema, pubsub }: IExportSchemaOptions
): Readable {
  // Funnel every stage's failure onto the shared error channel.
  const handleError = err => pubsub.emit('export-error', err)
  const groupTables = createGroupTablesTransform(pubsub)

  // Select the dialect-appropriate query text.
  let sql
  if (db.isInMemory()) {
    sql = hsqlQuery
  } else {
    sql = db2Query
  }

  // Build the pipeline stage by stage so each hop gets its own handler.
  const rows = db.createReadStream(sql, [schema]).on('error', handleError)
  const parsed = rows.pipe(JSONStream.parse([true])).on('error', handleError)
  const grouped = parsed.pipe(groupTables).on('error', handleError)
  return grouped.pipe(JSONStream.stringify()).on('error', handleError)
}
示例3: Promise
// Wraps a statement stream in a pull-style paging API: resolves with a page
// of rows plus a `more()` function that resumes the stream and returns a
// promise for the next page.
// NOTE(review): relies on `st`, `buffer` and `statement` from the enclosing
// scope, which is not visible in this excerpt.
return new Promise((resolve, reject) => {
  // The "current" resolve/reject pair is swapped out each time more() is
  // called, so stream events always settle the most recent page promise.
  let currentResolve = resolve
  let currentReject = reject
  const handleError = err => currentReject(err)
  const stream = st
    .asStream({
      // NOTE(review): bufferSize 130 vs. page threshold 131 below — confirm
      // this off-by-one between the two constants is intentional.
      bufferSize: 130,
    })
    .on('error', handleError)
    .pipe(JSONStream.parse([true]))
    .on('error', handleError)
  stream.on('data', (data: string[]) => {
    buffer.push(data)
    if (buffer.length >= 131) {
      // A full page accumulated: pause the stream and hand the page to the
      // current consumer.
      stream.pause()
      currentResolve({
        data: buffer.splice(0, 131),
        more() {
          // Resume streaming and re-point the stream's settlement at a
          // fresh promise for the next page.
          stream.resume()
          return new Promise(
            (resolve2, reject2) => {
              currentResolve = resolve2
              currentReject = reject2
            }
          )
        },
      })
    }
  })
  stream.on('end', () => {
    // Stream exhausted: release the statement and deliver whatever partial
    // page remains in the buffer.
    statement = undefined
    currentResolve({
      data: buffer,
    })
  })
})
示例4: dockerStream
/**
 * Consumes a Docker JSON event stream, collecting every parsed event and
 * forwarding each one to `onProgress` as it arrives.
 *
 * @param stream Readable stream of newline-delimited JSON Docker events.
 * @param onFinished Called exactly once: with (err, eventsSoFar) on failure,
 *                   or (null, allEvents) when the stream ends cleanly.
 * @param onProgress Called for each parsed event (skipped if falsy).
 */
export function dockerStream (stream: any, onFinished: (err: any, data: any) => void, onProgress: (event: any) => void) {
  const parser = JSONStream.parse();
  const output: any[] = [];
  parser.on('root', onStreamEvent);
  parser.on('error', onStreamError);
  parser.on('end', onStreamEnd);
  // Bug fix: 'error' events on the source stream were previously unhandled
  // (pipe does not forward them), which would crash the process; route them
  // through the same error path as parser errors.
  stream.on('error', onStreamError);
  stream.pipe(parser);
  function onStreamEvent (evt: any) {
    // Docker occasionally emits non-object roots; normalize to an object.
    if (!(evt instanceof Object)) {
      evt = {};
    }
    output.push(evt);
    // An event carrying an `error` field is a failure report from the
    // daemon — treat it the same as a stream-level error.
    if (evt.error) {
      return onStreamError(evt.error);
    }
    if (onProgress) {
      onProgress(evt);
    }
  }
  function onStreamError (err: any) {
    // Detach all listeners so onFinished fires exactly once.
    parser.removeListener('root', onStreamEvent);
    parser.removeListener('error', onStreamError);
    parser.removeListener('end', onStreamEnd);
    stream.removeListener('error', onStreamError);
    onFinished(err, output);
  }
  function onStreamEnd () {
    onFinished(null, output);
  }
}
示例5: extractJsonFromStream
/**
* Extracts json from stream, takes all the data in path. Currently only works for one `.tar.gz`-ed file.
*
* @param readStream
* @param path For example "items.*" to parse only array items in a json document looking like `{name: "Example file", items: [{id: 1, id: 2}]}`
* @param processData Function that processes each item
*/
public async extractJsonFromStream(
readStream: NodeJS.ReadableStream,
path: string,
processData: DataProcessFunction,
) {
const jsonParser = JSONStream.parse(path);
const dataProcessor = this.createDataProcessor(processData);
const extractor = tar.extract();
return new Promise(res => {
extractor.on('entry', (header, stream, next) => {
stream.pipe(jsonParser).pipe(dataProcessor);
stream.on('end', () => {
next(); // ready for next entry
});
stream.resume(); // just auto drain the stream
});
extractor.on('finish', res);
readStream.pipe(gunzip()).pipe(extractor);
});
}