deno.land / std@0.224.0 / streams / reader_from_stream_reader_test.ts

reader_from_stream_reader_test.ts
View Documentation
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
import { assert, assertEquals } from "../assert/mod.ts";import { copy } from "../io/copy.ts";import { readerFromStreamReader } from "./reader_from_stream_reader.ts";import { Buffer } from "../io/buffer.ts";
/**
 * Builds a `Uint8Array` of length `bytes`, every element set to the char
 * code of `c`. `c` must be exactly one character long (asserted).
 */
function repeat(c: string, bytes: number): Uint8Array {
  assertEquals(c.length, 1);
  return new Uint8Array(bytes).fill(c.charCodeAt(0));
}
// Wrapping a stream reader must yield the original chunks back, with no
// more reads than there were enqueued chunks.
Deno.test("readerFromStreamReader()", async function () {
  const chunks: string[] = ["hello", "deno", "land"];
  const expected = chunks.slice();
  const collected: Uint8Array[] = [];
  const readableStream = ReadableStream.from(chunks)
    .pipeThrough(new TextEncoderStream());
  const decoder = new TextDecoder();
  const reader = readerFromStreamReader(readableStream.getReader());
  for (let reads = 0; ; reads++) {
    const buf = new Uint8Array(1024);
    const bytesRead = await reader.read(buf);
    if (bytesRead === null) break;
    collected.push(buf.subarray(0, bytesRead));
    // Guard against the reader fabricating extra reads beyond the source.
    assert(reads < expected.length);
  }
  assertEquals(
    expected,
    collected.map((chunk) => decoder.decode(chunk)),
  );
});
Deno.test("readerFromStreamReader() handles big chunks", async function () { const bufSize = 1024; const chunkSize = 3 * bufSize; const writer = new Buffer();
// A readable stream can enqueue chunks bigger than Copy bufSize // Reader returned by toReader should enqueue exceeding bytes const chunks: string[] = [ "a".repeat(chunkSize), "b".repeat(chunkSize), "c".repeat(chunkSize), ]; const expected = chunks.slice(); const readableStream = ReadableStream.from(chunks) .pipeThrough(new TextEncoderStream());
const reader = readerFromStreamReader(readableStream.getReader()); const n = await copy(reader, writer, { bufSize });
const expectedWritten = chunkSize * expected.length; assertEquals(n, chunkSize * expected.length); assertEquals(writer.length, expectedWritten);});
Deno.test("readerFromStreamReader() handles irregular chunks", async function () { const bufSize = 1024; const chunkSize = 3 * bufSize; const writer = new Buffer();
// A readable stream can enqueue chunks bigger than Copy bufSize // Reader returned by toReader should enqueue exceeding bytes const chunks: Uint8Array[] = [ repeat("a", chunkSize), repeat("b", chunkSize + 253), repeat("c", chunkSize + 8), ]; const expected = new Uint8Array( chunks .slice() .map((chunk) => [...chunk]) .flat(), ); const readableStream = ReadableStream.from(chunks);
const reader = readerFromStreamReader(readableStream.getReader());
const n = await copy(reader, writer, { bufSize }); assertEquals(n, expected.length); assertEquals(expected, writer.bytes());});
std

Version Info

Tagged at
6 months ago