package com.healthmarketscience.jackcess.impl;

import java.io.Flushable;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.channels.Channel;
import java.nio.channels.FileChannel;
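
/**
 * Reads and writes the fixed-size pages of an Access database file.  All
 * I/O against the underlying {@link FileChannel} goes through this class,
 * which applies the configured {@link CodecHandler} when encoding and
 * decoding pages and tracks reusable pages via the global usage map.
 */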
public class PageChannel implements Channel, Flushable {

  /** marker page number for an invalid page reference */
  static final int INVALID_PAGE_NUMBER = -1;

  /** default byte order used within an Access database file */
  public static final ByteOrder DEFAULT_BYTE_ORDER = ByteOrder.LITTLE_ENDIAN;

  /** header bytes written to a page when it is deallocated */
  private static final byte[] INVALID_PAGE_BYTE_HEADER =
    new byte[]{PageTypes.INVALID, (byte)0, (byte)0, (byte)0};

  /** page number on which the global usage map is found */
  static final int PAGE_GLOBAL_USAGE_MAP = 1;
  /** row number of the global usage map within its page */
  static final int ROW_GLOBAL_USAGE_MAP = 0;

  /** channel for the database file */
  private final FileChannel _channel;
  /** whether or not the underlying channel should be closed when this
      PageChannel is closed */
  private final boolean _closeChannel;
  /** format of the database file */
  private final JetFormat _format;
  /** whether or not to flush the channel to disk whenever the outermost
      write operation completes */
  private final boolean _autoSync;

  /** buffer written to a page's header when it is deallocated */
  private final ByteBuffer _invalidPageBytes =
    ByteBuffer.wrap(INVALID_PAGE_BYTE_HEADER);
  /** single-byte buffer used to extend the file when allocating a new page */
  private final ByteBuffer _forceBytes = ByteBuffer.allocate(1);

  /** usage map tracking pages which are free for re-use */
  private UsageMap _globalUsageMap;
  /** handler for page level encoding/decoding */
  private CodecHandler _codecHandler = DefaultCodecProvider.DUMMY_HANDLER;
  /** temp page buffer used when the codec handler cannot encode partial
      pages */
  private TempPageHolder _fullPageEncodeBufferH;
  /** temp buffer used when the codec handler cannot decode pages in place */
  private TempBufferHolder _tempDecodeBufferH;
  /** number of nested write operations currently in progress */
  private int _writeCount;
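
  /**
   * Constructor for testing purposes only ({@code testing} must be
   * {@code true}); no underlying channel is set.
   */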
  protected PageChannel(boolean testing) {
    if(!testing) {
      throw new IllegalArgumentException();
    }
    _channel = null;
    _closeChannel = false;
    _format = JetFormat.VERSION_4;
    _autoSync = false;
  }
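
  /**
   * @param channel file channel for the database file
   * @param closeChannel whether or not the given channel should be closed
   *                     when this PageChannel is closed
   * @param format format of the database file
   * @param autoSync whether or not to flush to disk when a write operation
   *                 completes
   */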
  public PageChannel(FileChannel channel, boolean closeChannel,
                     JetFormat format, boolean autoSync)
  {
    _channel = channel;
    _closeChannel = closeChannel;
    _format = format;
    _autoSync = autoSync;
  }
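
  /**
   * Completes initialization of this PageChannel: installs the page
   * encoding/decoding handler from the given CodecProvider and reads the
   * global usage map.  Must be called after construction, before the
   * channel is used to read or write normal pages.
   */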
  public void initialize(DatabaseImpl database, CodecProvider codecProvider)
    throws IOException
  {
    // set up page encoding/decoding support
    _codecHandler = codecProvider.createHandler(this, database.getCharset());
    if(!_codecHandler.canEncodePartialPage()) {
      _fullPageEncodeBufferH =
        TempPageHolder.newHolder(TempBufferHolder.Type.SOFT);
    }
    if(!_codecHandler.canDecodeInline()) {
      _tempDecodeBufferH = TempBufferHolder.newHolder(
          TempBufferHolder.Type.SOFT, true);
    }

    // read the global usage map, which tracks the pages free for re-use
    _globalUsageMap = UsageMap.read(database, PAGE_GLOBAL_USAGE_MAP,
                                    ROW_GLOBAL_USAGE_MAP, true);
  }
  public JetFormat getFormat() {
    return _format;
  }

  public boolean isAutoSync() {
    return _autoSync;
  }
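
  /**
   * Begins a logical write operation.  Write operations may be nested; each
   * call must be paired with a call to {@link #finishWrite}.
   */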
  public void startWrite() {
    ++_writeCount;
  }
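
  /**
   * Begins an exclusive logical write operation.  Fails if any other write
   * operation is already in progress.  Must be paired with a call to
   * {@link #finishWrite}.
   */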
  public void startExclusiveWrite() {
    if(_writeCount != 0) {
      throw new IllegalStateException(
          "Another write operation is currently in progress");
    }
    startWrite();
  }
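
  /**
   * Ends a logical write operation begun by {@link #startWrite} or
   * {@link #startExclusiveWrite}.  When the outermost write operation
   * completes and autoSync is enabled, the channel is flushed to disk.
   */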
  public void finishWrite() throws IOException {
    assertWriting();
    if((--_writeCount == 0) && _autoSync) {
      flush();
    }
  }
  /**
   * Returns {@code true} if a logical write operation is currently in
   * progress.
   */
  public boolean isWriting() {
    return(_writeCount > 0);
  }

  /**
   * Throws an IllegalStateException if no write operation is in progress.
   */
  private void assertWriting() {
    if(!isWriting()) {
      throw new IllegalStateException("No write operation in progress");
    }
  }
  /**
   * Returns the page number which would be used by the next page added to
   * the end of a file of the given size.
   */
  private int getNextPageNumber(long size) {
    return (int)(size / getFormat().PAGE_SIZE);
  }

  /**
   * Returns the byte offset of the given page within the database file.
   */
  private long getPageOffset(int pageNumber) {
    return((long) pageNumber * (long) getFormat().PAGE_SIZE);
  }
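
  /**
   * Validates that the given page number refers to an existing page within
   * the current file, throwing an IllegalStateException if not.
   */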
  private void validatePageNumber(int pageNumber)
    throws IOException
  {
    int nextPageNumber = getNextPageNumber(_channel.size());
    if((pageNumber <= INVALID_PAGE_NUMBER) || (pageNumber >= nextPageNumber)) {
      throw new IllegalStateException("invalid page number " + pageNumber);
    }
  }
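
  /**
   * Reads the given page number into the given buffer, decoding the page
   * contents via the codec handler.
   */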
  public void readPage(ByteBuffer buffer, int pageNumber)
    throws IOException
  {
    if(pageNumber == 0) {
      // page 0 is the database header page, which gets special handling
      readRootPage(buffer);
      return;
    }

    validatePageNumber(pageNumber);

    ByteBuffer inPage = buffer;
    ByteBuffer outPage = buffer;
    if(!_codecHandler.canDecodeInline()) {
      // the codec handler needs separate input and output buffers
      inPage = _tempDecodeBufferH.getPageBuffer(this);
      outPage.clear();
    }

    inPage.clear();
    int bytesRead = _channel.read(
        inPage, (long) pageNumber * (long) getFormat().PAGE_SIZE);
    inPage.flip();
    if(bytesRead != getFormat().PAGE_SIZE) {
      throw new IOException("Failed attempting to read " +
                            getFormat().PAGE_SIZE + " bytes from page " +
                            pageNumber + ", only read " + bytesRead);
    }

    _codecHandler.decodePage(inPage, outPage, pageNumber);
  }
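
  /**
   * Reads the database root page (page 0) into the given buffer and
   * unmasks its header bytes.
   */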
  public void readRootPage(ByteBuffer buffer)
    throws IOException
  {
    // the root page is not passed through the codec handler
    buffer.clear();
    int bytesRead = _channel.read(buffer, 0L);
    buffer.flip();
    if(bytesRead != getFormat().PAGE_SIZE) {
      throw new IOException("Failed attempting to read " +
                            getFormat().PAGE_SIZE + " bytes from page " +
                            0 + ", only read " + bytesRead);
    }

    // de-mask the header bytes
    applyHeaderMask(buffer);
  }
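
  /**
   * Writes a full page's worth of data from the given buffer to the given
   * page number.
   */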
  public void writePage(ByteBuffer page, int pageNumber) throws IOException {
    writePage(page, pageNumber, 0);
  }
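
  /**
   * Writes a page of data to the given page number, starting at the given
   * offset within the page.  Non-root pages are run through the codec
   * handler's page encoding before being written; the root page has the
   * header mask applied instead.  Must be called within an active write
   * operation.
   */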
  public void writePage(ByteBuffer page, int pageNumber, int pageOffset)
    throws IOException
  {
    assertWriting();
    validatePageNumber(pageNumber);

    page.rewind().position(pageOffset);

    int writeLen = page.remaining();
    if((writeLen + pageOffset) > getFormat().PAGE_SIZE) {
      throw new IllegalArgumentException(
          "Page buffer is too large, size " + (writeLen + pageOffset));
    }

    ByteBuffer encodedPage = page;
    if(pageNumber == 0) {
      // re-mask the header before writing the root page
      applyHeaderMask(page);
    } else {

      if(!_codecHandler.canEncodePartialPage()) {
        if((pageOffset > 0) && (writeLen < getFormat().PAGE_SIZE)) {

          // the codec handler cannot encode a partial page, so obtain the
          // full page via the temp page holder and overlay the partial data
          // before encoding
          ByteBuffer fullPage = _fullPageEncodeBufferH.setPage(
              this, pageNumber);

          // copy the partial data into the full page buffer
          fullPage.position(pageOffset);
          fullPage.put(page);
          fullPage.rewind();

          // reset so that the entire page gets encoded and written
          page = fullPage;
          pageOffset = 0;
          writeLen = getFormat().PAGE_SIZE;

        } else {

          // a full page write overrides any cached copy of this page
          _fullPageEncodeBufferH.possiblyInvalidate(pageNumber, null);
        }
      }

      // encode the page via the codec handler
      encodedPage = _codecHandler.encodePage(page, pageNumber, pageOffset);

      // make sure position/limit cover the region to write, regardless of
      // which buffer the codec handler returned
      encodedPage.position(pageOffset).limit(pageOffset + writeLen);
    }

    try {
      _channel.write(encodedPage, (getPageOffset(pageNumber) + pageOffset));
    } finally {
      if(pageNumber == 0) {
        // un-mask the header so the in-memory copy stays readable
        applyHeaderMask(page);
      }
    }
  }
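
  /**
   * Allocates a new page at the end of the database file and removes it
   * from the global usage map of free pages.  Must be called within an
   * active write operation.
   *
   * @return the page number of the newly allocated page
   */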
  public int allocateNewPage() throws IOException {
    assertWriting();

    long size = _channel.size();
    if(size >= getFormat().MAX_DATABASE_SIZE) {
      throw new IOException("Database is at maximum size " +
                            getFormat().MAX_DATABASE_SIZE);
    }
    if((size % getFormat().PAGE_SIZE) != 0L) {
      throw new IOException("Database corrupted, file size " + size +
                            " is not multiple of page size " +
                            getFormat().PAGE_SIZE);
    }

    _forceBytes.rewind();

    // extend the file to the end of the new page by writing a single byte
    // at the last position of the page
    int pageOffset = (getFormat().PAGE_SIZE - _forceBytes.remaining());
    long offset = size + pageOffset;
    int pageNumber = getNextPageNumber(size);

    _channel.write(_forceBytes, offset);

    // remove the new page from the set of free pages
    _globalUsageMap.removePageNumber(pageNumber);
    return pageNumber;
  }
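
  /**
   * Deallocates the given page: writes an "invalid" page header to it and
   * returns it to the global usage map of free pages.  Must be called
   * within an active write operation.
   */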
  public void deallocatePage(int pageNumber) throws IOException {
    assertWriting();

    validatePageNumber(pageNumber);

    // only the page header is overwritten, marking the page type as invalid
    _invalidPageBytes.rewind();
    _channel.write(_invalidPageBytes, getPageOffset(pageNumber));

    _globalUsageMap.addPageNumber(pageNumber);
  }
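
  /**
   * Creates a new, correctly sized and ordered buffer for reading or
   * writing a single page.
   */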
  public ByteBuffer createPageBuffer() {
    return createBuffer(getFormat().PAGE_SIZE);
  }

  /**
   * Creates a new buffer of the given size, using the default byte order.
   */
  public static ByteBuffer createBuffer(int size) {
    return createBuffer(size, DEFAULT_BYTE_ORDER);
  }

  /**
   * Creates a new buffer of the given size and byte order.
   */
  public static ByteBuffer createBuffer(int size, ByteOrder order) {
    return ByteBuffer.allocate(size).order(order);
  }
  @Override
  public void flush() throws IOException {
    _channel.force(true);
  }

  @Override
  public void close() throws IOException {
    flush();
    if(_closeChannel) {
      _channel.close();
    }
  }

  @Override
  public boolean isOpen() {
    return _channel.isOpen();
  }
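
  /**
   * Applies the XOR header mask to the database header bytes in the given
   * buffer.  Because the mask is applied via XOR, calling this method a
   * second time restores the original bytes.
   */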
  private void applyHeaderMask(ByteBuffer buffer) {
    // de/encode the header bytes against the format's header mask
    byte[] headerMask = _format.HEADER_MASK;
    for(int idx = 0; idx < headerMask.length; ++idx) {
      int pos = idx + _format.OFFSET_MASKED_HEADER;
      byte b = (byte)(buffer.get(pos) ^ headerMask[idx]);
      buffer.put(pos, b);
    }
  }
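
  /**
   * Returns a duplicate of the given buffer, with the same byte order,
   * narrowed to the given position and limit and with the mark set at the
   * new position.
   */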
  public static ByteBuffer narrowBuffer(ByteBuffer buffer, int position,
                                        int limit)
  {
    return (ByteBuffer)buffer.duplicate()
      .order(buffer.order())
      .clear()
      .limit(limit)
      .position(position)
      .mark();
  }
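
  /**
   * Returns a ByteBuffer wrapping the given bytes and configured with the
   * default byte order.
   */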
  public static ByteBuffer wrap(byte[] bytes) {
    return ByteBuffer.wrap(bytes).order(DEFAULT_BYTE_ORDER);
  }
}