package com.healthmarketscience.jackcess.impl;

import java.io.Flushable;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.channels.Channel;
import java.nio.channels.FileChannel;

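/**
 * Reads and writes individual pages of a database file. Page writes are only
 * permitted inside a "logical" write operation; a minimal usage sketch
 * (illustrative only, assuming {@code buffer} and {@code pageNumber} have
 * already been prepared by the caller):
 * <pre>
 *   pageChannel.startWrite();
 *   try {
 *     pageChannel.writePage(buffer, pageNumber);
 *   } finally {
 *     pageChannel.finishWrite();
 *   }
 * </pre>
 */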
public class PageChannel implements Channel, Flushable {

  /** special page number denoting an invalid page */
  static final int INVALID_PAGE_NUMBER = -1;

  /** the byte order used by access databases */
  public static final ByteOrder DEFAULT_BYTE_ORDER = ByteOrder.LITTLE_ENDIAN;

  /** invalid page header, written when a page is deallocated */
  private static final byte[] INVALID_PAGE_BYTE_HEADER =
    new byte[]{PageTypes.INVALID, (byte)0, (byte)0, (byte)0};

  /** page number of the global usage map */
  static final int PAGE_GLOBAL_USAGE_MAP = 1;
  /** row number of the global usage map */
  static final int ROW_GLOBAL_USAGE_MAP = 0;

  /** channel for the database file */
  private final FileChannel _channel;
  /** whether or not the underlying channel should be closed when this
      PageChannel is closed */
  private final boolean _closeChannel;
  /** format of the database in the channel */
  private final JetFormat _format;
  /** whether or not to flush to disk after each completed write operation */
  private final boolean _autoSync;

  /** buffer used when deallocating a page */
  private final ByteBuffer _invalidPageBytes =
    ByteBuffer.wrap(INVALID_PAGE_BYTE_HEADER);
  /** single-byte buffer written when allocating a new page in order to force
      the file to grow by one page */
  private final ByteBuffer _forceBytes = ByteBuffer.allocate(1);
  /** usage map of pages which are free for re-use */
  private UsageMap _globalUsageMap;
  /** handler for encoding/decoding page data */
  private CodecHandler _codecHandler = DefaultCodecProvider.DUMMY_HANDLER;
  /** temp holder for a full page, used when the codec handler cannot encode
      partial pages */
  private TempPageHolder _fullPageEncodeBufferH;
  /** temp buffer used when the codec handler cannot decode pages inline */
  private TempBufferHolder _tempDecodeBufferH;
  /** number of "logical" write operations currently in progress */
  private int _writeCount;

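  /**
   * Only used by unit tests; constructs a PageChannel with no backing
   * channel.
   */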
  protected PageChannel(boolean testing) {
    if(!testing) {
      throw new IllegalArgumentException();
    }
    _channel = null;
    _closeChannel = false;
    _format = JetFormat.VERSION_4;
    _autoSync = false;
  }

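  /**
   * @param channel file channel for the database file
   * @param closeChannel whether or not to close the channel when this
   *                     PageChannel is closed
   * @param format format of the database in the channel
   * @param autoSync whether or not to flush to disk after each completed
   *                 write operation
   */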
  public PageChannel(FileChannel channel, boolean closeChannel,
                     JetFormat format, boolean autoSync)
    throws IOException
  {
    _channel = channel;
    _closeChannel = closeChannel;
    _format = format;
    _autoSync = autoSync;
  }

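  /**
   * Does second-stage initialization after construction: installs the
   * {@link CodecHandler} supplied by the given provider and reads the global
   * usage map of free pages.
   */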
  public void initialize(DatabaseImpl database, CodecProvider codecProvider)
    throws IOException
  {
    // initialize page en/decoding support
    _codecHandler = codecProvider.createHandler(this, database.getCharset());
    if(!_codecHandler.canEncodePartialPage()) {
      _fullPageEncodeBufferH =
        TempPageHolder.newHolder(TempBufferHolder.Type.SOFT);
    }
    if(!_codecHandler.canDecodeInline()) {
      _tempDecodeBufferH = TempBufferHolder.newHolder(
          TempBufferHolder.Type.SOFT, true);
    }

    // the global usage map tracks pages which are free for re-use
    _globalUsageMap = UsageMap.read(database, PAGE_GLOBAL_USAGE_MAP,
                                    ROW_GLOBAL_USAGE_MAP, true);
  }

  public JetFormat getFormat() {
    return _format;
  }

  public boolean isAutoSync() {
    return _autoSync;
  }

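  /**
   * Begins a "logical" write operation. Write operations may be nested; if
   * auto-sync is enabled, the channel is only flushed when the outermost
   * operation completes via {@link #finishWrite}.
   */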
  public void startWrite() {
    ++_writeCount;
  }

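  /**
   * Begins an exclusive "logical" write operation. Throws an
   * IllegalStateException if any other write operation is already in
   * progress.
   */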
  public void startExclusiveWrite() {
    if(_writeCount != 0) {
      throw new IllegalStateException(
          "Another write operation is currently in progress");
    }
    startWrite();
  }

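  /**
   * Completes a "logical" write operation previously begun by
   * {@link #startWrite} or {@link #startExclusiveWrite}. If this completes
   * the outermost write operation and auto-sync is enabled, the channel is
   * flushed. Should be called from a finally block.
   */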
  public void finishWrite() throws IOException {
    assertWriting();
    if((--_writeCount == 0) && _autoSync) {
      flush();
    }
  }

  public boolean isWriting() {
    return(_writeCount > 0);
  }

  private void assertWriting() {
    if(!isWriting()) {
      throw new IllegalStateException("No write operation in progress");
    }
  }

  private int getNextPageNumber(long size) {
    return (int)(size / getFormat().PAGE_SIZE);
  }

  private long getPageOffset(int pageNumber) {
    return((long) pageNumber * (long) getFormat().PAGE_SIZE);
  }

  private void validatePageNumber(int pageNumber)
    throws IOException
  {
    int nextPageNumber = getNextPageNumber(_channel.size());
    if((pageNumber <= INVALID_PAGE_NUMBER) || (pageNumber >= nextPageNumber)) {
      throw new IllegalStateException("invalid page number " + pageNumber);
    }
  }

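  /**
   * Reads a page from disk into the given buffer, decoding it with the
   * current {@link CodecHandler}.
   * @param buffer the buffer into which the page is read (must be at least
   *               one page in size)
   * @param pageNumber the number of the page to read
   */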
  public void readPage(ByteBuffer buffer, int pageNumber)
    throws IOException
  {
    if(pageNumber == 0) {
      // page 0 is the database header page and is handled specially
      readRootPage(buffer);
      return;
    }

    validatePageNumber(pageNumber);

    ByteBuffer inPage = buffer;
    ByteBuffer outPage = buffer;
    if(!_codecHandler.canDecodeInline()) {
      // the codec handler cannot decode inline, so read the raw page into a
      // temp buffer and decode into the output buffer
      inPage = _tempDecodeBufferH.getPageBuffer(this);
      outPage.clear();
    }

    inPage.clear();
    int bytesRead = _channel.read(
        inPage, (long) pageNumber * (long) getFormat().PAGE_SIZE);
    inPage.flip();
    if(bytesRead != getFormat().PAGE_SIZE) {
      throw new IOException("Failed attempting to read " +
                            getFormat().PAGE_SIZE + " bytes from page " +
                            pageNumber + ", only read " + bytesRead);
    }

    _codecHandler.decodePage(inPage, outPage, pageNumber);
  }

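  /**
   * Reads the database header page (page 0) into the given buffer and
   * removes the header mask. The header page is masked rather than
   * codec-encoded.
   */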
  public void readRootPage(ByteBuffer buffer)
    throws IOException
  {
    // special case, the database header page is not codec-encoded
    buffer.clear();
    int bytesRead = _channel.read(buffer, 0L);
    buffer.flip();
    if(bytesRead != getFormat().PAGE_SIZE) {
      throw new IOException("Failed attempting to read " +
                            getFormat().PAGE_SIZE + " bytes from page " +
                            0 + ", only read " + bytesRead);
    }

    // de-mask the header
    applyHeaderMask(buffer);
  }

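  /**
   * Writes a page to disk.
   * @param page the page to write
   * @param pageNumber the page number of the page
   */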
  public void writePage(ByteBuffer page, int pageNumber) throws IOException {
    writePage(page, pageNumber, 0);
  }

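  /**
   * Writes a page (or part of a page) to disk, encoding it with the current
   * {@link CodecHandler} (the header page is masked instead of encoded).
   * @param page the page to write
   * @param pageNumber the page number of the page
   * @param pageOffset offset within the page at which to start writing the
   *                   page data
   */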
  public void writePage(ByteBuffer page, int pageNumber, int pageOffset)
    throws IOException
  {
    assertWriting();
    validatePageNumber(pageNumber);

    page.rewind().position(pageOffset);

    int writeLen = page.remaining();
    if((writeLen + pageOffset) > getFormat().PAGE_SIZE) {
      throw new IllegalArgumentException(
          "Page buffer is too large, size " + (writeLen + pageOffset));
    }

    ByteBuffer encodedPage = page;
    if(pageNumber == 0) {
      // mask the database header before writing it
      applyHeaderMask(page);
    } else {

      if(!_codecHandler.canEncodePartialPage()) {
        if((pageOffset > 0) && (writeLen < getFormat().PAGE_SIZE)) {

          // the codec handler cannot encode partial pages, so merge the
          // partial update into the full current page contents and encode
          // the whole page
          ByteBuffer fullPage = _fullPageEncodeBufferH.setPage(
              this, pageNumber);

          // copy the partial data into the full page buffer
          fullPage.position(pageOffset);
          fullPage.put(page);
          fullPage.rewind();

          // reset so that the whole page is written
          page = fullPage;
          pageOffset = 0;
          writeLen = getFormat().PAGE_SIZE;

        } else {

          // invalidate any cached copy of this page, since it is being
          // overwritten
          _fullPageEncodeBufferH.possiblyInvalidate(pageNumber, null);
        }
      }

      // encode the page for storage
      encodedPage = _codecHandler.encodePage(page, pageNumber, pageOffset);

      // reset position/limit in case encoding affected them
      encodedPage.position(pageOffset).limit(pageOffset + writeLen);
    }

    try {
      _channel.write(encodedPage, (getPageOffset(pageNumber) + pageOffset));
    } finally {
      if(pageNumber == 0) {
        // un-mask the header so the caller's buffer is unchanged (the mask
        // is an XOR, so applying it twice is a no-op)
        applyHeaderMask(page);
      }
    }
  }

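  /**
   * Allocates a new page in the database by extending the file by one page.
   * @return the page number of the newly allocated page
   */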
  public int allocateNewPage() throws IOException {
    assertWriting();

    // sanity check the current file size before extending it
    long size = _channel.size();
    if(size >= getFormat().MAX_DATABASE_SIZE) {
      throw new IOException("Database is at maximum size " +
                            getFormat().MAX_DATABASE_SIZE);
    }
    if((size % getFormat().PAGE_SIZE) != 0L) {
      throw new IOException("Database corrupted, file size " + size +
                            " is not multiple of page size " +
                            getFormat().PAGE_SIZE);
    }

    _forceBytes.rewind();

    // extend the file by writing a single byte at the last position of the
    // new page
    int pageOffset = (getFormat().PAGE_SIZE - _forceBytes.remaining());
    long offset = size + pageOffset;
    int pageNumber = getNextPageNumber(size);

    _channel.write(_forceBytes, offset);

    // note, the new page is no longer "free"
    _globalUsageMap.removePageNumber(pageNumber);
    return pageNumber;
  }

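  /**
   * Deallocates a page: marks its header as invalid and adds it to the
   * global usage map of free pages so that it can be re-used.
   */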
  public void deallocatePage(int pageNumber) throws IOException {
    assertWriting();

    validatePageNumber(pageNumber);

    // don't rewrite the whole page, just overwrite the first few bytes with
    // an "invalid page" header
    _invalidPageBytes.rewind();
    _channel.write(_invalidPageBytes, getPageOffset(pageNumber));

    // note, the page is now "free"
    _globalUsageMap.addPageNumber(pageNumber);
  }

  /**
   * @return a new ByteBuffer sized to hold one page, with the default byte
   *         order
   */
  public ByteBuffer createPageBuffer() {
    return createBuffer(getFormat().PAGE_SIZE);
  }

  /**
   * @return a new ByteBuffer of the given size, with the default byte order
   */
  public static ByteBuffer createBuffer(int size) {
    return createBuffer(size, DEFAULT_BYTE_ORDER);
  }

  /**
   * @return a new ByteBuffer of the given size and byte order
   */
  public static ByteBuffer createBuffer(int size, ByteOrder order) {
    return ByteBuffer.allocate(size).order(order);
  }

  @Override
  public void flush() throws IOException {
    _channel.force(true);
  }

  @Override
  public void close() throws IOException {
    flush();
    if(_closeChannel) {
      _channel.close();
    }
  }

  @Override
  public boolean isOpen() {
    return _channel.isOpen();
  }

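  /**
   * Applies the XOR header mask to the masked portion of the database header
   * page. Since the mask is an XOR, applying it a second time un-masks the
   * header.
   */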
  private void applyHeaderMask(ByteBuffer buffer) {
    byte[] headerMask = _format.HEADER_MASK;
    for(int idx = 0; idx < headerMask.length; ++idx) {
      int pos = idx + _format.OFFSET_MASKED_HEADER;
      byte b = (byte)(buffer.get(pos) ^ headerMask[idx]);
      buffer.put(pos, b);
    }
  }

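  /**
   * @return a duplicate of the given buffer (with the same byte order),
   *         narrowed to the given position and limit, with the mark set at
   *         the new position
   */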
  public static ByteBuffer narrowBuffer(ByteBuffer buffer, int position,
                                        int limit)
  {
    return (ByteBuffer)buffer.duplicate()
      .order(buffer.order())
      .clear()
      .limit(limit)
      .position(position)
      .mark();
  }

  /**
   * @return a ByteBuffer wrapping the given bytes, with the default byte
   *         order
   */
  public static ByteBuffer wrap(byte[] bytes) {
    return ByteBuffer.wrap(bytes).order(DEFAULT_BYTE_ORDER);
  }
}