/*
* Copyright (c) 1997-1998 Borland International, Inc. All Rights Reserved.
*
* This SOURCE CODE FILE, which has been provided by Borland as part
* of a Borland product for use ONLY by licensed users of the product,
* includes CONFIDENTIAL and PROPRIETARY information of Borland.
*
* USE OF THIS SOFTWARE IS GOVERNED BY THE TERMS AND CONDITIONS
* OF THE LICENSE STATEMENT AND LIMITED WARRANTY FURNISHED WITH
* THE PRODUCT.
*
* IN PARTICULAR, YOU WILL INDEMNIFY AND HOLD BORLAND, ITS RELATED
* COMPANIES AND ITS SUPPLIERS, HARMLESS FROM AND AGAINST ANY CLAIMS
* OR LIABILITIES ARISING OUT OF THE USE, REPRODUCTION, OR DISTRIBUTION
* OF YOUR PROGRAMS, INCLUDING ANY CLAIMS OR LIABILITIES ARISING OUT OF
* OR RESULTING FROM THE USE, MODIFICATION, OR DISTRIBUTION OF PROGRAMS
* OR FILES CREATED FROM, BASED ON, AND/OR DERIVED FROM THIS SOURCE
* CODE FILE.
*/
//Title: Custom Providers and Resolvers
//Version: 2.0
//Copyright: Copyright (c) 1998
//Author: Jens Ole Lauridsen
//Company: Borland International
//Description: Tutorial, example of provider and resolver.
package borland.samples.tutorial.dataset.providers;
import borland.jbcl.dataset.*;
import borland.jbcl.util.Variant;
import java.io.*;
// ResolverBean
// Resolver is an abstract base class for all resolvers
// DataLayout is an interface with constants describing the data layout in "data.txt"
//
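//
// The on-disk layout this resolver assumes (see readFile and writeFile below)
// is two ruler lines followed by one fixed-width line per row, with the
// integer rowID as the first column. A sketch with illustrative column
// widths, names, and values only; the real layout comes from DataLayout:
//
//             1         2         3         4         5
//   012345678901234567890123456789012345678901234567890123456789
//   1         Smith               John                   1200.50
//   2         Jones               Anna                    980.00
//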
public class ResolverBean extends Resolver implements DataLayout {
// Set a limit for how large data.txt should grow:
private static final int ROW_MAX = 100;
// resolveData = main Resolver method
// Parameters:
// dataSetView : DataSet to save data from.
//
public void resolveData(DataSet dataSetView) throws DataSetException {
// We are actually saving all the changes in the StorageDataSet:
//
StorageDataSet dataSet = dataSetView.getStorageDataSet();
// Starting the resolution will lock the dataSet for refreshes
// momentarily. The second parameter will result in posting of
// all pending changes in any dataSetView of this data.
//
ProviderHelp.startResolution(dataSet, true);
    // Bail out early if there are no changes:
if (!dataSet.changesPending()) {
ProviderHelp.endResolution(dataSet);
return;
}
// Read in the file. Each data line is kept as a separate string
// in the String array. The index used is the value of the ID
// field of the data itself. This is the column used as rowID in
// this implementation of a Resolver.
//
String[] file = readFile("data.txt");
    // Create three temporary DataSetViews:
DataSetView inserted = new DataSetView();
DataSetView deleted = new DataSetView();
DataSetView updated = new DataSetView();
    // Get a couple of temporary Variants:
variant1 = new Variant();
variant2 = new Variant();
// Use this flag to determine if the data was actually successfully
// saved to disk.
boolean resolved = false;
try {
// Get all the inserted, deleted, and updated rows:
dataSet.getInsertedRows(inserted);
dataSet.getDeletedRows(deleted);
dataSet.getUpdatedRows(updated);
/**************************************************************************
// Warning:
// This code is not used, but it demonstrates a point:
// The StorageDataSet of these dataSetViews might not
// be the same instance as the variable "dataSet"
StorageDataSet sds1 = ProviderHelp.getResolverDataSet(deleted);
StorageDataSet sds2 = deleted.getStorageDataSet();
// "sds1" will be the same instance as "dataSet" nomatter what.
// "sds2" will not be the same instance as "dataSet" if the storage
// is a DataStore. Keep this in mind if certain properties are needed
// off the "dataSet" instance in the methods "processDeletes" and
// "processInserts" below.
**************************************************************************/
// Handle all these changes:
      processInserts(inserted, file);
      processDeletes(deleted, file);
      processUpdates(updated, file);
      // Write the complete file back to disk.
      // Note: one gap here: the "data.txt" file could have been changed
      // since it was read, and there is no guard against that.
      //
writeFile("data.txt",file);
// Mark that everything was saved properly:
resolved = true;
}
finally {
// Reset the status bits.
// If the changes were successfully saved, this will change the
// status bits accordingly.
dataSet.resetPendingStatus(resolved);
      // Always remember to close these temporary DataSetViews;
      // otherwise there will be memory leaks!
inserted.close();
deleted.close();
updated.close();
// End the resolution process here.
ProviderHelp.endResolution(dataSet);
}
}
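  // How this resolver is typically wired up (a sketch only, not part of this
  // sample; the provider class name and the StorageDataSet property/method
  // names below are assumptions, not taken from this file):
  //
  //   StorageDataSet dataSet = new StorageDataSet();
  //   dataSet.setProvider(new ProviderBean());   // the tutorial's companion provider (assumed name)
  //   dataSet.setResolver(new ResolverBean());
  //   // ... edit rows through any DataSetView of dataSet ...
  //   dataSet.saveChanges();                     // ends up calling resolveData() above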
// processInserts
// Parameters:
// insertDataSet : DataSet of all the inserted rows.
// file : The internal representation of "data.txt"
//
private void processInserts(DataSet insertDataSet, String[] file) throws DataSetException {
if (!insertDataSet.isEmpty()) {
insertDataSet.first();
int status;
do {
        // Note that rows that were inserted and then deleted should not be resolved.
        status = insertDataSet.getStatus();
        if ((status & RowStatus.DELETED) == 0) {
processInsertRow(insertDataSet, file);
// Mark this row as PENDING_RESOLVED i.e. this change might get resolved now.
ProviderHelp.markPendingStatus(insertDataSet, true);
}
} while(insertDataSet.next());
}
}
// processDeletes
// Parameters:
// deleteDataSet : DataSet of all the deleted rows.
// file : The internal representation of "data.txt"
//
private void processDeletes(DataSet deleteDataSet, String[] file) throws DataSetException {
if (!deleteDataSet.isEmpty()) {
deleteDataSet.first();
int status;
do {
        // Note that rows that were inserted and then deleted should not be resolved.
status = deleteDataSet.getStatus();
if ((status & RowStatus.INSERTED) == 0) {
processDeleteRow(deleteDataSet, file);
// Mark this row as PENDING_RESOLVED i.e. this change might get resolved now.
ProviderHelp.markPendingStatus(deleteDataSet, true);
}
} while (deleteDataSet.next());
}
}
// processUpdates
// Parameters:
// updateDataSet : DataSet of all the updated rows.
// file : The internal representation of "data.txt"
//
private void processUpdates(DataSet updateDataSet, String[] file) throws DataSetException {
if (!updateDataSet.isEmpty()) {
DataRow oldDataRow = new DataRow(updateDataSet);
StorageDataSet dataSetStore = updateDataSet.getStorageDataSet();
updateDataSet.first();
int status;
do {
        // Note that updated rows that were also deleted should not be resolved.
        status = updateDataSet.getStatus();
        if ((status & RowStatus.DELETED) == 0) {
// Get the original row from the StorageDataSet:
dataSetStore.getOriginalRow(updateDataSet, oldDataRow);
processUpdateRow(updateDataSet, oldDataRow, file);
// Mark this row as PENDING_RESOLVED i.e. this change might get resolved now.
ProviderHelp.markPendingStatus(updateDataSet, true);
}
} while (updateDataSet.next());
}
}
// processInsertRow
// Parameters:
// insertRow : An inserted row.
// file : The internal representation of "data.txt"
//
private void processInsertRow(ReadRow insertRow, String[] file) throws DataSetException {
// Get the value of the rowID:
insertRow.getVariant(ROWID_NAME,variant1);
int id = variant1.getInt();
    // If the rowID is invalid or null, generate one.
    // Note that the QueryResolver does not do this; there, the
    // insertingRow event (from ResolverListener) must be implemented
    // to perform a similar task.
    if (id <= 0 || id >= ROW_MAX || insertRow.isNull(ROWID_NAME)) {
id = generateID(file);
variant1.setInt(id);
}
// If the rowID is already in use, this is an error:
if (file[id] != null)
throw new DataSetException("RowID is already used");
// Construct a line for "data.txt" :
String value = makeField(variant1,0);
for (int i=1; i<COLUMN_COUNT; i++) {
insertRow.getVariant(COLUMN_NAMES[i],variant1);
value += makeField(variant1, i );
}
// Save the new line into the line table:
file[id] = value;
}
// processDeleteRow
// Parameters:
// deleteRow : A deleted row.
// file : The internal representation of "data.txt"
//
private void processDeleteRow(ReadRow deleteRow, String[] file) throws DataSetException {
// Get the value of the rowID:
int id = deleteRow.getInt(ROWID_NAME);
// If the rowID is invalid or is not already in use, this is an error:
    if (id <= 0 || id >= ROW_MAX || deleteRow.isNull(ROWID_NAME) || file[id] == null)
throw new DataSetException("RowID is not found");
// Delete the line from the line table:
file[id] = null;
}
// processUpdateRow
// Parameters:
// updateRow : An updated row.
// oldDataRow : The original row (as the provider read it)
// file : The internal representation of "data.txt"
//
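  // Columns whose value did not change are copied verbatim from the existing
  // line in "data.txt"; only changed columns are reformatted with makeField.
  // Column boundaries come from COLUMN_WIDTHS.
  //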
private void processUpdateRow(ReadRow updateRow, ReadRow oldDataRow, String[] file) throws DataSetException {
// Get the value of the rowID:
int id = oldDataRow.getInt(ROWID_NAME);
// If the rowID is invalid or is not already in use, this is an error:
    if (id <= 0 || id >= ROW_MAX || oldDataRow.isNull(ROWID_NAME) || file[id] == null)
throw new DataSetException("RowID is not found");
    // Merge in the changes from this operation.
    // Note that we use the raw text from "data.txt" for columns
    // that did not change.
String line = file[id];
String value = "";
int offset = 0;
for (int i=0; i<COLUMN_COUNT; i++) {
updateRow.getVariant( COLUMN_NAMES[i], variant1 );
oldDataRow.getVariant( COLUMN_NAMES[i], variant2 );
if (variant1.equals(variant2))
value += getFieldFromLine(line, offset, i); // No change, use the current column text
else
value += makeField(variant1, i); // Changed!, make a column of the current value
offset += COLUMN_WIDTHS[i];
}
// Store the line to the line table:
file[id] = value;
}
// generateID - Find an index in "file" that is currently unused.
// Parameters:
// file : The internal representation of "data.txt"
//
private int generateID(String[] file) throws DataSetException {
int slot = 1;
while (slot < ROW_MAX && file[slot] != null)
slot++;
if (slot >= ROW_MAX)
throw new DataSetException("Capacity Overload");
return slot;
}
// getFieldFromLine - Extract a column from the current line
// Parameters:
// line : The current line
// offset : The starting offset of the column.
// columnNo: The column number of the wanted column.
//
private String getFieldFromLine(String line, int offset, int columnNo) {
int width = COLUMN_WIDTHS[columnNo];
return line.substring(offset,offset+width);
}
// makeField - Make a textual representation of the column value
// Parameters:
// value : The value of the column.
// columnNo: The column number.
//
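  // For example, assuming a STRING column of width 10 and an INT column of
  // width 6 (illustrative widths only), makeField would return:
  //   "Smith     "  for the String value "Smith" (left aligned, space padded)
  //   "    42"      for the int value 42         (right aligned, space padded)
  //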
private String makeField(Variant value, int columnNo) {
// Get the string value:
String val = "";
if (!value.isNull()) {
switch (value.getType()) {
case Variant.STRING:
val = value.getString();
break;
case Variant.INT:
val = Integer.toString(value.getInt());
break;
case Variant.BIGDECIMAL:
val = value.getBigDecimal().toString();
break;
}
}
// Make sure to use the correct column width:
int width = COLUMN_WIDTHS[columnNo];
if (val.length() > width)
val = val.substring(0,width); // Zap long column values...
else if (val.length() < width) {
// Pad with spaces...
char[] chars = new char[width];
for (int i=0; i<width; i++)
chars[i] = ' ';
int start;
if (COLUMN_TYPES[columnNo] == Variant.STRING)
start = 0; // Left align strings...
else
start = width - val.length(); // Right align all other types...
      val.getChars(0, val.length(), chars, start);  // Copy the value at the alignment offset computed above
val = new String(chars);
}
return val;
}
// skipLine - Skip up to next LF character:
//
private void skipLine(InputStream stream) throws IOException {
    int ch = stream.read();
    while (ch != '\n' && ch != -1)   // Stop at end of line or end of file
      ch = stream.read();
}
  // readFile - Read the contents of the file 'filename'.
// Create a String array and store each line in there.
// Use the value of the ID as the index of the array.
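  //            The first two lines of the file are the column-ruler header
  //            written by writeFile; they are skipped with skipLine.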
private String[] readFile(String filename) throws DataSetException {
try {
      String[] result = new String[ROW_MAX];
      FileInputStream fs = new FileInputStream(filename);
int rowWidth = 0;
for (int i=0; i<COLUMN_COUNT; i++)
rowWidth += COLUMN_WIDTHS[i];
byte[] buffer = new byte[rowWidth];
skipLine(fs);
skipLine(fs);
while (true) {
int len = fs.read(buffer);
if (len < rowWidth)
break;
skipLine(fs);
String value = new String(buffer);
String idStr = value.substring(0,COLUMN_WIDTHS[0]);
int id = Integer.parseInt(idStr.trim());
        result[id] = value;   // Reuse the String already built from this buffer
}
fs.close();
return result;
}
catch (IOException ex) {
DataSetException.IOException(ex);
return null;
}
}
// writeFile
// Write the file to disk, with changes applied here.
//
private void writeFile(String filename, String[] file) throws DataSetException {
try {
      FileOutputStream fs = new FileOutputStream(filename);
      fs.write("          1         2         3         4         5\r\n".getBytes());
fs.write("012345678901234567890123456789012345678901234567890123456789\r\n".getBytes());
for (int i=1; i<ROW_MAX; i++) {
if (file[i] != null) {
fs.write(file[i].getBytes());
fs.write('\r');
fs.write('\n');
}
}
fs.close();
}
    catch (IOException ex) {
      // Rethrow as a DataSetException rather than silently swallowing the error:
      DataSetException.IOException(ex);
    }
}
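  // Scratch Variants reused by resolveData and the row-processing methods above.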
private Variant variant1;
private Variant variant2;
}