
Added database and scheduler modules.

Added pretty engine exceptions support.
Geert Bevin 2022-10-05 23:32:05 -04:00
parent aa07b0a9d0
commit c3ab3fdf41
540 changed files with 66095 additions and 204 deletions

.gitignore

@@ -7,3 +7,5 @@ build
lib/src/generated
lib/src/main/gen
.idea
lib/embedded_dbs
derby.log


@@ -1,5 +1,5 @@
/*
* Copyright 2001-2022 Geert Bevin (gbevin[remove] at uwyn dot com)
* Copyright 2001-2022 Geert Bevin <gbevin[remove] at uwyn dot com>
* Licensed under the Apache License, Version 2.0 (the "License")
*/
package rife;
@@ -32,6 +32,7 @@ public class TestSite extends Site {
Template template = context.getHtmlTemplate("Example");
template.setValue("one", one);
template.setValue("two", two);
template.setValue("three", two);
context.print(template);
}
}


@@ -1,5 +1,5 @@
/*
* Copyright 2001-2022 Geert Bevin (gbevin[remove] at uwyn dot com)
* Copyright 2001-2022 Geert Bevin <gbevin[remove] at uwyn dot com>
* Licensed under the Apache License, Version 2.0 (the "License")
*/
package rife;


@@ -28,6 +28,12 @@ dependencies {
compileOnly("org.eclipse.jetty:jetty-servlet:11.0.12")
compileOnly("jakarta.servlet:jakarta.servlet-api:5.0.0")
testImplementation("org.junit.jupiter:junit-jupiter:5.8.2")
testImplementation("org.postgresql:postgresql:42.5.0")
testImplementation("mysql:mysql-connector-java:8.0.30")
testImplementation("org.hsqldb:hsqldb:2.7.0")
testImplementation("com.h2database:h2:2.1.214")
testImplementation("org.apache.derby:derby:10.16.1.1")
testImplementation("org.apache.derby:derbytools:10.16.1.1")
}
tasks.generateGrammarSource {


@@ -0,0 +1,37 @@
/*
* Copyright 2001-2022 Geert Bevin <gbevin[remove] at uwyn dot com>
* Licensed under the Apache License, Version 2.0 (the "License")
*/
package rife;
import rife.resources.ResourceFinderClasspath;
import rife.resources.exceptions.ResourceFinderErrorException;
public class Version {
private String mVersion = null;
Version() {
ResourceFinderClasspath resource_finder = ResourceFinderClasspath.instance();
try {
mVersion = resource_finder.getContent("RIFE_VERSION");
} catch (ResourceFinderErrorException e) {
mVersion = null;
}
if (mVersion != null) {
mVersion = mVersion.trim();
}
if (null == mVersion) {
mVersion = "unknown version";
}
}
private String getVersionString() {
return mVersion;
}
public static String getVersion() {
return VersionSingleton.INSTANCE.getVersionString();
}
}


@@ -0,0 +1,14 @@
/*
* Copyright 2001-2022 Geert Bevin <gbevin[remove] at uwyn dot com>
* Licensed under the Apache License, Version 2.0 (the "License")
*/
package rife;
/**
* Helper class to avoid Double Check Locking
* and still have a thread-safe singleton pattern
*/
class VersionSingleton {
static final Version INSTANCE = new Version();
}
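For orientation, and not part of this commit: Version together with VersionSingleton uses the initialization-on-demand holder idiom, relying on the JVM's lazy, thread-safe class initialization instead of double-checked locking. A minimal self-contained sketch of the same idiom, with purely illustrative names:

public class ExpensiveService {
    private final String state;

    private ExpensiveService() {
        // potentially costly setup, guaranteed by the JVM to run exactly once
        state = "initialized at " + System.currentTimeMillis();
    }

    // the nested holder class is only initialized on the first call to instance(),
    // which makes the singleton lazy and thread-safe without explicit locking
    private static class Holder {
        static final ExpensiveService INSTANCE = new ExpensiveService();
    }

    public static ExpensiveService instance() {
        return Holder.INSTANCE;
    }
}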


@@ -1,5 +1,5 @@
/*
* Copyright 2001-2022 Geert Bevin (gbevin[remove] at uwyn dot com)
* Copyright 2001-2022 Geert Bevin <gbevin[remove] at uwyn dot com>
* Licensed under the Apache License, Version 2.0 (the "License")
*/
package rife.config;


@@ -1,5 +1,5 @@
/*
* Copyright 2001-2022 Geert Bevin (gbevin[remove] at uwyn dot com)
* Copyright 2001-2022 Geert Bevin <gbevin[remove] at uwyn dot com>
* Licensed under the Apache License, Version 2.0 (the "License")
*/
package rife.config;


@@ -1,5 +1,5 @@
/*
* Copyright 2001-2022 Geert Bevin (gbevin[remove] at uwyn dot com)
* Copyright 2001-2022 Geert Bevin <gbevin[remove] at uwyn dot com>
* Licensed under the Apache License, Version 2.0 (the "License")
*/
package rife.config;
@@ -35,10 +35,18 @@ public class RifeConfig {
return instance().global;
}
public static DatabaseConfig database() {
return instance().database;
}
public static EngineConfig engine() {
return instance().engine;
}
public static SchedulerConfig scheduler() {
return instance().scheduler;
}
public static ServerConfig server() {
return instance().server;
}
@@ -52,7 +60,9 @@ public class RifeConfig {
}
public final GlobalConfig global = new GlobalConfig();
public final DatabaseConfig database = new DatabaseConfig();
public final EngineConfig engine = new EngineConfig();
public final SchedulerConfig scheduler = new SchedulerConfig();
public final ServerConfig server = new ServerConfig();
public final TemplateConfig template = new TemplateConfig();
public final ToolsConfig tools = new ToolsConfig();
@@ -84,6 +94,32 @@ public class RifeConfig {
}
}
public class DatabaseConfig {
private int transactionTimeout_ = DEFAULT_TRANSACTION_TIMEOUT;
private boolean sqlDebugTrace_ = DEFAULT_SQL_DEBUG_TRACE;
private static final int DEFAULT_TRANSACTION_TIMEOUT = 0; // 0 seconds : turned off
private static final boolean DEFAULT_SQL_DEBUG_TRACE = false;
public int getTransactionTimeout() {
return transactionTimeout_;
}
public DatabaseConfig setTransactionTimeout(int timeout) {
transactionTimeout_ = timeout;
return this;
}
public boolean getSqlDebugTrace() {
return sqlDebugTrace_;
}
public DatabaseConfig setSqlDebugTrace(boolean flag) {
sqlDebugTrace_ = flag;
return this;
}
}
public class EngineConfig {
private String defaultContentType_ = DEFAULT_DEFAULT_CONTENT_TYPE;
private boolean prettyEngineExceptions_ = DEFAULT_PRETTY_ENGINE_EXCEPTIONS;
@@ -259,6 +295,44 @@ public class RifeConfig {
}
}
public class SchedulerConfig {
public static final String DEFAULT_TABLE_TASK = "SchedTask";
public static final String DEFAULT_SEQUENCE_TASK = "SEQ_SCHEDTASK";
public static final String DEFAULT_TABLE_TASKOPTION = "SchedTaskoption";
public static final int DEFAULT_TASKOPTION_NAME_MAXIMUM_LENGTH = 255;
public static final int DEFAULT_TASKOPTION_VALUE_MAXIMUM_LENGTH = 255;
public static final int DEFAULT_TASK_TYPE_MAXIMUM_LENGTH = 255;
public static final int DEFAULT_TASK_FREQUENCY_MAXIMUM_LENGTH = 255;
public int getTaskTypeMaximumLength() {
return DEFAULT_TASK_TYPE_MAXIMUM_LENGTH;
}
public int getTaskFrequencyMaximumLength() {
return DEFAULT_TASK_FREQUENCY_MAXIMUM_LENGTH;
}
public int getTaskoptionValueMaximumLength() {
return DEFAULT_TASKOPTION_VALUE_MAXIMUM_LENGTH;
}
public int getTaskoptionNameMaximumLength() {
return DEFAULT_TASKOPTION_NAME_MAXIMUM_LENGTH;
}
public String getTableTask() {
return DEFAULT_TABLE_TASK;
}
public String getSequenceTask() {
return DEFAULT_SEQUENCE_TASK;
}
public String getTableTaskoption() {
return DEFAULT_TABLE_TASKOPTION;
}
}
public class ServerConfig {
private int port_ = DEFAULT_PORT;
private String staticResourceBase_ = DEFAULT_STATIC_RESOURCE_BASE;
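A brief usage sketch, separate from the diff itself: the new database() and scheduler() accessors expose these sections application-wide, and the DatabaseConfig setters return the config object so calls can be chained. The values below are illustrative.

import rife.config.RifeConfig;

public class ConfigureRifeExample {
    public static void main(String[] args) {
        // fluent configuration of the new database section
        RifeConfig.database()
            .setTransactionTimeout(30)    // illustrative value in seconds; 0 (the default) disables the timeout
            .setSqlDebugTrace(true);

        // the scheduler section currently exposes its defaults through getters
        System.out.println(RifeConfig.scheduler().getTableTask());    // "SchedTask"
    }
}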


@@ -1,5 +1,5 @@
/*
* Copyright 2001-2022 Geert Bevin (gbevin[remove] at uwyn dot com)
* Copyright 2001-2022 Geert Bevin <gbevin[remove] at uwyn dot com>
* Licensed under the Apache License, Version 2.0 (the "License")
*/
package rife.config;


@@ -1,5 +1,5 @@
/*
* Copyright 2001-2022 Geert Bevin (gbevin[remove] at uwyn dot com)
* Copyright 2001-2022 Geert Bevin <gbevin[remove] at uwyn dot com>
* Licensed under the Apache License, Version 2.0 (the "License")
*/
package rife.config.exceptions;


@@ -1,5 +1,5 @@
/*
* Copyright 2001-2022 Geert Bevin (gbevin[remove] at uwyn dot com)
* Copyright 2001-2022 Geert Bevin <gbevin[remove] at uwyn dot com>
* Licensed under the Apache License, Version 2.0 (the "License")
*/
package rife.config.exceptions;


@@ -1,5 +1,5 @@
/*
* Copyright 2001-2022 Geert Bevin (gbevin[remove] at uwyn dot com)
* Copyright 2001-2022 Geert Bevin <gbevin[remove] at uwyn dot com>
* Licensed under the Apache License, Version 2.0 (the "License")
*/
package rife.config.exceptions;


@@ -1,5 +1,5 @@
/*
* Copyright 2001-2022 Geert Bevin (gbevin[remove] at uwyn dot com)
* Copyright 2001-2022 Geert Bevin <gbevin[remove] at uwyn dot com>
* Licensed under the Apache License, Version 2.0 (the "License")
*/
package rife.config.exceptions;


@@ -0,0 +1,272 @@
/*
* Copyright 2001-2022 Geert Bevin <gbevin[remove] at uwyn dot com>
* Licensed under the Apache License, Version 2.0 (the "License")
*/
package rife.database;
import rife.database.exceptions.DatabaseException;
import java.util.ArrayList;
import java.util.HashMap;
/**
* This is a class designed for database connection pooling. By storing
* connections, along with the thread that they are assigned to, thread-aware
* operations can be performed safely, securely, and more efficiently.
*
* @author JR Boyens (jboyens[remove] at uwyn dot com)
* @author Geert Bevin <gbevin[remove] at uwyn dot com>
* @since 1.0
*/
public class ConnectionPool implements AutoCloseable {
private int poolSize_ = 0;
private ArrayList<DbConnection> connectionPool_ = new ArrayList<>();
private HashMap<Thread, DbConnection> ThreadConnections_ = new HashMap<>();
/**
* Create a new ConnectionPool
*
* @since 1.0
*/
ConnectionPool() {
}
/**
* Set the size of the connection pool
*
* @param poolSize the new size of the pool
* @since 1.0
*/
void setPoolSize(int poolSize) {
synchronized (this) {
if (connectionPool_.size() > 0) {
cleanup();
}
poolSize_ = poolSize;
}
}
/**
* Get the size of the connection pool
*
* @return int the size of the connection pool
* @since 1.0
*/
int getPoolSize() {
return poolSize_;
}
/**
* Check if the connection pool is initialized
*
* @return boolean true if initialized; false if not
* @since 1.0
*/
boolean isInitialized() {
return connectionPool_.size() > 0;
}
/**
* Fill the pool with connections. Prepare the pool by filling it with
* connections from the provided datasource
*
* @param datasource the {@link Datasource} to fill the pool with
* connections from
* @throws DatabaseException when an error occurred during the
* preparation of the pool
* @since 1.0
*/
void preparePool(Datasource datasource)
throws DatabaseException {
synchronized (this) {
cleanup();
connectionPool_.ensureCapacity(poolSize_);
for (int i = 0; i < poolSize_; i++) {
connectionPool_.add(datasource.createConnection());
}
assert poolSize_ == connectionPool_.size();
this.notifyAll();
}
}
/**
* Cleans up all connections that have been reserved by this
* datasource.
*
* @throws DatabaseException when an error occurred during the
* clearing of the pool
* @since 1.0
*/
public void cleanup()
throws DatabaseException {
synchronized (this) {
if (0 == connectionPool_.size()) {
return;
}
ArrayList<DbConnection> previous_pool;
previous_pool = connectionPool_;
connectionPool_ = new ArrayList<>();
if (previous_pool != null) {
for (DbConnection connection : previous_pool) {
connection.cleanup();
}
previous_pool.clear();
}
ThreadConnections_.clear();
}
}
/**
* Remembers which connection has been reserved for a particular
* thread. This makes sequential operations within the same
* transaction in the same thread be executed on the same connection.
* Otherwise, transaction deadlocks might appear.
*
* @param thread the {@link Thread} to which the connection should be
* registered to
* @param connection the {@link DbConnection} that should be
* registered to the thread
* @since 1.0
*/
void registerThreadConnection(Thread thread, DbConnection connection) {
synchronized (this) {
ThreadConnections_.put(thread, connection);
}
}
/**
* Removes the dedication of a connection for a specific thread.
*
* @param thread the {@link Thread} whose {@link DbConnection} should
* be unregistered.
* @since 1.0
*/
void unregisterThreadConnection(Thread thread) {
synchronized (this) {
ThreadConnections_.remove(thread);
this.notifyAll();
}
}
/**
* Check if a connection is reserved for a specific thread.
*
* @param thread the {@link Thread} to check for a reserved connection
* @return true if the passed-in thread has a connection; false if not
* @since 1.0
*/
boolean hasThreadConnection(Thread thread) {
synchronized (this) {
return ThreadConnections_.containsKey(thread);
}
}
/**
* Recreate the connection.
*
* @param connection the {@link DbConnection} to be recreated
* @throws DatabaseException when there is a problem recreating the
* connection or cleaning up the old connection
* @since 1.0
*/
void recreateConnection(DbConnection connection)
throws DatabaseException {
synchronized (this) {
if (connectionPool_.remove(connection)) {
connectionPool_.add(connection.getDatasource().createConnection());
}
connection.cleanup();
}
}
/**
* Retrieve this thread's connection.
* <p>Connections are allocated from the pool and assigned to the
* current calling thread. If the thread does not have a current
* connection or the pool size is 0, then a new connection is created
* and assigned to the calling thread.
*
* @param datasource the datasource to create the connection to
* @return the created or retrieved DbConnection
* @throws DatabaseException when an error occurred retrieving or
* creating the connection
* @since 1.0
*/
DbConnection getConnection(Datasource datasource)
throws DatabaseException {
synchronized (this) {
// check if the connection threads contains an entry for the
// current thread so that transactions are treated in a
// continuous fashion
if (ThreadConnections_.containsKey(Thread.currentThread())) {
DbConnection connection = ThreadConnections_.get(Thread.currentThread());
if (connection != null) {
return connection;
}
}
// if there's no pool, create a new connection
if (0 == poolSize_) {
return datasource.createConnection();
}
// otherwise, try to obtain a free connection in the pool
else {
DbConnection connection = null;
// iterate over the available connections and try to obtain the
// first free one
DbConnection possible_connection = null;
while (null == connection) {
// prepare the pool if it's currently empty
if (connectionPool_.size() < poolSize_) {
preparePool(datasource);
}
for (int i = 0; i < connectionPool_.size() && null == connection; i++) {
possible_connection = connectionPool_.get(i);
if (null == possible_connection ||
possible_connection.isCleanedUp()) {
connection = datasource.createConnection();
connectionPool_.set(i, connection);
break;
} else if (null != possible_connection &&
possible_connection.isFree()) {
connection = possible_connection;
break;
}
}
if (null == connection) {
try {
this.wait();
} catch (InterruptedException e) {
Thread.yield();
}
}
}
// move the obtained connection to the end of the connection
// pool list
connectionPool_.remove(connection);
connectionPool_.add(connection);
return connection;
}
}
}
@Override
public void close()
throws Exception {
cleanup();
}
}
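An illustrative sketch, not part of the commit: because ConnectionPool implements AutoCloseable and close() delegates to cleanup(), the pool that a Datasource exposes through getPool() (added later in this commit) can be released with try-with-resources. The H2 URL, credentials and pool size are placeholders.

import rife.database.ConnectionPool;
import rife.database.Datasource;
import rife.database.DbConnection;

public class PoolCleanupExample {
    public static void main(String[] args) throws Exception {
        // illustrative in-memory H2 datasource with a pool of 4 connections
        Datasource datasource = new Datasource(
            "org.h2.Driver", "jdbc:h2:mem:example", "sa", "", 4);

        // close() delegates to cleanup(), so all pooled connections are
        // released when this block exits
        try (ConnectionPool pool = datasource.getPool()) {
            DbConnection connection = datasource.getConnection();
            connection.close();
        }
    }
}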


@@ -0,0 +1,679 @@
/*
* Copyright 2001-2022 Geert Bevin <gbevin[remove] at uwyn dot com>
* Licensed under the Apache License, Version 2.0 (the "License")
*/
package rife.database;
import rife.database.exceptions.*;
import rife.database.capabilities.CapabilitiesCompensator;
import rife.database.types.SqlConversion;
import rife.tools.ExceptionUtils;
import rife.tools.StringUtils;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.logging.Logger;
import javax.sql.DataSource;
/**
* Contains all the information required to connect to a database and
* centralizes the creation of connections to a database. These connections can
* optionally be pooled.
* <p>
* The initial connection will only be made and the pool will only be
* initialized when a connection is obtained for the first time. The
* instantiation only stores the connection parameters.
* <p>
* A <code>Datasource</code> also defines the type of database that is used for
* all database-independent logic such as sql to java and java to sql type
* mappings, query builders, database-based authentication, database-based
* scheduling, ... The key that identifies a supported type is the class name of
* the jdbc driver.
* <p>
* A <code>Datasource</code> instance can be created through its constructor,
* but it's recommended to work with a <code>Datasources</code> collection
* that is created and populated through XML. This can easily be achieved by
* using a <code>ParticipantDatasources</code> which participates in the
* application-wide repository.
* <p>
* Once a connection has been obtained from a pooled datasource, modifying its
* connection parameters is not possible anymore; a new instance has to be
* created to set the parameters to different values.
*
* @author Geert Bevin <gbevin[remove] at uwyn dot com>
* @see rife.database.Datasources
* @since 1.0
*/
public class Datasource implements Cloneable {
static HashMap<String, String> sDriverAliases = new HashMap<String, String>();
static HashMap<String, String> sDriverNames = new HashMap<String, String>();
static {
sDriverAliases.put("org.gjt.mm.mysql.Driver", "com.mysql.cj.jdbc.Driver");
sDriverAliases.put("com.mysql.jdbc.Driver", "com.mysql.cj.jdbc.Driver");
sDriverAliases.put("oracle.jdbc.OracleDriver", "oracle.jdbc.driver.OracleDriver");
sDriverAliases.put("org.apache.derby.jdbc.ClientDriver", "org.apache.derby.jdbc.EmbeddedDriver");
sDriverNames.put("Apache Derby Embedded JDBC Driver", "org.apache.derby.jdbc.EmbeddedDriver");
sDriverNames.put("Apache Derby Network Client JDBC Driver", "org.apache.derby.jdbc.EmbeddedDriver");
sDriverNames.put("H2 JDBC Driver", "org.h2.Driver");
sDriverNames.put("HSQL Database Engine Driver", "org.hsqldb.jdbcDriver");
sDriverNames.put("MySQL-AB JDBC Driver", "com.mysql.cj.jdbc.Driver");
sDriverNames.put("Oracle JDBC driver", "oracle.jdbc.driver.OracleDriver");
sDriverNames.put("PostgreSQL Native Driver", "org.postgresql.Driver");
sDriverNames.put("PostgreSQL JDBC Driver", "org.postgresql.Driver");
}
private String mDriver = null;
private String mUrl = null;
private String mUser = null;
private String mPassword = null;
private SqlConversion mSqlConversion = null;
private CapabilitiesCompensator mCapabilitiesCompensator = null;
private ConnectionPool mConnectionPool = new ConnectionPool();
private DataSource mDataSource = null;
/**
* Instantiates a new <code>Datasource</code> object with no connection
* information. The setters need to be used afterwards to provide each
* required parameter before the <code>Datasource</code> can be used.
*
* @see #setDriver(String)
* @see #setUrl(String)
* @see #setUser(String)
* @see #setPassword(String)
* @see #setPoolsize(int)
* @see #setDataSource(DataSource)
* @since 1.0
*/
public Datasource() {
}
/**
* Instantiates a new <code>Datasource</code> object with all the
* connection parameters that are required.
*
* @param driver the fully-qualified classname of the jdbc driver that will
* be used to connect to the database
* @param url the connection url which identifies the database to which the
* connection will be made, this is entirely driver-dependent
* @param user the user that will be used to connect to the database
* @param password the password that will be used to connect to the database
* @param poolsize the size of the connection pool, <code>0</code> means
* that the connections will not be pooled
* @since 1.0
*/
public Datasource(String driver, String url, String user, String password, int poolsize) {
setDriver(driver);
setUrl(url);
setUser(user);
setPassword(password);
setPoolsize(poolsize);
assert mDriver != null;
assert mDriver.length() > 0;
assert mUrl != null;
assert mUrl.length() > 0;
}
/**
* Instantiates a new <code>Datasource</code> object from a standard
* <code>javax.sql.DataSource</code>.
* <p>
* The driver will be detected from the connection that is provided by this
* <code>DataSource</code>. If the driver couldn't be detected, an exception
* will be thrown upon connect.
*
* @param dataSource the standard datasource that will be used to obtain the
* connection
* @param poolsize the size of the connection pool, <code>0</code> means
* that the connections will not be pooled
* @since 1.3
*/
public Datasource(DataSource dataSource, int poolsize) {
setDataSource(dataSource);
setPoolsize(poolsize);
assert dataSource != null;
}
/**
* Instantiates a new <code>Datasource</code> object from a standard
* <code>javax.sql.DataSource</code>.
*
* @param dataSource the standard datasource that will be used to obtain the
* connection
* @param driver the fully-qualified classname of the jdbc driver that will
* be used to provide an identifier for the database abstraction functionalities,
* <code>null</code> will let RIFE try to figure it out by itself
* @param user the user that will be used to connect to the database
* @param password the password that will be used to connect to the database
* @param poolsize the size of the connection pool, <code>0</code> means
* that the connections will not be pooled
* @since 1.3
*/
public Datasource(DataSource dataSource, String driver, String user, String password, int poolsize) {
setDataSource(dataSource);
mDriver = driver;
mSqlConversion = null;
setUser(user);
setPassword(password);
setPoolsize(poolsize);
assert dataSource != null;
}
/**
* Creates a new connection by using all the parameters that have been
* defined in the <code>Datasource</code>.
*
* @return the newly created <code>DbConnection</code> instance
* @throws DatabaseException when an error occurred during the creation of
* the connection
* @since 1.0
*/
DbConnection createConnection()
throws DatabaseException {
Connection connection = null;
if (this.mDataSource != null) {
// try to create a datasource connection
if (null != mUser && null != mPassword) {
try {
connection = this.mDataSource.getConnection(mUser, mPassword);
} catch (SQLException e) {
throw new ConnectionOpenErrorException(null, mUser, mPassword, e);
}
} else {
try {
connection = this.mDataSource.getConnection();
} catch (SQLException e) {
throw new ConnectionOpenErrorException(null, e);
}
}
if (null == mDriver) {
try {
String driver_name = connection.getMetaData().getDriverName();
mDriver = sDriverNames.get(driver_name);
if (null == mDriver) {
throw new UnsupportedDriverNameException(driver_name);
}
} catch (SQLException e) {
throw new DriverNameRetrievalErrorException(e);
}
}
} else {
// obtain the jdbc driver instance
try {
Class.forName(mDriver).newInstance();
} catch (InstantiationException e) {
throw new DriverInstantiationErrorException(mDriver, e);
} catch (ClassNotFoundException e) {
throw new DriverInstantiationErrorException(mDriver, e);
} catch (IllegalAccessException e) {
throw new DriverInstantiationErrorException(mDriver, e);
}
// try to create a jdbc connection
if (null != mUser &&
null != mPassword) {
try {
connection = DriverManager.getConnection(mUrl, mUser, mPassword);
} catch (SQLException e) {
throw new ConnectionOpenErrorException(mUrl, mUser, mPassword, e);
}
} else {
try {
connection = DriverManager.getConnection(mUrl);
} catch (SQLException e) {
throw new ConnectionOpenErrorException(mUrl, e);
}
}
}
// returns a new DbConnection instance which contains the new jdbc
// connection and is linked to this datasource
return new DbConnection(connection, this);
}
/**
* Retrieves a free database connection. If no connection pool is used, a
* new <code>DbConnection</code> will always be created, otherwise the first
* available connection in the pool will be returned.
*
* @return a free <code>DbConnection</code> instance which can be used to
* create and execute statements
* @throws DatabaseException when errors occurred during the creation of a
* new connection or while obtaining a connection from the pool
* @since 1.0
*/
public DbConnection getConnection()
throws DatabaseException {
return mConnectionPool.getConnection(this);
}
/**
* Retrieves the fully qualified class name of the jdbc driver that's used
* by this <code>Datasource</code>.
*
* @return a <code>String</code> with the name of the jdbc driver; or
* <p>
* <code>null</code> if the driver hasn't been set
* @see #setDriver(String)
* @see #getAliasedDriver()
* @since 1.0
*/
public String getDriver() {
// make sure that a JNDI connection has been made first, so that the database name can be looked up
if (mDataSource != null &&
null == mDriver) {
getConnection();
}
return mDriver;
}
/**
* Retrieves the fully qualified class name of the jdbc driver that's used
* by this <code>Datasource</code>. Instead of straight retrieval of the
* internal value, it looks for jdbc driver aliases and changes the driver
* classname if it's not supported by RIFE, but its alias is.
*
* @return a <code>String</code> with the name of the jdbc driver; or
* <p>
* <code>null</code> if the driver hasn't been set
* @see #getDriver()
* @see #setDriver(String)
* @since 1.0
*/
public String getAliasedDriver() {
String driver = getDriver();
if (null == driver) {
return null;
}
String alias = sDriverAliases.get(driver);
if (null == alias) {
return driver;
}
return alias;
}
/**
* Sets the jdbc driver that will be used to connect to the database. This
* has to be a fully qualified class name and will be looked up through
* reflection. It's not possible to change the driver after a connection
* has been obtained from a pooled datasource.
* <p>
* If the class name can't be resolved, an exception is thrown during the
* creation of the first connection.
*
* @param driver a <code>String</code> with the fully qualified class name
* of the jdbc driver that will be used
* @see #getDriver()
* @since 1.0
*/
public void setDriver(String driver) {
if (null == driver) throw new IllegalArgumentException("driver can't be null.");
if (0 == driver.length()) throw new IllegalArgumentException("driver can't be empty.");
if (mConnectionPool.isInitialized())
throw new IllegalArgumentException("driver can't be changed after the connection pool has been set up.");
mDriver = driver;
mSqlConversion = null;
}
/**
* Retrieves the standard datasource that is used by this RIFE datasource
* to obtain a database connection.
*
* @return a standard <code>DataSource</code>; or
* <p>
* <code>null</code> if the standard datasource hasn't been set
* @see #setDataSource(DataSource)
* @since 1.3
*/
public DataSource getDataSource() {
return mDataSource;
}
/**
* Sets the standard datasource that will be used to connect to the database.
*
* @param dataSource a standard <code>DataSource</code> that will be used
* by this RIFE datasource to obtain a database connection.
* @see #getDataSource()
* @since 1.0
*/
public void setDataSource(DataSource dataSource) {
mDataSource = dataSource;
}
/**
* Retrieves the connection url that's used by this <code>Datasource</code>.
*
* @return a <code>String</code> with the connection url; or
* <p>
* <code>null</code> if the url hasn't been set
* @see #setUrl(String)
* @since 1.0
*/
public String getUrl() {
return mUrl;
}
/**
* Sets the connection url that will be used to connect to the database.
* It's not possible to change the url after a connection has been obtained
* from a pooled datasource.
*
* @param url a <code>String</code> with the connection url that will be
* used
* @see #getUrl()
* @since 1.0
*/
public void setUrl(String url) {
if (null == url) throw new IllegalArgumentException("url can't be null.");
if (0 == url.length()) throw new IllegalArgumentException("url can't be empty.");
if (mConnectionPool.isInitialized())
throw new IllegalArgumentException("url can't be changed after the connection pool has been set up.");
mUrl = url;
}
/**
* Retrieves the user that's used by this <code>Datasource</code>.
*
* @return a <code>String</code> with the user; or
* <p>
* <code>null</code> if the user hasn't been set
* @see #setUser(String)
* @since 1.0
*/
public String getUser() {
return mUser;
}
/**
* Sets the user that will be used to connect to the database.
* It's not possible to change the user after a connection has been obtained
* from a pooled datasource.
*
* @param user a <code>String</code> with the user that will be used
* @see #getUser()
* @since 1.0
*/
public void setUser(String user) {
if (mConnectionPool.isInitialized())
throw new IllegalArgumentException("user can't be changed after the connection pool has been set up.");
mUser = user;
}
/**
* Retrieves the password that's used by this <code>Datasource</code>.
*
* @return a <code>String</code> with the password; or
* <p>
* <code>null</code> if the password hasn't been set
* @see #setPassword(String)
* @since 1.0
*/
public String getPassword() {
return mPassword;
}
/**
* Sets the password that will be used to connect to the database.
* It's not possible to change the password after a connection has been
* obtained from a pooled datasource.
*
* @param password a <code>String</code> with the password that will be used
* @see #getPassword()
* @since 1.0
*/
public void setPassword(String password) {
if (mConnectionPool.isInitialized())
throw new IllegalArgumentException("password can't be changed after the connection pool has been set up.");
mPassword = password;
}
/**
* Retrieves the size of the pool that's used by this
* <code>Datasource</code>.
*
* @return a positive <code>int</code> with the size of the pool; or
* <p>
* <code>0</code> if no pool is being used
* @see #isPooled()
* @see #setPoolsize(int)
* @since 1.0
*/
public int getPoolsize() {
return mConnectionPool.getPoolSize();
}
/**
* Indicates whether the <code>Datasource</code> uses a connection pool or
* not
*
* @return <code>true</code> if a pool is being used by this
* <code>Datasource</code>; or
* <p>
* <code>false</code> otherwise
* @see #getPoolsize()
* @see #setPoolsize(int)
* @since 1.0
*/
public boolean isPooled() {
return getPoolsize() > 0;
}
/**
* Sets the size of the connection pool that will be used to connect to the
* database.
*
* @param poolsize a positive <code>int</code> with the size of the pool,
* providing <code>0</code> will disable the use of a connection pool for
* this <code>Datasource</code>.
* @see #getPoolsize()
* @see #isPooled()
* @since 1.0
*/
public void setPoolsize(int poolsize) {
if (poolsize < 0) throw new IllegalArgumentException("poolsize can't be negative.");
mConnectionPool.setPoolSize(poolsize);
}
/**
* Retrieves the sql to java and java to sql type mapping logic that
* corresponds to the provided driver class name.
*
* @return a <code>SqlConversion</code> instance that is able to perform
* the required type conversions for the provided jdbc driver
* @throws UnsupportedJdbcDriverException when the provided jdbc driver isn't
* supported
* @since 1.0
*/
public SqlConversion getSqlConversion()
throws UnsupportedJdbcDriverException {
String driver = getDriver();
if (null == mSqlConversion &&
null != driver) {
try {
mSqlConversion = (SqlConversion) Class.forName("rife.database.types.databasedrivers." + StringUtils.encodeClassname(getAliasedDriver())).newInstance();
} catch (InstantiationException | IllegalAccessException | ClassNotFoundException e) {
throw new UnsupportedJdbcDriverException(driver, e);
}
}
return mSqlConversion;
}
/**
* Retrieves a <code>CapabilitiesCompensator</code> instance that is able to
* compensate for certain missing database features
*
* @return the requested <code>CapabilitiesCompensator</code> instance
* @throws UnsupportedJdbcDriverException when the provided jdbc driver isn't
* supported
* @since 1.0
*/
public CapabilitiesCompensator getCapabilitiesCompensator()
throws UnsupportedJdbcDriverException {
String driver = getDriver();
if (null == mCapabilitiesCompensator &&
null != driver) {
try {
mCapabilitiesCompensator = (CapabilitiesCompensator) Class.forName("rife.database.capabilities." + StringUtils.encodeClassname(getAliasedDriver())).newInstance();
} catch (InstantiationException | IllegalAccessException | ClassNotFoundException e) {
throw new UnsupportedJdbcDriverException(driver, e);
}
}
return mCapabilitiesCompensator;
}
/**
* Returns a hash code value for the <code>Datasource</code>. This method is
* supported for the benefit of hashtables such as those provided by
* <code>java.util.Hashtable</code>.
*
* @return an <code>int</code> with the hash code value for this
* <code>Datasource</code>.
* @see #equals(Object)
* @since 1.0
*/
public int hashCode() {
int dataSourceHash = mDataSource == null ? 1 : mDataSource.hashCode();
int driverHash = mDriver == null ? 1 : mDriver.hashCode();
int urlHash = mUrl == null ? 1 : mUrl.hashCode();
int userHash = mUser == null ? 1 : mUser.hashCode();
int passwordHash = mPassword == null ? 1 : mPassword.hashCode();
return dataSourceHash * driverHash * urlHash * userHash * passwordHash;
}
/**
* Indicates whether some other object is "equal to" this one. Only the
* real connection parameters will be taken into account. The size of the
* pool is not used for the comparison.
*
* @param object the reference object with which to compare.
* @return <code>true</code> if this object is the same as the object
* argument; or
* <p>
* <code>false</code> otherwise
* @see #hashCode()
* @since 1.0
*/
public boolean equals(Object object) {
if (this == object) {
return true;
}
if (null == object) {
return false;
}
if (!(object instanceof Datasource)) {
return false;
}
Datasource other_datasource = (Datasource) object;
if (!other_datasource.getDriver().equals(getDriver())) {
return false;
}
if (other_datasource.getUrl() != null || getUrl() != null) {
if (null == other_datasource.getUrl() || null == getUrl()) {
return false;
}
if (!other_datasource.getUrl().equals(getUrl())) {
return false;
}
}
if (other_datasource.getDataSource() != null || getDataSource() != null) {
if (null == other_datasource.getDataSource() || null == getDataSource()) {
return false;
}
if (!other_datasource.getDataSource().equals(getDataSource())) {
return false;
}
}
if (other_datasource.getUser() != null || getUser() != null) {
if (null == other_datasource.getUser() || null == getUser()) {
return false;
}
if (!other_datasource.getUser().equals(getUser())) {
return false;
}
}
if (other_datasource.getPassword() != null || getPassword() != null) {
if (null == other_datasource.getPassword() || null == getPassword()) {
return false;
}
if (!other_datasource.getPassword().equals(getPassword())) {
return false;
}
}
return true;
}
/**
* Simply clones the instance with the default clone method. This creates a
* shallow copy of all fields and the clone will in fact just be another
* reference to the same underlying data. The independence of each cloned
* instance is consciously not respected since they rely on resources
* that can't be cloned.
*
* @since 1.0
*/
public Datasource clone() {
Datasource other = null;
try {
other = (Datasource) super.clone();
} catch (CloneNotSupportedException e) {
// this should never happen
Logger.getLogger("rife.database").severe(ExceptionUtils.getExceptionStackTrace(e));
return null;
}
other.mSqlConversion = mSqlConversion;
other.mConnectionPool = mConnectionPool;
return other;
}
/**
* Cleans up all connections that have been reserved by this datasource.
*
* @throws DatabaseException when an error occurred during the cleanup
* @since 1.0
*/
public void cleanup()
throws DatabaseException {
mConnectionPool.cleanup();
}
/**
* Retrieves the instance of the connection pool that is provided by this
* datasource.
*
* @return the requested instance of <code>ConnectionPool</code>
*/
public ConnectionPool getPool() {
return mConnectionPool;
}
}
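A usage sketch, separate from the diff, of the DataSource-wrapping constructor; H2's JdbcDataSource is used only because H2 is already a test dependency of this commit, and the URL and credentials are placeholders. The driver class is resolved lazily from the connection metadata through the sDriverNames table above.

import org.h2.jdbcx.JdbcDataSource;
import rife.database.Datasource;
import rife.database.DbConnection;

public class WrapStandardDataSourceExample {
    public static void main(String[] args) throws Exception {
        // any standard javax.sql.DataSource can be wrapped; H2's implementation is illustrative
        JdbcDataSource standard = new JdbcDataSource();
        standard.setURL("jdbc:h2:mem:wrapped");
        standard.setUser("sa");

        // pool of 2 connections; the driver name is detected on first use
        Datasource datasource = new Datasource(standard, 2);
        DbConnection connection = datasource.getConnection();
        System.out.println(datasource.getDriver());    // "org.h2.Driver" once detected
        connection.close();
    }
}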


@@ -0,0 +1,108 @@
/*
* Copyright 2001-2022 Geert Bevin <gbevin[remove] at uwyn dot com>
* Licensed under the Apache License, Version 2.0 (the "License")
*/
package rife.database;
import rife.database.exceptions.*;
import java.util.Collection;
import java.util.HashMap;
/**
* Contains a collection of <code>Datasource</code> instances.
*
* @author Geert Bevin <gbevin[remove] at uwyn dot com>
* @version $Revision$
* @see rife.database.Datasource
* @since 1.0
*/
public class Datasources {
private HashMap<String, Datasource> map_ = new HashMap<>();
/**
* Creates a new empty <code>Datasources</code> instance.
*
* @since 1.0
*/
public Datasources()
throws DatasourcesException {
}
/**
* Returns the shared singleton instance of the
* <code>Datasources</code> class.
*
* @return the singleton <code>Datasources</code> instance
* @since 2.0
*/
public static Datasources instance() {
return DatasourcesSingleton.INSTANCE;
}
/**
* Retrieves the <code>Datasource</code> that corresponds to a provided
* name.
*
* @param name a <code>String</code> that identifies the
* <code>Datasource</code> that has to be retrieved
* @return the requested <code>Datasource</code> instance; or
* <p>
* <code>null</code> if name isn't known
* @since 1.0
*/
public Datasource getDatasource(String name) {
return map_.get(name);
}
/**
* Stores a <code>Datasource</code> with a provided name to be able to
* reference it later.
*
* @param name a <code>String</code> that identifies the
* <code>Datasource</code>
* @param datasource the <code>Datasource</code> instance that has to be
* stored
* @since 1.0
*/
public void setDatasource(String name, Datasource datasource) {
if (null == name) throw new IllegalArgumentException("name can't be null.");
if (0 == name.length()) throw new IllegalArgumentException("name can't be empty.");
if (null == datasource) throw new IllegalArgumentException("datasource can't be null.");
map_.put(name, datasource);
}
/**
* Retrieves a collection of all the <code>Datasource</code> names that are
* known by this <code>Datasources</code> instance.
*
* @return the requested <code>Collection</code>
* @since 1.0
*/
public Collection<String> getDatasourceNames() {
return map_.keySet();
}
/**
* Cleans up all connections that have been reserved by this datasource.
*
* @throws DatabaseException when an error occurred during the cleanup
* @since 1.0
*/
public void cleanup()
throws DatabaseException {
synchronized (this) {
if (null == map_) {
return;
}
HashMap<String, Datasource> data_sources = map_;
map_ = null;
for (Datasource datasource : data_sources.values()) {
datasource.cleanup();
}
}
}
}
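A sketch of typical use, not part of the diff: datasources are registered under a name in the shared singleton and looked up again elsewhere. The PostgreSQL URL and credentials are placeholders.

import rife.database.Datasource;
import rife.database.Datasources;

public class NamedDatasourcesExample {
    public static void main(String[] args) {
        // register a datasource under a name in the shared collection
        Datasource pgsql = new Datasource(
            "org.postgresql.Driver", "jdbc:postgresql://localhost/appdb", "app", "secret", 10);
        Datasources.instance().setDatasource("main", pgsql);

        // look it up by name elsewhere in the application
        Datasource main = Datasources.instance().getDatasource("main");
        System.out.println(main.getUrl() + " " + Datasources.instance().getDatasourceNames());
    }
}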


@@ -0,0 +1,9 @@
/*
* Copyright 2001-2022 Geert Bevin <gbevin[remove] at uwyn dot com>
* Licensed under the Apache License, Version 2.0 (the "License")
*/
package rife.database;
class DatasourcesSingleton {
static final Datasources INSTANCE = new Datasources();
}


@@ -0,0 +1,266 @@
/*
* Copyright 2001-2022 Geert Bevin <gbevin[remove] at uwyn dot com>
* Licensed under the Apache License, Version 2.0 (the "License")
*/
package rife.database;
import rife.database.exceptions.BeanException;
import rife.database.exceptions.DatabaseException;
import java.beans.BeanInfo;
import java.beans.IntrospectionException;
import java.beans.Introspector;
import java.beans.PropertyDescriptor;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
/**
* This class allows a {@link ResultSet} to be easily processed into bean
* instances.
* <p>Multiple instances can be collected into a list when processing an
* entire {@link ResultSet}, or a single bean instance can be retrieved for
* one row of a {@link ResultSet}. The default behavior is to not collect
* instances.
*
* @author JR Boyens (jboyens[remove] at uwyn dot com)
* @author Geert Bevin <gbevin[remove] at uwyn dot com>
* @since 1.0
*/
public class DbBeanFetcher<BeanType> extends DbRowProcessor {
private Datasource datasource_ = null;
private Class<BeanType> beanClass_ = null;
private BeanType lastBeanInstance_ = null;
private final HashMap<String, PropertyDescriptor> beanProperties_ = new HashMap<>();
private ArrayList<BeanType> collectedInstances_ = null;
/**
* Create a new DbBeanFetcher
*
* @param datasource the datasource to be used
* @param beanClass the type of bean that will be handled
* @throws BeanException thrown if there is an error getting
* information about the bean via the beanClass
* @since 1.0
*/
public DbBeanFetcher(Datasource datasource, Class<BeanType> beanClass)
throws BeanException {
this(datasource, beanClass, false);
}
/**
* Create a new DbBeanFetcher
*
* @param datasource the datasource to be used
* @param beanClass the type of bean that will be handled
* @param collectInstances <code>true</code> if the fetcher should
* collect the bean instances; <code>false</code> otherwise
* @throws BeanException thrown if there is an error getting
* information about the bean via the beanClass
* @since 1.0
*/
public DbBeanFetcher(Datasource datasource, Class<BeanType> beanClass, boolean collectInstances)
throws BeanException {
if (null == datasource) throw new IllegalArgumentException("datasource can't be null.");
if (null == beanClass) throw new IllegalArgumentException("beanClass can't be null.");
BeanInfo bean_info = null;
datasource_ = datasource;
beanClass_ = beanClass;
try {
bean_info = Introspector.getBeanInfo(beanClass);
} catch (IntrospectionException e) {
throw new BeanException("Couldn't introspect the bean with class '" + beanClass_.getName() + "'.", beanClass, e);
}
PropertyDescriptor[] bean_properties = bean_info.getPropertyDescriptors();
for (PropertyDescriptor bean_property : bean_properties) {
beanProperties_.put(bean_property.getName().toLowerCase(), bean_property);
}
if (collectInstances) {
collectedInstances_ = new ArrayList<BeanType>();
}
assert datasource_ != null;
assert beanClass_ != null;
assert null == lastBeanInstance_;
}
/**
* Process a ResultSet row into a bean. Call this method on a {@link
* ResultSet} and the resulting bean will be stored and be accessible
* via {@link #getBeanInstance()}
*
* @param resultSet the {@link ResultSet} from which to process the
* row
* @return <code>true</code> if a bean instance was retrieved; or
* <p><code>false</code> otherwise
* @throws SQLException thrown when there is a problem processing
* the row
*/
public boolean processRow(ResultSet resultSet)
throws SQLException {
if (null == resultSet) throw new IllegalArgumentException("resultSet can't be null.");
BeanType instance = null;
try {
instance = beanClass_.newInstance();
} catch (InstantiationException e) {
SQLException e2 = new SQLException("Can't instantiate a bean with class '" + beanClass_.getName() + "' : " + e.getMessage());
e2.initCause(e);
throw e2;
} catch (IllegalAccessException e) {
SQLException e2 = new SQLException("No permission to instantiate a bean with class '" + beanClass_.getName() + "' : " + e.getMessage());
e2.initCause(e);
throw e2;
}
ResultSetMetaData meta = resultSet.getMetaData();
String column_name;
String column_label;
for (int i = 1; i <= meta.getColumnCount(); i++) {
column_name = meta.getColumnName(i).toLowerCase();
column_label = meta.getColumnLabel(i).toLowerCase();
if (beanProperties_.containsKey(column_name)) {
populateBeanProperty(instance, column_name, meta, resultSet, i);
} else if (beanProperties_.containsKey(column_label)) {
populateBeanProperty(instance, column_label, meta, resultSet, i);
}
}
lastBeanInstance_ = instance;
if (collectedInstances_ != null) {
collectedInstances_.add(instance);
}
return gotBeanInstance(instance);
}
private void populateBeanProperty(BeanType instance, String propertyName, ResultSetMetaData meta, ResultSet resultSet, int columnIndex)
throws SQLException {
PropertyDescriptor property = beanProperties_.get(propertyName);
Method write_method = property.getWriteMethod();
if (write_method != null) {
try {
int column_type = meta.getColumnType(columnIndex);
Object typed_object;
try {
typed_object = datasource_.getSqlConversion().getTypedObject(resultSet, columnIndex, column_type, property.getPropertyType());
} catch (DatabaseException e) {
SQLException e2 = new SQLException("Data conversion error while obtaining the typed object.");
e2.initCause(e);
throw e2;
}
// the sql conversion couldn't create a typed value
if (null == typed_object) {
// check if the object returned by the resultset is of the same type hierarchy as the property type
Object column_value = resultSet.getObject(columnIndex);
if (column_value != null &&
property.getPropertyType().isAssignableFrom(column_value.getClass())) {
typed_object = column_value;
}
// otherwise try to call the property type's constructor with a string argument
else {
String column_stringvalue = resultSet.getString(columnIndex);
if (column_stringvalue != null) {
try {
Constructor<?> constructor = property.getPropertyType().getConstructor(String.class);
if (constructor != null) {
typed_object = constructor.newInstance((Object[]) new String[]{column_stringvalue});
}
} catch (SecurityException e) {
instance = null;
SQLException e2 = new SQLException("No permission to obtain the String constructor of the property with name '" + property.getName() + "' and class '" + property.getPropertyType().getName() + "' of the bean with class '" + beanClass_.getName() + "'.");
e2.initCause(e);
throw e2;
} catch (NoSuchMethodException e) {
instance = null;
SQLException e2 = new SQLException("Couldn't find a String constructor for the property with name '" + property.getName() + "' and class '" + property.getPropertyType().getName() + "' of the bean with class '" + beanClass_.getName() + "'.");
e2.initCause(e);
throw e2;
} catch (InstantiationException e) {
instance = null;
SQLException e2 = new SQLException("Can't instantiate a new instance of the property with name '" + property.getName() + "' and class '" + property.getPropertyType().getName() + "' of the bean with class '" + beanClass_.getName() + "'.");
e2.initCause(e);
throw e2;
}
}
}
}
// if the typed object isn't null, set the value
if (typed_object != null) {
// store the typed value through the property's write method
write_method.invoke(instance, typed_object);
}
} catch (IllegalAccessException e) {
instance = null;
SQLException e2 = new SQLException("No permission to invoke the '" + write_method.getName() + "' method on the bean with class '" + beanClass_.getName() + "'.");
e2.initCause(e);
throw e2;
} catch (IllegalArgumentException e) {
instance = null;
SQLException e2 = new SQLException("Invalid arguments while invoking the '" + write_method.getName() + "' method on the bean with class '" + beanClass_.getName() + "'.");
e2.initCause(e);
throw e2;
} catch (InvocationTargetException e) {
instance = null;
SQLException e2 = new SQLException("The '" + write_method.getName() + "' method of the bean with class '" + beanClass_.getName() + "' has thrown an exception");
e2.initCause(e);
throw e2;
} catch (SQLException e) {
instance = null;
SQLException e2 = new SQLException("SQLException while invoking the '" + write_method.getName() + "' method of the bean with class '" + beanClass_.getName() + "'");
e2.initCause(e);
throw e2;
}
}
}
/**
* Hook method that can be overridden to receive new bean instances as
* they are retrieved, without relying on the internal collection into
* a list.
*
* @param instance the received bean instance
* @return <code>true</code> if the bean fetcher should continue to
* retrieve the next bean; or
* <p><code>false</code> if the retrieval should stop after this bean
* @since 1.0
*/
public boolean gotBeanInstance(BeanType instance) {
return true;
}
/**
* Get the last processed bean instance
*
* @return the last processed bean instance
* @since 1.0
*/
public BeanType getBeanInstance() {
return lastBeanInstance_;
}
/**
* Get the collected bean instances
*
* @return the collected bean instances
* @since 1.0
*/
public List<BeanType> getCollectedInstances() {
return collectedInstances_;
}
}
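An illustrative sketch, not part of the commit, that drives DbBeanFetcher by hand over a plain JDBC ResultSet; the Person bean, the H2 table and its rows are made up for the example, and instance collection is enabled through the three-argument constructor.

import rife.database.Datasource;
import rife.database.DbBeanFetcher;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class BeanFetcherExample {
    // minimal bean whose property name matches the column selected below
    public static class Person {
        private String name;
        public String getName() { return name; }
        public void setName(String name) { this.name = name; }
    }

    public static void main(String[] args) throws Exception {
        Datasource datasource = new Datasource(
            "org.h2.Driver", "jdbc:h2:mem:people", "sa", "", 0);

        // plain JDBC is used here only to produce a ResultSet for the example
        try (Connection connection = DriverManager.getConnection("jdbc:h2:mem:people", "sa", "");
             Statement statement = connection.createStatement()) {
            statement.execute("create table person (name varchar(50))");
            statement.execute("insert into person values ('Alice'), ('Bob')");

            // collectInstances = true makes the fetcher keep every bean it builds
            DbBeanFetcher<Person> fetcher = new DbBeanFetcher<>(datasource, Person.class, true);
            try (ResultSet rs = statement.executeQuery("select name from person")) {
                while (rs.next()) {
                    fetcher.processRow(rs);
                }
            }
            for (Person person : fetcher.getCollectedInstances()) {
                System.out.println(person.getName());
            }
        }
    }
}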

File diff suppressed because it is too large.


@@ -0,0 +1,105 @@
/*
* Copyright 2001-2022 Geert Bevin <gbevin[remove] at uwyn dot com>
* Licensed under the Apache License, Version 2.0 (the "License")
*/
package rife.database;
import rife.tools.ExceptionUtils;
import rife.tools.InnerClassException;
import java.util.logging.Logger;
/**
* By extending this class it's possible to provide the logic that should be
* executed by the {@link DbQueryManager#reserveConnection(DbConnectionUser)
* reserveConnection} method in the {@link DbQueryManager} class.
* <p>This class has both a default constructor and one that can take a data
* object. This can be handy when using it as an extending anonymous inner
* class when you need to use variables inside the inner class that are
* cumbersome to change to <code>final</code> in the enclosing class.
*
* @author JR Boyens (jboyens[remove] at uwyn dot com)
* @author Geert Bevin <gbevin[remove] at uwyn dot com>
* @see DbConnection
* @see DbQueryManager#reserveConnection(DbConnectionUser)
* @since 1.0
*/
public abstract class DbConnectionUser<ResultType, DataType> implements Cloneable {
protected DataType data_ = null;
/**
* Create a new DbConnectionUser.
*
* @since 1.0
*/
public DbConnectionUser() {
}
/**
* Create a new DbConnectionUser with a data object.
*
* @param data a user data object to be stored locally
* @since 1.0
*/
public DbConnectionUser(DataType data) {
data_ = data;
}
/**
* Retrieve the data object that was provided to the constructor.
*
* @return the provided data object; or
* <p><code>null</code> if no data object was provided
* @since 1.0
*/
public DataType getData() {
return data_;
}
/**
* Calling this method makes it possible to throw a checked exception
* from within this class.
* <p>To catch it you should surround the {@link
* DbQueryManager#reserveConnection(DbConnectionUser)
* reserveConnection} with a <code>try-catch</code> block that catches
* <code>InnerClassException</code>. The original exception is then
* available through <code>getCause()</code> and can for example be
* rethrown.
*
* @param exception the exception to be thrown
* @exception InnerClassException when a checked exception needs to be
* thrown from within this class and caught outside the caller.
* @since 1.0
*/
public void throwException(Exception exception)
throws InnerClassException {
throw new InnerClassException(exception);
}
/**
* Should be implemented by all extending classes.
*
* @param connection the connection that should be used
* @since 1.0
*/
public abstract ResultType useConnection(DbConnection connection)
throws InnerClassException;
/**
* Simply clones the instance with the default clone method since a shallow
* copy that shares the data object reference is sufficient.
*
* @since 1.0
*/
public Object clone() {
try {
return super.clone();
} catch (CloneNotSupportedException e) {
// this should never happen
Logger.getLogger("rife.database").severe(ExceptionUtils.getExceptionStackTrace(e));
return null;
}
}
}
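A sketch, not part of the diff, of the anonymous-inner-class usage the Javadoc above describes. DbQueryManager and its reserveConnection method belong to a diff that is suppressed above, so their exact signatures are assumed here; the H2 settings are placeholders.

import rife.database.Datasource;
import rife.database.DbConnection;
import rife.database.DbConnectionUser;
import rife.database.DbQueryManager;

public class ReserveConnectionExample {
    public static void main(String[] args) {
        Datasource datasource = new Datasource(
            "org.h2.Driver", "jdbc:h2:mem:reserve", "sa", "", 4);
        // constructor assumed from the suppressed DbQueryManager diff
        DbQueryManager manager = new DbQueryManager(datasource);

        // everything inside useConnection() runs on the same reserved connection,
        // which keeps a whole transaction on a single connection
        Object result = manager.reserveConnection(new DbConnectionUser<String, Void>() {
            public String useConnection(DbConnection connection) {
                // execute several statements against 'connection' here
                return "done";
            }
        });
        System.out.println(result);
    }
}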

File diff suppressed because it is too large.


@@ -0,0 +1,56 @@
/*
* Copyright 2001-2022 Geert Bevin <gbevin[remove] at uwyn dot com>
* Licensed under the Apache License, Version 2.0 (the "License")
*/
package rife.database;
/**
* By extending this class it's possible to easily customize the behaviour of
* a large number of methods in the {@link DbQueryManager} class.
* <p>You're able to set the parameters of a {@link DbPreparedStatement}
* before the actual execution of any logic by overriding the {@link
* #setParameters(DbPreparedStatement) setParameters} method.
* <p>If you need to customize the entire query execution, you can override
* the {@link #performUpdate(DbPreparedStatement) performUpdate} and {@link
* #performQuery(DbPreparedStatement) performQuery} methods. Note that these
* methods are actually responsible for calling the {@link
* #setParameters(DbPreparedStatement) setParameters} method, so if you
* override them you either have to call this method yourself or include the
* code in the overridden method.
* <p>This class has both a default constructor and one that can take a data
* object. This can be handy when using it as an extending anonymous inner
* class when you need to use variables inside the inner class that are
* cumbersome to change to <code>final</code> in the enclosing class.
*
* @author Geert Bevin <gbevin[remove] at uwyn dot com>
* @see DbPreparedStatement
* @see DbQueryManager
* @since 1.0
*/
public abstract class DbPreparedStatementHandler<DataType> extends DbResultSetHandler {
protected DataType data_ = null;
public DbPreparedStatementHandler() {
}
public DbPreparedStatementHandler(DataType data) {
data_ = data;
}
public DataType getData() {
return data_;
}
public void setParameters(DbPreparedStatement statement) {
}
public int performUpdate(DbPreparedStatement statement) {
setParameters(statement);
return statement.executeUpdate();
}
public void performQuery(DbPreparedStatement statement) {
setParameters(statement);
statement.executeQuery();
}
}
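A sketch, not part of the diff, of a concrete handler that binds a parameter in setParameters(), which performUpdate() and performQuery() invoke just before execution. DbPreparedStatement's diff is suppressed above, so its setString(int, String) method is assumed here to mirror java.sql.PreparedStatement.

import rife.database.DbPreparedStatement;
import rife.database.DbPreparedStatementHandler;

// the handler's data object carries the value, so no enclosing final variable is needed
public class NamedInsertHandler extends DbPreparedStatementHandler<String> {
    public NamedInsertHandler(String name) {
        super(name);
    }

    // called by performUpdate()/performQuery() right before the statement runs;
    // setString(int, String) on DbPreparedStatement is an assumption
    public void setParameters(DbPreparedStatement statement) {
        statement.setString(1, getData());
    }
}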


@@ -0,0 +1,117 @@
/*
* Copyright 2001-2008 Steven Grimm <koreth[remove] at midwinter dot com>
* Licensed under the Apache License, Version 2.0 (the "License")
* $Id: DbConnection.java 3442 2006-08-10 09:26:43Z gbevin $
*/
package rife.database;
import rife.database.exceptions.DbQueryException;
import rife.scheduler.Executor;
import rife.scheduler.Task;
/**
* Periodic probe job to keep connections non-idle and probe for dead ones.
* This is primarily useful for MySQL, which closes connections after a
* period of inactivity.
*
* <p>This should be run using a scheduler participant. For example, to
* probe the "mysql" Datasource once a minute:
*
* <pre> &lt;scheduler&gt;
* &lt;task classname="rife.database.DbProbeExecutor"
* frequency="* * * * *"&gt;
* &lt;option name="datasource"&gt;mysql&lt;/option&gt;
* &lt;option name="query"&gt;select 1&lt;/option&gt;
* &lt;/task&gt;
* &lt;/scheduler&gt;</pre>
*
* <p>There are two optional parameters.
* <dl>
* <dt><code>datasource</code></dt>
* <dd>The name of the Datasource to probe. If not specified, the
* default is "datasource".</dd>
* <dt><code>query</code></dt>
* <dd>The dummy query to send. If not specified, the default is
* "select 1".</dd>
* </dl>
*
* @author Steven Grimm (koreth[remove] at midwinter dot com)
* @version $Revision: $
* @since 1.6
*/
public class DbProbeExecutor extends Executor {
@Override
public boolean executeTask(Task task) {
try {
String ds_name = task.getTaskoptionValue("datasource");
if (null == ds_name) {
ds_name = "datasource";
}
String query = task.getTaskoptionValue("query");
if (null == query) {
query = "select 1";
}
Datasource ds = Datasources.instance().getDatasource(ds_name);
if (null == ds) {
throw new DbQueryException("Can't find Datasource '" + ds_name + "'");
}
ConnectionPool cp = ds.getPool();
if (null == cp) {
throw new DbQueryException("Datasource '" + ds_name + "' has no ConnectionPool");
}
/*
* Now fetch all the connections that should be in the pool,
* and run a dummy statement on each of them to keep it from
* going idle.
*
* This relies on the fact that ConnectionPool returns
* DbConnection objects in a round-robin fashion. We can just
* fetch the next connection the appropriate number of times
* and be guaranteed to hit all of them.
*
* If there are transactions active on other threads, we will
* not be given those threads' DbConnection objects, so we
* might end up being handed the same connection twice. No
* harm in that, and any connection that has an active
* transaction isn't idle anyway so doesn't need to be probed.
*/
synchronized (cp) {
for (int i = 0; i < cp.getPoolSize(); i++) {
DbConnection conn = ds.getConnection();
if (null == conn) {
throw new DbQueryException("Can't get connection");
}
DbPreparedStatement stmt = conn.getPreparedStatement(query);
if (null == stmt) {
throw new DbQueryException("Can't prepare dummy statement");
}
try {
/*
* Probe the connection. If this fails, RIFE will remove
* the connection from the pool automatically.
*/
stmt.executeQuery();
} finally {
stmt.close();
conn.close();
}
}
}
} catch (Exception e) {
throw new DbQueryException("Can't probe MySQL connection", e);
}
return true;
}
@Override
public String getHandledTasktype() {
return "DbProbe";
}
}

File diff suppressed because it is too large.


@@ -0,0 +1,92 @@
/*
* Copyright 2001-2022 Geert Bevin <gbevin[remove] at uwyn dot com>
* Licensed under the Apache License, Version 2.0 (the "License")
*/
package rife.database;
import java.util.HashMap;
import java.util.Map;
/**
* This class is a simple cache for {@link DbQueryManager} objects. {@link
* DbQueryManager} objects are cached by their related {@link Datasource} and
* an identifier.
*
* @author JR Boyens (jboyens[remove] at uwyn dot com)
* @author Geert Bevin <gbevin[remove] at uwyn dot com>
* @since 1.0
*/
public class DbQueryManagerCache {
private final Map<Datasource, HashMap<String, DbQueryManager>> cache_ = new HashMap<>();
/**
* Default constructor
*
* @since 1.0
*/
public DbQueryManagerCache() {
}
/**
* Retrieve a cached {@link DbQueryManager}
*
* @param datasource the {@link Datasource} associated with the
* desired {@link DbQueryManager}
* @param identifier the identifier associated with the desired {@link
* DbQueryManager}
* @return the cached {@link DbQueryManager}
* @since 1.0
*/
public DbQueryManager get(Datasource datasource, String identifier) {
if (null == datasource) throw new IllegalArgumentException("datasource can't be null.");
if (null == identifier) throw new IllegalArgumentException("identifier can't be null.");
HashMap<String, DbQueryManager> dbquery_managers = null;
synchronized (cache_) {
dbquery_managers = cache_.get(datasource);
if (null == dbquery_managers) {
return null;
}
}
synchronized (dbquery_managers) {
return dbquery_managers.get(identifier);
}
}
/**
* Place a {@link DbQueryManager} in the cache
*
* @param datasource the {@link Datasource} associated with the {@link
* DbQueryManager} to put in the cache
* @param identifier the identifier associated with the {@link
* DbQueryManager} to put in the cache
* @param dbQueryManager the {@link DbQueryManager} to put in the
* cache
* @since 1.0
*/
public void put(Datasource datasource, String identifier, DbQueryManager dbQueryManager) {
if (null == datasource) throw new IllegalArgumentException("datasource can't be null.");
if (null == identifier) throw new IllegalArgumentException("identifier can't be null.");
if (null == dbQueryManager) throw new IllegalArgumentException("dbQueryManager can't be null.");
HashMap<String, DbQueryManager> dbquery_managers;
synchronized (cache_) {
dbquery_managers = cache_.get(datasource);
if (null == dbquery_managers) {
dbquery_managers = new HashMap<>();
cache_.put(datasource, dbquery_managers);
}
}
synchronized (dbquery_managers) {
dbquery_managers.put(identifier, dbQueryManager);
}
assert cache_.containsKey(datasource);
assert cache_.get(datasource).containsKey(identifier);
assert cache_.get(datasource).get(identifier) == dbQueryManager;
}
}
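
/*
 * Illustrative usage sketch, not part of this commit: look a manager up by
 * datasource and identifier, and register one when it isn't cached yet.
 * The com.example package, the ArticleManagers class and the "articles"
 * identifier are hypothetical, and the sketch assumes DbQueryManager's
 * public Datasource constructor; the Datasource itself is expected to be
 * configured elsewhere.
 */
package com.example.articles;

import rife.database.Datasource;
import rife.database.DbQueryManager;
import rife.database.DbQueryManagerCache;

public class ArticleManagers {
    private final DbQueryManagerCache cache_ = new DbQueryManagerCache();

    public DbQueryManager obtain(Datasource datasource) {
        // try the cache first, keyed on the datasource and an identifier
        DbQueryManager manager = cache_.get(datasource, "articles");
        if (null == manager) {
            manager = new DbQueryManager(datasource);
            cache_.put(datasource, "articles", manager);
        }
        return manager;
    }
}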

View file

@ -0,0 +1,130 @@
/*
* Copyright 2001-2022 Geert Bevin <gbevin[remove] at uwyn dot com>
* Licensed under the Apache License, Version 2.0 (the "License")
*/
package rife.database;
import rife.database.Datasource;
import rife.database.exceptions.UnsupportedJdbcDriverException;
import rife.tools.StringUtils;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
/**
* This class allows for {@link DbQueryManager}s to be created more
* dynamically and with more features than by direct instantiation.
* <p>By using the <code>DbQueryManagerFactory</code>,
* <code>DbQueryManager</code> child classes can have custom methods that are
* implemented by different "drivers", based on the database software behind
* the {@link Datasource}. Database "drivers" are looked up through the
* manager's classpath according to the package name and the encoded class
* name of the JDBC driver (dots are replaced by underscores). The default, or
* "generic" driver, must be created under this package and will be used when
* no specific driver can be found for a particular <code>Datasource</code>.
* All the created DbQueryManagers are cached in the provided cache and are
* re-used on successive calls rather than being re-instantiated.
*
* @author JR Boyens (jboyens[remove] at uwyn dot com)
* @author Geert Bevin <gbevin[remove] at uwyn dot com>
* @since 1.0
*/
public abstract class DbQueryManagerFactory {
private static final String GENERIC_DRIVER = "generic";
/**
* Get a <code>DbQueryManager</code> instance.
*
* @param managerPackageName the package name that corresponds to the
* location of the manager
* @param cache the cache to be used to cache the
* <code>DbQueryManager</code>s
* @param datasource the datasource to instantiate the
* <code>DbQueryManager</code> for
* @return the created <code>DbQueryManager</code> instance
* @since 1.0
*/
protected static DbQueryManager getInstance(String managerPackageName, DbQueryManagerCache cache, Datasource datasource) {
return getInstance(managerPackageName, cache, datasource, "");
}
/**
* Get a <code>DbQueryManager</code> instance.
*
* @param managerPackageName the package name that corresponds to the
* location of the manager
* @param cache the cache to be used to cache the
* <code>DbQueryManager</code>s
* @param datasource the datasource to instantiate the
* <code>DbQueryManager</code> for
* @param identifier the identifier to be used to uniquely identify
* this <code>DbQueryManager</code>
* @return the created <code>DbQueryManager</code> instance
* @since 1.0
*/
protected static DbQueryManager getInstance(String managerPackageName, DbQueryManagerCache cache, Datasource datasource, String identifier) {
if (null == managerPackageName) throw new IllegalArgumentException("managerPackageName can't be null.");
if (0 == managerPackageName.length()) throw new IllegalArgumentException("managerPackageName can't be empty.");
if (null == cache) throw new IllegalArgumentException("cache can't be null.");
if (null == datasource) throw new IllegalArgumentException("datasource can't be null.");
if (null == identifier) throw new IllegalArgumentException("identifier can't be null.");
DbQueryManager dbquery_manager = null;
synchronized (cache) {
dbquery_manager = cache.get(datasource, identifier);
if (dbquery_manager != null) {
return dbquery_manager;
}
            // construct a uniform package name
StringBuilder package_name = new StringBuilder(managerPackageName);
if (!managerPackageName.endsWith(".")) {
package_name.append(".");
}
// construct the specialized driver class name
StringBuilder specialized_name = new StringBuilder(package_name.toString());
String driver = datasource.getAliasedDriver();
specialized_name.append(StringUtils.encodeClassname(driver));
try {
try {
Class<DbQueryManager> specialized_class = (Class<DbQueryManager>) Class.forName(specialized_name.toString());
Constructor<DbQueryManager> specialized_constructor = specialized_class.getConstructor(Datasource.class);
dbquery_manager = specialized_constructor.newInstance(datasource);
} catch (ClassNotFoundException e) {
// could not find a specialized class, try to get a generic driver
try {
// construct the generic driver class name
StringBuilder generic_name = new StringBuilder(package_name.toString());
generic_name.append(GENERIC_DRIVER);
Class<DbQueryManager> generic_class = (Class<DbQueryManager>) Class.forName(generic_name.toString());
Constructor<DbQueryManager> generic_constructor = generic_class.getConstructor(Datasource.class);
dbquery_manager = generic_constructor.newInstance(datasource);
} catch (ClassNotFoundException e2) {
throw new UnsupportedJdbcDriverException(driver, e);
}
}
} catch (InstantiationException | IllegalAccessException | NoSuchMethodException | SecurityException e) {
throw new UnsupportedJdbcDriverException(driver, e);
} catch (InvocationTargetException e) {
if (e.getTargetException() != null) {
throw new RuntimeException(e.getTargetException());
} else {
throw new UnsupportedJdbcDriverException(driver, e);
}
}
cache.put(datasource, identifier, dbquery_manager);
}
assert datasource == dbquery_manager.getDatasource();
return dbquery_manager;
}
}
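
/*
 * Illustrative sketch, not part of this commit: a concrete factory built on
 * DbQueryManagerFactory. The com.example package, the ArticleStoreFactory
 * name and the "databasedrivers" sub-package are hypothetical; driver-specific
 * managers would live in that sub-package under the encoded JDBC driver class
 * name, next to a "generic" fallback implementation.
 */
package com.example.articles;

import rife.database.Datasource;
import rife.database.DbQueryManager;
import rife.database.DbQueryManagerCache;
import rife.database.DbQueryManagerFactory;

public abstract class ArticleStoreFactory extends DbQueryManagerFactory {
    private static final String MANAGER_PACKAGE_NAME = "com.example.articles.databasedrivers.";
    private static final DbQueryManagerCache cache_ = new DbQueryManagerCache();

    public static DbQueryManager instance(Datasource datasource) {
        // getInstance() caches per datasource and falls back to the
        // "generic" driver when no specialized class can be found
        return getInstance(MANAGER_PACKAGE_NAME, cache_, datasource);
    }
}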

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,55 @@
/*
* Copyright 2001-2022 Geert Bevin <gbevin[remove] at uwyn dot com>
* Licensed under the Apache License, Version 2.0 (the "License")
*/
package rife.database;
import rife.database.queries.Query;
import rife.tools.ExceptionUtils;
import java.sql.SQLException;
import java.util.logging.Logger;
/**
* By extending this class it's possible to easily customize the behaviour of
* some methods in the {@link DbQueryManager} class.
* <p>You're able to perform custom logic with the resultset of a query by
* overriding the {@link #concludeResults(DbResultSet) concludeResults} method
* and returning an object.
* <p>You're not supposed to close the resultset in this method.
*
* @author Geert Bevin <gbevin[remove] at uwyn dot com>
* @see DbResultSet
* @see DbQueryManager
* @since 1.0
*/
public abstract class DbResultSetHandler implements Cloneable {
public DbStatement createStatement(DbConnection connection) {
return connection.createStatement();
}
public DbPreparedStatement getPreparedStatement(Query query, DbConnection connection) {
return connection.getPreparedStatement(query);
}
public Object concludeResults(DbResultSet resultset)
throws SQLException {
return null;
}
/**
* Simply clones the instance with the default clone method since this
* class contains no member variables.
*
* @since 1.0
*/
public Object clone() {
try {
return super.clone();
} catch (CloneNotSupportedException e) {
// this should never happen
Logger.getLogger("rife.database").severe(ExceptionUtils.getExceptionStackTrace(e));
return null;
}
}
}
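
/*
 * Illustrative sketch, not part of this commit: a handler that turns the
 * first row of a result set into a String. The com.example package, the
 * FirstTitleHandler class and the "title" column are hypothetical, and the
 * sketch assumes DbResultSet exposes the standard java.sql.ResultSet
 * methods. As documented above, the handler doesn't close the result set.
 */
package com.example.articles;

import rife.database.DbResultSet;
import rife.database.DbResultSetHandler;

import java.sql.SQLException;

public class FirstTitleHandler extends DbResultSetHandler {
    public Object concludeResults(DbResultSet resultSet)
    throws SQLException {
        // advance to the first row; return null when the result set is empty
        if (!resultSet.next()) {
            return null;
        }
        return resultSet.getString("title");
    }
}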

View file

@ -0,0 +1,125 @@
/*
* Copyright 2001-2022 Geert Bevin <gbevin[remove] at uwyn dot com>
* Licensed under the Apache License, Version 2.0 (the "License")
*/
package rife.database;
import rife.database.exceptions.DatabaseException;
import rife.database.exceptions.RowProcessorErrorException;
import rife.tools.ExceptionUtils;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.logging.Logger;
/**
* This abstract base class should be used to implement classes that process one
 * row in a database query resultset. The <code>fetch</code> method of a
* <code>DbQueryManager</code> requires an instance of a
* <code>DbRowProcessor</code> and calls its <code>processRow</code>
* method each time it is called.
* <p>
* The <code>DbRowProcessor</code> instance can then work with the result set
* and extract all needed data. It is free to implement any logic to be
* able to return the retrieved data in an acceptable form to the user.
* <p>
 * A class that extends <code>DbRowProcessor</code> can for example take a
 * <code>Template</code> instance as the argument of its constructor and
 * progressively fill in each resulting row in an HTML table. This avoids
 * having to keep the query results in memory just to hand them off to a
 * separate method that's responsible for generating the output. Using a
 * <code>DbRowProcessor</code> thus allows for a clean separation and
 * abstraction of result processing without being burdened by potentially
 * large memory usage or object allocation.
*
* @author Geert Bevin <gbevin[remove] at uwyn dot com>
* @see #processRow(ResultSet resultSet)
* @see rife.database.DbQueryManager
* @since 1.0
*/
public abstract class DbRowProcessor implements Cloneable {
private boolean successful_ = false;
/**
* This method has to be implemented by each class that extends the
* <code>DbRowProcessor</code> class. It has to contain all the logic that
* should be executed for each row of a resultset.
*
* @param resultSet the <code>ResultSet</code> instance that was provided to
* the <code>DbQueryManager</code>'s <code>fetch</code> method.
* @return <code>true</code> if the processing is considered successful; or
* <p>
* <code>false</code> if the processing is considered failed.
* <p>
* Note: this return value is purely indicative and unless the user does
* checks with the <code>wasSuccessful()</code> method, it will have no
* influence on anything.
* @throws SQLException when a database error occurs, it's thus not
* necessary to catch all the possible <code>SQLException</code>s inside
 * this method. They'll be caught higher up and transformed into
 * <code>DatabaseException</code>s.
* @see DbQueryManager#fetch(ResultSet, DbRowProcessor)
* @see #wasSuccessful()
* @since 1.0
*/
public abstract boolean processRow(ResultSet resultSet)
throws SQLException;
/**
* Indicates whether the processing of the row was successful.
*
* @return <code>true</code> if the processing was successful; or
* <p>
* <code>false</code> if the processing was unsuccessful.
* @since 1.0
*/
public final boolean wasSuccessful() {
return successful_;
}
/**
* This method wraps around the actual {@link #processRow(ResultSet)} method
* to ensure that the success status is reset at each iteration and that the
* possible <code>SQLException</code>s are caught correctly.
* <p>
* This is the method that's called internally by the <code>fetch()</code>
* method of a <code>DbQueryManager</code>. It is not meant to be used by
* the user.
*
* @param resultSet a <code>ResultSet</code> instance that was returned
* after a query's execution.
* @throws DatabaseException when a database access error occurred during
* the processing of the resultset row
* @see #processRow(ResultSet)
* @see DbQueryManager#fetch(ResultSet, DbRowProcessor)
* @since 1.0
*/
final void processRowWrapper(ResultSet resultSet)
throws DatabaseException {
if (null == resultSet) throw new IllegalArgumentException("resultSet can't be null.");
successful_ = false;
try {
successful_ = processRow(resultSet);
} catch (SQLException e) {
successful_ = false;
throw new RowProcessorErrorException(e);
}
}
/**
* Simply clones the instance with the default clone method since this
* class contains no object member variables.
*
* @since 1.0
*/
public Object clone() {
try {
return super.clone();
} catch (CloneNotSupportedException e) {
// this should never happen
Logger.getLogger("rife.database").severe(ExceptionUtils.getExceptionStackTrace(e));
return null;
}
}
}
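
/*
 * Illustrative sketch, not part of this commit: a row processor that
 * collects one column of every fetched row, as an alternative to keeping
 * the whole result set in memory. The com.example package, the
 * TitleCollector class and the "title" column are hypothetical.
 */
package com.example.articles;

import rife.database.DbRowProcessor;

import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;

public class TitleCollector extends DbRowProcessor {
    private final List<String> titles_ = new ArrayList<>();

    public boolean processRow(ResultSet resultSet)
    throws SQLException {
        // called once for each row by DbQueryManager's fetch methods
        titles_.add(resultSet.getString("title"));
        return true;
    }

    public List<String> getTitles() {
        return titles_;
    }
}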

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,110 @@
/*
* Copyright 2001-2022 Geert Bevin <gbevin[remove] at uwyn dot com>
* Licensed under the Apache License, Version 2.0 (the "License")
*/
package rife.database;
import rife.database.exceptions.RollbackException;
import rife.tools.ExceptionUtils;
import rife.tools.InnerClassException;
import java.util.logging.Logger;
/**
* By extending this class it's possible to provide the logic that should be
* executed by the {@link DbQueryManager#inTransaction(DbTransactionUser) inTransaction}
* method in the {@link DbQueryManager} class.
 * <p>This class has both a default constructor and one that can take a data
 * object. The latter can be handy when this class is used as an anonymous
 * inner class and you need access to variables that would be cumbersome to
 * make <code>final</code> in the enclosing scope.
*
* @author Geert Bevin <gbevin[remove] at uwyn dot com>
* @see DbQueryManager#inTransaction(DbTransactionUser)
* @since 1.0
*/
public abstract class DbTransactionUser<ResultType, DataType> implements Cloneable {
protected DataType data_ = null;
public DbTransactionUser() {
}
public DbTransactionUser(DataType data) {
data_ = data;
}
public DataType getData() {
return data_;
}
/**
* Should be overridden if the transaction has to be executed in another
* isolation level.
*
* @return <code>-1</code> when the active isolation level should be
* preserved; or
* <p>a level constant from {@link java.sql.Connection Connection} if the
* isolation needs to be changed.
* @since 1.0
*/
public int getTransactionIsolation() {
return -1;
}
/**
 * Should be used to roll back ongoing transactions, otherwise enclosing
 * transaction users might not be interrupted and subsequent modifications
 * could still happen outside the transaction.
*
* @throws RollbackException indicates that a rollback should happen
* and all further transaction logic interrupted.
* @since 1.0
*/
public void rollback()
throws RollbackException {
throw new RollbackException();
}
/**
* Calling this method makes it possible to throw a checked exception from
* within this class.
 * <p>To catch it you should surround the {@link
 * DbQueryManager#inTransaction(DbTransactionUser) inTransaction} call with a
 * <code>try-catch</code> block that catches
 * <code>InnerClassException</code>. The original exception is then
* available through <code>getCause()</code> and can for example be
* rethrown.
*
* @throws InnerClassException when a checked exception needs to be
* thrown from within this class and caught outside the caller.
* @since 1.0
*/
public void throwException(Exception exception)
throws InnerClassException {
throw new InnerClassException(exception);
}
/**
* Should be implemented by all extending classes.
*
* @since 1.0
*/
public abstract ResultType useTransaction()
throws InnerClassException;
/**
* Simply clones the instance with the default clone method since this
* class contains no member variables.
*
* @since 1.0
*/
public Object clone() {
try {
return super.clone();
} catch (CloneNotSupportedException e) {
// this should never happen
Logger.getLogger("rife.database").severe(ExceptionUtils.getExceptionStackTrace(e));
return null;
}
}
}
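
/*
 * Illustrative sketch, not part of this commit: a transaction user that
 * carries a data object through its constructor, as described above. The
 * com.example package, the StoreArticleTransaction class and the String
 * data type are hypothetical; the real work would typically issue several
 * statements through a DbQueryManager and call rollback() to abort on a
 * business-level error.
 */
package com.example.articles;

import rife.database.DbTransactionUser;
import rife.tools.InnerClassException;

public class StoreArticleTransaction extends DbTransactionUser<Boolean, String> {
    public StoreArticleTransaction(String title) {
        // the data object is available later through getData()
        super(title);
    }

    public Boolean useTransaction()
    throws InnerClassException {
        String title = getData();
        if (null == title || title.isEmpty()) {
            // aborts the surrounding transaction by throwing RollbackException
            rollback();
        }
        // the actual inserts or updates would be executed here
        return Boolean.TRUE;
    }
}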

View file

@ -0,0 +1,46 @@
/*
* Copyright 2001-2022 Geert Bevin <gbevin[remove] at uwyn dot com>
* Licensed under the Apache License, Version 2.0 (the "License")
*/
package rife.database;
import rife.tools.InnerClassException;
/**
* Convenience class that offers the same facilities as the
* <code>DbTransactionUser</code> class, but makes it easier to work with
* transactions that don't return any results.
*
* @author Geert Bevin <gbevin[remove] at uwyn dot com>
* @see DbTransactionUser
* @since 1.0
*/
public abstract class DbTransactionUserWithoutResult<DataType> extends DbTransactionUser<Object, DataType> {
public DbTransactionUserWithoutResult() {
}
public DbTransactionUserWithoutResult(DataType data) {
super(data);
}
/**
* Has been implemented to return a <code>null</code> reference and
* delegate the logic to the <code>useTransactionWithoutResult()</code>
* method.
*
* @since 1.0
*/
public Object useTransaction()
throws InnerClassException {
useTransactionWithoutResult();
return null;
}
/**
* Should be implemented by all extending classes.
*
* @since 1.0
*/
public abstract void useTransactionWithoutResult()
throws InnerClassException;
}
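
/*
 * Illustrative sketch, not part of this commit: the without-result variant,
 * often used as an anonymous inner class. The com.example package and the
 * PurgeExpiredArticles class are hypothetical, and the method body is a
 * placeholder for the statements that should run atomically.
 */
package com.example.articles;

import rife.database.DbTransactionUserWithoutResult;
import rife.tools.InnerClassException;

public class PurgeExpiredArticles extends DbTransactionUserWithoutResult<Void> {
    public void useTransactionWithoutResult()
    throws InnerClassException {
        // the delete statements belonging to this transaction would go here
    }
}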

View file

@ -0,0 +1,98 @@
/*
* Copyright 2001-2022 Geert Bevin <gbevin[remove] at uwyn dot com>
* Licensed under the Apache License, Version 2.0 (the "License")
*/
package rife.database;
import java.util.HashMap;
import java.util.Map;
import rife.database.queries.Query;
import rife.database.queries.QueryParameters;
/**
* Internal class to handle virtual parameters of a
* <code>DbPreparedStatement</code>.
*
* @author Geert Bevin <gbevin[remove] at uwyn dot com>
* @since 1.0
*/
public class VirtualParameters {
private QueryParameters parameters_ = null;
private Map<Integer, Integer> indexMapping_ = null;
private Map<Integer, Object> values_ = null;
private VirtualParametersHandler handler_ = null;
/**
* Creates a new <code>VirtualParameters</code> instance.
*
* @param parameters the actual parameters that are virtual.
* @param handler the <code>VirtualParametersHandler</code> that will
* be used by the {@link #callHandler(DbPreparedStatement)} method.
* @since 1.0
*/
public VirtualParameters(QueryParameters parameters, VirtualParametersHandler handler) {
if (null == parameters) throw new IllegalArgumentException("parameters can't be null.");
if (null == handler) throw new IllegalArgumentException("handler can't be null.");
parameters_ = parameters;
handler_ = handler;
}
/**
* Calls the registered <code>VirtualParametersHandler</code>. This is
* typically called when all virtual parameters have been defined in a
* prepared statement and the statement is ready to be executed.
*
* @param statement the prepared statement that has all the virtual
* parameters defined.
* @since 1.0
*/
public void callHandler(DbPreparedStatement statement) {
handler_.handleValues(statement);
}
void setup(Query query) {
indexMapping_ = query.getParameters().getVirtualIndexMapping(parameters_);
}
Object getValue(int index) {
if (null == values_) {
return null;
}
return values_.get(index);
}
boolean hasValue(int index) {
if (null == values_) {
return false;
}
return values_.containsKey(index);
}
boolean hasParameter(int index) {
if (null == indexMapping_) {
return false;
}
return indexMapping_.containsKey(index);
}
int getRealIndex(int index) {
if (null == indexMapping_) {
return -1;
}
return indexMapping_.get(index);
}
void putValue(int index, Object value) {
if (null == values_) {
            values_ = new HashMap<>();
}
values_.put(index, value);
}
}

View file

@ -0,0 +1,24 @@
/*
* Copyright 2001-2022 Geert Bevin <gbevin[remove] at uwyn dot com>
* Licensed under the Apache License, Version 2.0 (the "License")
*/
package rife.database;
/**
* Internal interface that defines the methods that a
* <code>VirtualParameters</code> handler has to support.
*
* @author Geert Bevin <gbevin[remove] at uwyn dot com>
* @since 1.0
*/
public interface VirtualParametersHandler {
/**
 * Do whatever is needed according to the virtual parameters that have
* been defined in a prepared statement before execution.
*
* @param statement the prepared statement that has all the virtual
* parameters defined.
* @since 1.0
*/
public void handleValues(DbPreparedStatement statement);
}

View file

@ -0,0 +1,31 @@
/*
* Copyright 2001-2022 Geert Bevin <gbevin[remove] at uwyn dot com>
* Licensed under the Apache License, Version 2.0 (the "License")
*/
package rife.database.capabilities;
import rife.database.DbConnection;
import rife.database.DbPreparedStatement;
import rife.database.DbResultSet;
import rife.database.DbResultSetHandler;
import rife.database.exceptions.DatabaseException;
import rife.database.queries.Query;
public class AbstractCapabilitiesCompensator implements CapabilitiesCompensator {
public DbPreparedStatement getCapablePreparedStatement(Query query, DbResultSetHandler handler, DbConnection connection)
throws DatabaseException {
query.setExcludeUnsupportedCapabilities(true);
if (null == handler) {
return connection.getPreparedStatement(query);
}
return handler.getPreparedStatement(query, connection);
}
public DbResultSet getCapableResultSet(DbPreparedStatement statement)
throws DatabaseException {
return statement.getResultSet();
}
}

View file

@ -0,0 +1,15 @@
/*
* Copyright 2001-2022 Geert Bevin <gbevin[remove] at uwyn dot com>
* Licensed under the Apache License, Version 2.0 (the "License")
*/
package rife.database.capabilities;
import java.io.Serial;
import java.util.HashMap;
public class Capabilities extends HashMap<Capability, Object> {
@Serial private static final long serialVersionUID = -3879196202222749203L;
public Capabilities() {
}
}

View file

@ -0,0 +1,21 @@
/*
* Copyright 2001-2022 Geert Bevin <gbevin[remove] at uwyn dot com>
* Licensed under the Apache License, Version 2.0 (the "License")
*/
package rife.database.capabilities;
import rife.database.DbConnection;
import rife.database.DbPreparedStatement;
import rife.database.DbResultSet;
import rife.database.DbResultSetHandler;
import rife.database.exceptions.DatabaseException;
import rife.database.queries.Query;
public interface CapabilitiesCompensator {
public DbPreparedStatement getCapablePreparedStatement(Query query, DbResultSetHandler handler, DbConnection connection)
throws DatabaseException;
public DbResultSet getCapableResultSet(DbPreparedStatement statement)
throws DatabaseException;
}

View file

@ -0,0 +1,23 @@
/*
* Copyright 2001-2022 Geert Bevin <gbevin[remove] at uwyn dot com>
* Licensed under the Apache License, Version 2.0 (the "License")
*/
package rife.database.capabilities;
import rife.datastructures.*;
public class Capability extends EnumClass<String> {
public static final Capability LIMIT = new Capability("LIMIT");
public static final Capability LIMIT_PARAMETER = new Capability("LIMIT_PARAMETER");
public static final Capability OFFSET = new Capability("OFFSET");
public static final Capability OFFSET_PARAMETER = new Capability("OFFSET_PARAMETER");
Capability(String identifier) {
super(identifier);
}
public static Capability getMethod(String name) {
return getMember(Capability.class, name);
}
}
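
/*
 * Illustrative sketch, not part of this commit: building a capabilities map
 * by hand. In practice query builders expose this through
 * Query.getCapabilities(), and a CapabilitiesCompensator such as
 * LimitOffsetCompensator uses it to emulate LIMIT/OFFSET on drivers without
 * native support. The com.example package, the CapabilitiesSketch class and
 * the values 10 and 20 are arbitrary examples.
 */
package com.example.articles;

import rife.database.capabilities.Capabilities;
import rife.database.capabilities.Capability;

public class CapabilitiesSketch {
    public static Capabilities limitTenOffsetTwenty() {
        Capabilities capabilities = new Capabilities();
        // the compensators read these entries back as Integer values
        capabilities.put(Capability.LIMIT, 10);
        capabilities.put(Capability.OFFSET, 20);
        return capabilities;
    }
}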

View file

@ -0,0 +1,134 @@
/*
* Copyright 2001-2022 Geert Bevin <gbevin[remove] at uwyn dot com>
* Licensed under the Apache License, Version 2.0 (the "License")
*/
package rife.database.capabilities;
import rife.database.DbPreparedStatement;
import rife.database.DbResultSet;
import rife.database.VirtualParameters;
import rife.database.VirtualParametersHandler;
import rife.database.exceptions.DatabaseException;
import rife.database.queries.Query;
import rife.database.queries.QueryParameterType;
import rife.database.queries.QueryParameters;
import java.sql.SQLException;
public class LimitOffsetCompensator implements VirtualParametersHandler {
private boolean useRelativeForScrolling_ = false;
    public void setUseRelativeForScrolling(boolean useRelativeForScrolling) {
        useRelativeForScrolling_ = useRelativeForScrolling;
}
public void handleCapablePreparedStatement(DbPreparedStatement statement)
throws DatabaseException {
Query query = statement.getQuery();
if (query != null) {
// obtain capabilities
Capabilities capabilities = query.getCapabilities();
if (capabilities != null) {
// handle limit and offset capabilities
if (capabilities.containsKey(Capability.LIMIT)) {
// limit the fetch size of the resultset
int max_rows = 0;
int limit = (Integer) capabilities.get(Capability.LIMIT);
statement.setFetchSize(limit);
max_rows += limit;
// limit the maximum number of rows
if (capabilities.containsKey(Capability.OFFSET)) {
max_rows += (Integer) capabilities.get(Capability.OFFSET);
}
if (max_rows != 0) {
statement.setMaxRows(max_rows);
}
}
// handle limit and offset parameter capabilities
else if (capabilities.containsKey(Capability.LIMIT_PARAMETER)) {
QueryParameters parameters = query.getParameters();
if (parameters != null) {
QueryParameters virtual_query_parameters = parameters.getNewInstance();
virtual_query_parameters.addTypedParameter(QueryParameterType.LIMIT, (String) capabilities.get(Capability.LIMIT_PARAMETER));
if (capabilities.containsKey(Capability.OFFSET_PARAMETER)) {
virtual_query_parameters.addTypedParameter(QueryParameterType.OFFSET, (String) capabilities.get(Capability.OFFSET_PARAMETER));
}
VirtualParameters virtual_parameters = new VirtualParameters(virtual_query_parameters, this);
statement.setVirtualParameters(virtual_parameters);
}
}
}
}
}
public void handleValues(DbPreparedStatement statement)
throws DatabaseException {
Query query = statement.getQuery();
if (query != null) {
// obtain capabilities
Capabilities capabilities = query.getCapabilities();
if (capabilities != null) {
// handle limit and offset capabilities
if (capabilities.containsKey(Capability.LIMIT_PARAMETER)) {
// limit the fetch size of the resultset
int max_rows = 0;
String limit_parameter_name = (String) capabilities.get(Capability.LIMIT_PARAMETER);
int limit = Integer.parseInt(String.valueOf(statement.getVirtualParameterValue(limit_parameter_name)));
statement.setFetchSize(limit);
max_rows += limit;
// limit the maximum number of rows
if (capabilities.containsKey(Capability.OFFSET_PARAMETER)) {
String offset_parameter_name = (String) capabilities.get(Capability.OFFSET_PARAMETER);
int offset = Integer.parseInt(String.valueOf(statement.getVirtualParameterValue(offset_parameter_name)));
max_rows += offset;
}
if (max_rows != 0) {
statement.setMaxRows(max_rows);
}
}
}
}
}
public void handleCapableResultSet(DbPreparedStatement statement)
throws DatabaseException {
DbResultSet resultset = statement.getResultSet();
// obtain capabilities
Capabilities capabilities = statement.getQuery().getCapabilities();
if (capabilities != null &&
(capabilities.containsKey(Capability.LIMIT) ||
capabilities.containsKey(Capability.LIMIT_PARAMETER))) {
int offset = -1;
// handle limit and offset capabilities
if (capabilities.containsKey(Capability.OFFSET)) {
offset = (Integer) capabilities.get(Capability.OFFSET);
} else if (capabilities.containsKey(Capability.OFFSET_PARAMETER)) {
String parameter_name = (String) capabilities.get(Capability.OFFSET_PARAMETER);
offset = Integer.parseInt(String.valueOf(statement.getVirtualParameterValue(parameter_name)));
}
// apply the offset
if (offset > 0) {
try {
if (useRelativeForScrolling_) {
resultset.relative(offset);
} else {
while (offset > 0) {
resultset.next();
offset--;
}
}
} catch (SQLException e) {
throw new DatabaseException(e);
}
}
}
}
}

View file

@ -0,0 +1,9 @@
/*
* Copyright 2001-2022 Geert Bevin <gbevin[remove] at uwyn dot com>
* Licensed under the Apache License, Version 2.0 (the "License")
*/
package rife.database.capabilities;
public class com_mysql_cj_jdbc_Driver extends AbstractCapabilitiesCompensator {
}

View file

@ -0,0 +1,41 @@
/*
* Copyright 2001-2022 Geert Bevin <gbevin[remove] at uwyn dot com>
* Licensed under the Apache License, Version 2.0 (the "License")
*/
package rife.database.capabilities;
import rife.database.DbConnection;
import rife.database.DbPreparedStatement;
import rife.database.DbResultSet;
import rife.database.DbResultSetHandler;
import rife.database.exceptions.DatabaseException;
import rife.database.queries.Query;
public class oracle_jdbc_driver_OracleDriver extends AbstractCapabilitiesCompensator {
    private final LimitOffsetCompensator limitOffsetCompensator_ = new LimitOffsetCompensator();
    public DbPreparedStatement getCapablePreparedStatement(Query query, DbResultSetHandler handler, DbConnection connection)
    throws DatabaseException {
        query.setExcludeUnsupportedCapabilities(true);
        // either create a new prepared statement or get it from the handler
        DbPreparedStatement statement = null;
        if (null == handler) {
            statement = connection.getPreparedStatement(query);
        } else {
            statement = handler.getPreparedStatement(query, connection);
        }
        limitOffsetCompensator_.handleCapablePreparedStatement(statement);
        return statement;
    }
    public DbResultSet getCapableResultSet(DbPreparedStatement statement)
    throws DatabaseException {
        limitOffsetCompensator_.handleCapableResultSet(statement);
        return statement.getResultSet();
    }
}

View file

@ -0,0 +1,41 @@
/*
* Copyright 2001-2022 Geert Bevin <gbevin[remove] at uwyn dot com>
* Licensed under the Apache License, Version 2.0 (the "License")
*/
package rife.database.capabilities;
import rife.database.DbConnection;
import rife.database.DbPreparedStatement;
import rife.database.DbResultSet;
import rife.database.DbResultSetHandler;
import rife.database.exceptions.DatabaseException;
import rife.database.queries.Query;
public class org_apache_derby_jdbc_EmbeddedDriver extends AbstractCapabilitiesCompensator {
    private final LimitOffsetCompensator limitOffsetCompensator_ = new LimitOffsetCompensator();
    public DbPreparedStatement getCapablePreparedStatement(Query query, DbResultSetHandler handler, DbConnection connection)
    throws DatabaseException {
        query.setExcludeUnsupportedCapabilities(true);
        // either create a new prepared statement or get it from the handler
        DbPreparedStatement statement = null;
        if (null == handler) {
            statement = connection.getPreparedStatement(query);
        } else {
            statement = handler.getPreparedStatement(query, connection);
        }
        limitOffsetCompensator_.handleCapablePreparedStatement(statement);
        return statement;
    }
    public DbResultSet getCapableResultSet(DbPreparedStatement statement)
    throws DatabaseException {
        limitOffsetCompensator_.handleCapableResultSet(statement);
        return statement.getResultSet();
    }
}

View file

@ -0,0 +1,9 @@
/*
* Copyright 2001-2022 Geert Bevin <gbevin[remove] at uwyn dot com>
* Licensed under the Apache License, Version 2.0 (the "License")
*/
package rife.database.capabilities;
public class org_h2_Driver extends AbstractCapabilitiesCompensator {
}

View file

@ -0,0 +1,9 @@
/*
* Copyright 2001-2022 Geert Bevin <gbevin[remove] at uwyn dot com>
* Licensed under the Apache License, Version 2.0 (the "License")
*/
package rife.database.capabilities;
public class org_hsqldb_jdbcDriver extends AbstractCapabilitiesCompensator {
}

View file

@ -0,0 +1,9 @@
/*
* Copyright 2001-2022 Geert Bevin <gbevin[remove] at uwyn dot com>
* Licensed under the Apache License, Version 2.0 (the "License")
*/
package rife.database.capabilities;
public class org_postgresql_Driver extends AbstractCapabilitiesCompensator {
}

View file

@ -0,0 +1,24 @@
/*
* Copyright 2001-2022 Geert Bevin <gbevin[remove] at uwyn dot com>
* Licensed under the Apache License, Version 2.0 (the "License")
*/
package rife.database.exceptions;
import rife.database.Datasource;
import java.io.Serial;
public class BatchExecutionErrorException extends DatabaseException {
@Serial private static final long serialVersionUID = 7946011449481688333L;
private final Datasource datasource_;
public BatchExecutionErrorException(Datasource datasource, Throwable cause) {
super("Error while executing the batch sql commands.", cause);
datasource_ = datasource;
}
public Datasource getDatasource() {
return datasource_;
}
}

View file

@ -0,0 +1,27 @@
/*
* Copyright 2001-2022 Geert Bevin <gbevin[remove] at uwyn dot com>
* Licensed under the Apache License, Version 2.0 (the "License")
*/
package rife.database.exceptions;
import java.io.Serial;
public class BeanException extends DbQueryException {
@Serial private static final long serialVersionUID = 7745938017589820114L;
private final Class bean_;
public BeanException(String message, Class bean) {
super(message);
bean_ = bean;
}
public BeanException(String message, Class bean, Throwable cause) {
super(message, cause);
bean_ = bean;
}
public Class getBean() {
return bean_;
}
}

View file

@ -0,0 +1,23 @@
/*
* Copyright 2001-2022 Geert Bevin <gbevin[remove] at uwyn dot com>
* Licensed under the Apache License, Version 2.0 (the "License")
*/
package rife.database.exceptions;
import java.io.Serial;
public class ColumnsRequiredException extends DbQueryException {
@Serial private static final long serialVersionUID = 6643369478401322040L;
private final String queryName_;
public ColumnsRequiredException(String queryName) {
super(queryName + " queries require columns.");
queryName_ = queryName;
}
public String getQueryName() {
return queryName_;
}
}

View file

@ -0,0 +1,24 @@
/*
* Copyright 2001-2022 Geert Bevin <gbevin[remove] at uwyn dot com>
* Licensed under the Apache License, Version 2.0 (the "License")
*/
package rife.database.exceptions;
import rife.database.Datasource;
import java.io.Serial;
public class ConnectionCloseErrorException extends DatabaseException {
@Serial private static final long serialVersionUID = 8592248767491353911L;
private final Datasource datasource_;
public ConnectionCloseErrorException(Datasource datasource, Throwable cause) {
super("Couldn't properly close the connection with url '" + datasource.getUrl() + "'.", cause);
datasource_ = datasource;
}
public Datasource getDatasource() {
return datasource_;
}
}

View file

@ -0,0 +1,24 @@
/*
* Copyright 2001-2022 Geert Bevin <gbevin[remove] at uwyn dot com>
* Licensed under the Apache License, Version 2.0 (the "License")
*/
package rife.database.exceptions;
import rife.database.Datasource;
import java.io.Serial;
public class ConnectionMetaDataErrorException extends DatabaseException {
@Serial private static final long serialVersionUID = 8314476636892309174L;
private final Datasource datasource_;
public ConnectionMetaDataErrorException(Datasource datasource, Throwable cause) {
super("Error while obtaining the metadata of the connection with url '" + datasource.getUrl() + "'.", cause);
datasource_ = datasource;
}
public Datasource getDatasource() {
return datasource_;
}
}

View file

@ -0,0 +1,41 @@
/*
* Copyright 2001-2022 Geert Bevin <gbevin[remove] at uwyn dot com>
* Licensed under the Apache License, Version 2.0 (the "License")
*/
package rife.database.exceptions;
import java.io.Serial;
public class ConnectionOpenErrorException extends DatabaseException {
@Serial private static final long serialVersionUID = -8963881858111262119L;
private final String url_;
private final String user_;
private final String password_;
public ConnectionOpenErrorException(String url, Throwable cause) {
super("Couldn't connect to the database with connection url '" + url + "'.", cause);
url_ = url;
user_ = null;
password_ = null;
}
public ConnectionOpenErrorException(String url, String user, String password, Throwable cause) {
super("Couldn't connect to the database with connection url '" + url + "'.", cause);
url_ = url;
user_ = user;
password_ = password;
}
public String getUrl() {
return url_;
}
public String getUser() {
return user_;
}
public String getPassword() {
return password_;
}
}

View file

@ -0,0 +1,24 @@
/*
* Copyright 2001-2022 Geert Bevin <gbevin[remove] at uwyn dot com>
* Licensed under the Apache License, Version 2.0 (the "License")
*/
package rife.database.exceptions;
import rife.database.Datasource;
import java.io.Serial;
public class ConnectionStatusErrorException extends DatabaseException {
@Serial private static final long serialVersionUID = -6733548295573208721L;
private final Datasource datasource_;
public ConnectionStatusErrorException(Datasource datasource, Throwable cause) {
super("Error while checking the status of the connection with url '" + datasource.getUrl() + "'.", cause);
datasource_ = datasource;
}
public Datasource getDatasource() {
return datasource_;
}
}

View file

@ -0,0 +1,23 @@
/*
* Copyright 2001-2022 Geert Bevin <gbevin[remove] at uwyn dot com>
* Licensed under the Apache License, Version 2.0 (the "License")
*/
package rife.database.exceptions;
import java.io.Serial;
public class DatabaseException extends RuntimeException {
@Serial private static final long serialVersionUID = -8915821806051354310L;
public DatabaseException(String message) {
super(message);
}
public DatabaseException(String message, Throwable cause) {
super(message, cause);
}
public DatabaseException(Throwable cause) {
super(cause);
}
}

View file

@ -0,0 +1,23 @@
/*
* Copyright 2001-2022 Geert Bevin <gbevin[remove] at uwyn dot com>
* Licensed under the Apache License, Version 2.0 (the "License")
*/
package rife.database.exceptions;
import java.io.Serial;
public class DatasourcesException extends DatabaseException {
@Serial private static final long serialVersionUID = 6060312635494918174L;
public DatasourcesException(String message) {
super(message);
}
public DatasourcesException(Throwable cause) {
super(cause);
}
public DatasourcesException(String message, Throwable cause) {
super(message, cause);
}
}

View file

@ -0,0 +1,19 @@
/*
* Copyright 2001-2022 Geert Bevin <gbevin[remove] at uwyn dot com>
* Licensed under the Apache License, Version 2.0 (the "License")
*/
package rife.database.exceptions;
import java.io.Serial;
public class DbQueryException extends RuntimeException {
@Serial private static final long serialVersionUID = -2143066136860048063L;
public DbQueryException(String message) {
super(message);
}
public DbQueryException(String message, Throwable cause) {
super(message, cause);
}
}

View file

@ -0,0 +1,22 @@
/*
* Copyright 2001-2022 Geert Bevin <gbevin[remove] at uwyn dot com>
* Licensed under the Apache License, Version 2.0 (the "License")
*/
package rife.database.exceptions;
import java.io.Serial;
public class DriverInstantiationErrorException extends DatabaseException {
@Serial private static final long serialVersionUID = -8357643089890580253L;
private final String driver_;
public DriverInstantiationErrorException(String driver, Throwable cause) {
super("Couldn't instantiate the JDBC driver '" + driver + "'.", cause);
driver_ = driver;
}
public String getDriver() {
return driver_;
}
}

View file

@ -0,0 +1,15 @@
/*
* Copyright 2001-2022 Geert Bevin <gbevin[remove] at uwyn dot com>
* Licensed under the Apache License, Version 2.0 (the "License")
*/
package rife.database.exceptions;
import java.io.Serial;
public class DriverNameRetrievalErrorException extends DatabaseException {
@Serial private static final long serialVersionUID = -2809651374321112986L;
public DriverNameRetrievalErrorException(Throwable cause) {
super("Unexpected error while retrieving the driver name of a JDBC connection.", cause);
}
}

View file

@ -0,0 +1,30 @@
/*
* Copyright 2001-2022 Geert Bevin <gbevin[remove] at uwyn dot com>
* Licensed under the Apache License, Version 2.0 (the "License")
*/
package rife.database.exceptions;
import rife.database.Datasource;
import java.io.Serial;
public class ExecutionErrorException extends DatabaseException {
@Serial private static final long serialVersionUID = 4317171502649179520L;
private final String sql_;
private final Datasource datasource_;
public ExecutionErrorException(String sql, Datasource datasource, Throwable cause) {
super("Error while executing the SQL '" + sql + "'.", cause);
sql_ = sql;
datasource_ = datasource;
}
public Datasource getDatasource() {
return datasource_;
}
public String getSql() {
return sql_;
}
}

View file

@ -0,0 +1,23 @@
/*
* Copyright 2001-2022 Geert Bevin <gbevin[remove] at uwyn dot com>
* Licensed under the Apache License, Version 2.0 (the "License")
*/
package rife.database.exceptions;
import java.io.Serial;
public class FieldsRequiredException extends DbQueryException {
@Serial private static final long serialVersionUID = 5937549014842696343L;
private String queryName_;
public FieldsRequiredException(String queryName) {
super(queryName + " queries require fields.");
queryName_ = queryName;
}
public String getQueryName() {
return queryName_;
}
}

View file

@ -0,0 +1,29 @@
/*
* Copyright 2001-2022 Geert Bevin <gbevin[remove] at uwyn dot com>
* Licensed under the Apache License, Version 2.0 (the "License")
*/
package rife.database.exceptions;
import java.io.Serial;
public class MissingManyToOneColumnException extends DatabaseException {
@Serial private static final long serialVersionUID = 1390166791727531269L;
private final Class constrainedClass_;
private final String propertyName_;
public MissingManyToOneColumnException(Class constrainedClass, String propertyName) {
super("The property '" + propertyName + "' of '" + constrainedClass.getName() + "' has a manyToOne constraint, however the column of the associated table is missing. This can be provided when the constraint is declared.");
constrainedClass_ = constrainedClass;
propertyName_ = propertyName;
}
public Class getConstrainedClass() {
return constrainedClass_;
}
public String getPropertyName() {
return propertyName_;
}
}

View file

@ -0,0 +1,29 @@
/*
* Copyright 2001-2022 Geert Bevin <gbevin[remove] at uwyn dot com>
* Licensed under the Apache License, Version 2.0 (the "License")
*/
package rife.database.exceptions;
import java.io.Serial;
public class MissingManyToOneTableException extends DatabaseException {
@Serial private static final long serialVersionUID = 9024147800617136452L;
private Class constrainedClass_;
private String propertyName_;
public MissingManyToOneTableException(Class constrainedClass, String propertyName) {
super("The property '" + propertyName + "' of '" + constrainedClass.getName() + "' has a manyToOne constraint, however the associated table name is missing. This can be provided by giving either the table name or the associated class when the constraint is declared.");
constrainedClass_ = constrainedClass;
propertyName_ = propertyName;
}
public Class getConstrainedClass() {
return constrainedClass_;
}
public String getPropertyName() {
return propertyName_;
}
}

View file

@ -0,0 +1,24 @@
/*
* Copyright 2001-2022 Geert Bevin <gbevin[remove] at uwyn dot com>
* Licensed under the Apache License, Version 2.0 (the "License")
*/
package rife.database.exceptions;
import rife.database.Datasource;
import java.io.Serial;
public class MissingResultsException extends DatabaseException {
@Serial private static final long serialVersionUID = 8032678779633066395L;
private final Datasource datasource_;
public MissingResultsException(Datasource datasource) {
super("Trying to fetch result from datasource '" + datasource.getUrl() + "' while no results are available.");
datasource_ = datasource;
}
public Datasource getDatasource() {
return datasource_;
}
}

View file

@ -0,0 +1,24 @@
/*
* Copyright 2001-2022 Geert Bevin <gbevin[remove] at uwyn dot com>
* Licensed under the Apache License, Version 2.0 (the "License")
*/
package rife.database.exceptions;
import rife.database.DbPreparedStatement;
import java.io.Serial;
public class NoParametersException extends DatabaseException {
@Serial private static final long serialVersionUID = -2087220322509692913L;
private final DbPreparedStatement preparedStatement_;
public NoParametersException(DbPreparedStatement statement) {
super("The statement with sql '" + statement.getSql() + "' doesn't contain any parameters.");
preparedStatement_ = statement;
}
public DbPreparedStatement getPreparedStatement() {
return preparedStatement_;
}
}

View file

@ -0,0 +1,24 @@
/*
* Copyright 2001-2022 Geert Bevin <gbevin[remove] at uwyn dot com>
* Licensed under the Apache License, Version 2.0 (the "License")
*/
package rife.database.exceptions;
import rife.database.DbPreparedStatement;
import java.io.Serial;
public class NoParametrizedQueryException extends DatabaseException {
@Serial private static final long serialVersionUID = -1606716036753773612L;
private final DbPreparedStatement preparedStatement_;
public NoParametrizedQueryException(DbPreparedStatement statement) {
super("The statement with sql '" + statement.getSql() + "' doesn't contain a parametrized query.");
preparedStatement_ = statement;
}
public DbPreparedStatement getPreparedStatement() {
return preparedStatement_;
}
}

View file

@ -0,0 +1,30 @@
/*
* Copyright 2001-2022 Geert Bevin <gbevin[remove] at uwyn dot com>
* Licensed under the Apache License, Version 2.0 (the "License")
*/
package rife.database.exceptions;
import rife.database.DbPreparedStatement;
import java.io.Serial;
public class ParameterDoesntExistException extends DatabaseException {
@Serial private static final long serialVersionUID = -5547694215702755839L;
private final DbPreparedStatement preparedStatement_;
private final String parameterName_;
public ParameterDoesntExistException(DbPreparedStatement statement, String parameterName) {
super("The statement with sql '" + statement.getSql() + "' doesn't contain the parameter '" + parameterName + "'.");
preparedStatement_ = statement;
parameterName_ = parameterName;
}
public DbPreparedStatement getPreparedStatement() {
return preparedStatement_;
}
public String getParameterName() {
return parameterName_;
}
}

View file

@ -0,0 +1,24 @@
/*
* Copyright 2001-2022 Geert Bevin <gbevin[remove] at uwyn dot com>
* Licensed under the Apache License, Version 2.0 (the "License")
*/
package rife.database.exceptions;
import rife.database.Datasource;
import java.io.Serial;
public class PreparedStatementCreationErrorException extends DatabaseException {
@Serial private static final long serialVersionUID = 527710892636948049L;
private final Datasource datasource_;
public PreparedStatementCreationErrorException(Datasource datasource, Throwable cause) {
super("Couldn't create a new prepared statement.", cause);
datasource_ = datasource;
}
public Datasource getDatasource() {
return datasource_;
}
}

View file

@ -0,0 +1,17 @@
/*
* Copyright 2001-2022 Geert Bevin <gbevin[remove] at uwyn dot com>
* Licensed under the Apache License, Version 2.0 (the "License")
*/
package rife.database.exceptions;
import java.io.Serial;
public class RollbackException extends DatabaseException {
    @Serial private static final long serialVersionUID = -8696265689207175989L;
    public RollbackException() {
        super("Causes a transaction user to trigger a rollback.");
    }
}

View file

@ -0,0 +1,18 @@
/*
* Copyright 2001-2022 Geert Bevin <gbevin[remove] at uwyn dot com>
* Licensed under the Apache License, Version 2.0 (the "License")
*/
package rife.database.exceptions;
import java.io.Serial;
import java.sql.SQLException;
public class RowIndexOutOfBoundsException extends SQLException {
    @Serial private static final long serialVersionUID = 3132609745592263804L;
    public RowIndexOutOfBoundsException() {
        super("Row index out of bounds.");
    }
}

View file

@ -0,0 +1,17 @@
/*
* Copyright 2001-2022 Geert Bevin <gbevin[remove] at uwyn dot com>
* Licensed under the Apache License, Version 2.0 (the "License")
*/
package rife.database.exceptions;
import java.io.Serial;
public class RowProcessorErrorException extends DatabaseException {
    @Serial private static final long serialVersionUID = -5597696130038426852L;
    public RowProcessorErrorException(Throwable cause) {
        super("An error occurred while processing a resultset row.", cause);
    }
}

View file

@ -0,0 +1,23 @@
/*
* Copyright 2001-2022 Geert Bevin <gbevin[remove] at uwyn dot com>
* Licensed under the Apache License, Version 2.0 (the "License")
*/
package rife.database.exceptions;
import java.io.Serial;
public class SequenceNameRequiredException extends DbQueryException {
@Serial private static final long serialVersionUID = -1117694732120142775L;
private String queryName_;
public SequenceNameRequiredException(String queryName) {
super(queryName + " queries require a sequence name.");
queryName_ = queryName;
}
public String getQueryName() {
return queryName_;
}
}

View file

@ -0,0 +1,23 @@
/*
* Copyright 2001-2022 Geert Bevin <gbevin[remove] at uwyn dot com>
* Licensed under the Apache License, Version 2.0 (the "License")
*/
package rife.database.exceptions;
import java.io.Serial;
public class SequenceOperationRequiredException extends DbQueryException {
@Serial private static final long serialVersionUID = -4800820909278366194L;
private final String queryName_;
public SequenceOperationRequiredException(String queryName) {
super(queryName + " queries require a sequence operation to be provided.");
queryName_ = queryName;
}
public String getQueryName() {
return queryName_;
}
}

View file

@ -0,0 +1,24 @@
/*
* Copyright 2001-2022 Geert Bevin <gbevin[remove] at uwyn dot com>
* Licensed under the Apache License, Version 2.0 (the "License")
*/
package rife.database.exceptions;
import rife.database.Datasource;
import java.io.Serial;
public class StatementCloseErrorException extends DatabaseException {
@Serial private static final long serialVersionUID = -4874100206556310884L;
private final Datasource datasource_;
public StatementCloseErrorException(Datasource datasource, Throwable cause) {
super("Couldn't close the statement.", cause);
datasource_ = datasource;
}
public Datasource getDatasource() {
return datasource_;
}
}

View file

@ -0,0 +1,24 @@
/*
* Copyright 2001-2022 Geert Bevin <gbevin[remove] at uwyn dot com>
* Licensed under the Apache License, Version 2.0 (the "License")
*/
package rife.database.exceptions;
import rife.database.Datasource;
import java.io.Serial;
public class StatementCreationErrorException extends DatabaseException {
@Serial private static final long serialVersionUID = -1239650169811910189L;
private final Datasource datasource_;
public StatementCreationErrorException(Datasource datasource, Throwable cause) {
super("Couldn't create a new statement.", cause);
datasource_ = datasource;
}
public Datasource getDatasource() {
return datasource_;
}
}

View file

@ -0,0 +1,23 @@
/*
* Copyright 2001-2022 Geert Bevin <gbevin[remove] at uwyn dot com>
* Licensed under the Apache License, Version 2.0 (the "License")
*/
package rife.database.exceptions;
import java.io.Serial;
public class TableNameOrFieldsRequiredException extends DbQueryException {
@Serial private static final long serialVersionUID = -1252775241150915434L;
private final String queryName_;
public TableNameOrFieldsRequiredException(String queryName) {
super(queryName + " queries require a table name or fields.");
queryName_ = queryName;
}
public String getQueryName() {
return queryName_;
}
}

View file

@ -0,0 +1,23 @@
/*
* Copyright 2001-2022 Geert Bevin <gbevin[remove] at uwyn dot com>
* Licensed under the Apache License, Version 2.0 (the "License")
*/
package rife.database.exceptions;
import java.io.Serial;
public class TableNameRequiredException extends DbQueryException {
@Serial private static final long serialVersionUID = 5815362326172483731L;
private final String queryName_;
public TableNameRequiredException(String queryName) {
super(queryName + " queries require a table name.");
queryName_ = queryName;
}
public String getQueryName() {
return queryName_;
}
}

View file

@ -0,0 +1,17 @@
/*
* Copyright 2001-2022 Geert Bevin <gbevin[remove] at uwyn dot com>
* Licensed under the Apache License, Version 2.0 (the "License")
*/
package rife.database.exceptions;
import rife.database.Datasource;
import java.io.Serial;
public class TransactionBeginErrorException extends TransactionErrorException {
@Serial private static final long serialVersionUID = -75164107264303943L;
public TransactionBeginErrorException(Datasource datasource, Throwable cause) {
super("Error while beginning the transaction.", datasource, cause);
}
}

View file

@ -0,0 +1,17 @@
/*
* Copyright 2001-2022 Geert Bevin <gbevin[remove] at uwyn dot com>
* Licensed under the Apache License, Version 2.0 (the "License")
*/
package rife.database.exceptions;
import rife.database.Datasource;
import java.io.Serial;
public class TransactionCommitErrorException extends TransactionErrorException {
@Serial private static final long serialVersionUID = -2362784873041828108L;
public TransactionCommitErrorException(Datasource datasource, Throwable cause) {
super("Error while committing transaction.", datasource, cause);
}
}

View file

@ -0,0 +1,24 @@
/*
* Copyright 2001-2022 Geert Bevin <gbevin[remove] at uwyn dot com>
* Licensed under the Apache License, Version 2.0 (the "License")
*/
package rife.database.exceptions;
import rife.database.Datasource;
import java.io.Serial;
public class TransactionErrorException extends DatabaseException {
@Serial private static final long serialVersionUID = -5022112556565975376L;
private Datasource datasource_;
TransactionErrorException(String action, Datasource datasource, Throwable cause) {
super(action, cause);
datasource_ = datasource;
}
public Datasource getDatasource() {
return datasource_;
}
}

View file

@ -0,0 +1,21 @@
/*
* Copyright 2001-2022 Geert Bevin <gbevin[remove] at uwyn dot com>
* Licensed under the Apache License, Version 2.0 (the "License")
*/
package rife.database.exceptions;
import rife.database.Datasource;
import java.io.Serial;
public class TransactionRollbackErrorException extends TransactionErrorException {
    @Serial private static final long serialVersionUID = 2809082434948067632L;
    public TransactionRollbackErrorException(Datasource datasource, Throwable cause) {
        super("Error while rolling back the transaction.", datasource, cause);
    }
    public TransactionRollbackErrorException(String action, Datasource datasource, Throwable cause) {
        super(action, datasource, cause);
    }
}

View file

@ -0,0 +1,17 @@
/*
* Copyright 2001-2022 Geert Bevin <gbevin[remove] at uwyn dot com>
* Licensed under the Apache License, Version 2.0 (the "License")
*/
package rife.database.exceptions;
import rife.database.Datasource;
import java.io.Serial;
public class TransactionSupportCheckErrorException extends TransactionErrorException {
@Serial private static final long serialVersionUID = 2834697164959844045L;
public TransactionSupportCheckErrorException(Datasource datasource, Throwable cause) {
super("Error while checking the transaction support.", datasource, cause);
}
}

View file

@ -0,0 +1,17 @@
/*
* Copyright 2001-2022 Geert Bevin <gbevin[remove] at uwyn dot com>
* Licensed under the Apache License, Version 2.0 (the "License")
*/
package rife.database.exceptions;
import rife.database.Datasource;
import java.io.Serial;
public class TransactionTimedOutException extends TransactionErrorException {
@Serial private static final long serialVersionUID = 6277363843403636905L;
public TransactionTimedOutException(Datasource datasource) {
super("The transaction timed out.", datasource, null);
}
}

View file

@ -0,0 +1,43 @@
/*
* Copyright 2001-2022 Geert Bevin <gbevin[remove] at uwyn dot com>
* Licensed under the Apache License, Version 2.0 (the "License")
*/
package rife.database.exceptions;
import rife.database.DbPreparedStatement;
import java.io.Serial;
public class UndefinedVirtualParameterException extends DatabaseException {
@Serial private static final long serialVersionUID = -7004752430133818652L;
private final DbPreparedStatement preparedStatement_;
private final String parameterName_;
private final int parameterIndex_;
public UndefinedVirtualParameterException(DbPreparedStatement statement, String parameterName) {
super("The statement with sql '" + statement.getSql() + "' requires the definition of a value for the virtual parameter with name '" + parameterName + "'.");
preparedStatement_ = statement;
parameterName_ = parameterName;
parameterIndex_ = -1;
}
public UndefinedVirtualParameterException(DbPreparedStatement statement, int parameterIndex) {
super("The statement with sql '" + statement.getSql() + "' requires the definition of a value for the virtual parameter with index '" + parameterIndex + "'.");
preparedStatement_ = statement;
parameterName_ = null;
parameterIndex_ = parameterIndex;
}
public DbPreparedStatement getPreparedStatement() {
return preparedStatement_;
}
public String getParameterName() {
return parameterName_;
}
public int getParameterIndex() {
return parameterIndex_;
}
}

View file

@ -0,0 +1,22 @@
/*
* Copyright 2001-2022 Geert Bevin <gbevin[remove] at uwyn dot com>
* Licensed under the Apache License, Version 2.0 (the "License")
*/
package rife.database.exceptions;
import java.io.Serial;
public class UnsupportedDriverNameException extends DatabaseException {
@Serial private static final long serialVersionUID = 6993103229317879655L;
private final String name_;
public UnsupportedDriverNameException(String name) {
super("Couldn't find a supported driver class for the driver name '" + name + "'.");
name_ = name;
}
public String getName() {
return name_;
}
}

View file

@ -0,0 +1,22 @@
/*
* Copyright 2001-2022 Geert Bevin <gbevin[remove] at uwyn dot com>
* Licensed under the Apache License, Version 2.0 (the "License")
*/
package rife.database.exceptions;
import java.io.Serial;
public class UnsupportedJdbcDriverException extends RuntimeException {
@Serial private static final long serialVersionUID = 664475636733401910L;
private final String driver_;
public UnsupportedJdbcDriverException(String driver, Throwable cause) {
super("The JDBC driver '" + driver + "' isn't supported, certain functionalities will not function correctly.", cause);
driver_ = driver;
}
public String getDriver() {
return driver_;
}
}

View file

@ -0,0 +1,28 @@
/*
* Copyright 2001-2022 Geert Bevin <gbevin[remove] at uwyn dot com>
* Licensed under the Apache License, Version 2.0 (the "License")
*/
package rife.database.exceptions;
import java.io.Serial;
public class UnsupportedSqlFeatureException extends DbQueryException {
@Serial private static final long serialVersionUID = 6597682956243876788L;
private final String feature_;
private final String driver_;
public UnsupportedSqlFeatureException(String feature, String driver) {
super("The '" + feature + "' feature isn't supported by the driver '" + driver + "'.");
feature_ = feature;
driver_ = driver;
}
public String getFeature() {
return feature_;
}
public String getDriver() {
return driver_;
}
}

View file

@ -0,0 +1,36 @@
/*
* Copyright 2001-2022 Geert Bevin <gbevin[remove] at uwyn dot com>
* Licensed under the Apache License, Version 2.0 (the "License")
*/
package rife.database.exceptions;
import rife.database.DbPreparedStatement;
import java.io.Serial;
public class UnsupportedVirtualParameterTypeException extends DatabaseException {
@Serial private static final long serialVersionUID = -4366883446335774838L;
private final DbPreparedStatement preparedStatement_;
private final int parameterIndex_;
private final String valueType_;
public UnsupportedVirtualParameterTypeException(DbPreparedStatement statement, int parameterIndex, String valueType) {
super("The statement with sql '" + statement.getSql() + "' doesn't support the value type '" + valueType + "' for the virtual parameter with index '" + parameterIndex + "'.");
preparedStatement_ = statement;
parameterIndex_ = parameterIndex;
valueType_ = valueType;
}
public DbPreparedStatement getPreparedStatement() {
return preparedStatement_;
}
public int getParameterIndex() {
return parameterIndex_;
}
public String getValueType() {
return valueType_;
}
}

View file

@ -0,0 +1,7 @@
<html>
<body>
Provides exception classes for the database framework.
</body>
</html>

View file

@ -0,0 +1,17 @@
<html>
<body>
Provides classes and interfaces for the object-oriented query builders, database abstraction layer, persistence manager, query handling templates, fault-tolerant JDBC wrappers and connection pooling.
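<p>As a brief sketch of how these pieces fit together (the <code>datasource</code>
variable and the <code>DbQueryManager(Datasource)</code> constructor are assumptions
of this example, not something documented on this page):</p>
<pre>
// build a database-independent query and execute it through a query manager
Delete delete = new Delete(datasource)
    .from("friend")
    .where("city", "=", "Antwerp");
new DbQueryManager(datasource).executeUpdate(delete);
</pre>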
<h2>Related Documentation</h2>
For overviews, tutorials, examples, guides, and documentation, please see:
<ul>
<li><a href="http://rifers.org/docs/usersguide/ch07.html">[User's guide] Chapter 7. Adding database support</a></li>
<li><a href="http://rifers.org/docs/usersguide/ch10.html">[User's guide] Chapter 10. Cookbook - database</a></li>
<li><a href="http://rifers.org/wiki/display/RIFE/Database">[Wiki] Database</a></li>
<li><a href="http://rifers.org/examples/#D.5">[Examples] Friends listing</a></li>
</ul>
</body>
</html>

View file

@ -0,0 +1,147 @@
/*
* Copyright 2001-2022 Geert Bevin <gbevin[remove] at uwyn dot com>
* Licensed under the Apache License, Version 2.0 (the "License")
*/
package rife.database.queries;
import rife.database.Datasource;
import java.util.List;
abstract class AbstractParametrizedQuery extends AbstractQuery implements Query, Cloneable {
private QueryParameters parameters_ = null;
protected AbstractParametrizedQuery(Datasource datasource) {
super(datasource);
}
public void clear() {
super.clear();
if (parameters_ != null) {
parameters_.clear();
}
}
private void addTypedParameters(QueryParameterType type, QueryParameters parameters) {
if (null == parameters) {
return;
}
addTypedParameters(type, parameters.getOrderedNames());
}
private void addTypedParameters(QueryParameterType type, List<String> parameters) {
if (null == parameters_) {
parameters_ = new QueryParameters(this);
}
parameters_.addTypedParameters(type, parameters);
}
private void addTypedParameter(QueryParameterType type, String parameter) {
if (null == parameters_) {
parameters_ = new QueryParameters(this);
}
parameters_.addTypedParameter(type, parameter);
}
private <T> T getTypedParameters(QueryParameterType type) {
if (null == parameters_) {
return null;
}
return (T) parameters_.getTypedParameters(type);
}
private void clearTypedParameters(QueryParameterType type) {
if (null == parameters_) {
return;
}
parameters_.clearTypedParameters(type);
if (0 == parameters_.getNumberOfTypes()) {
parameters_ = null;
}
}
protected void _fieldSubselect(Select query) {
if (null == query) throw new IllegalArgumentException("query can't be null.");
addTypedParameters(QueryParameterType.FIELD, query.getParameters());
}
protected void _tableSubselect(Select query) {
if (null == query) throw new IllegalArgumentException("query can't be null.");
addTypedParameters(QueryParameterType.TABLE, query.getParameters());
}
protected void _whereSubselect(Select query) {
if (null == query) throw new IllegalArgumentException("query can't be null.");
addTypedParameters(QueryParameterType.WHERE, query.getParameters());
}
protected void _unionSubselect(Select query) {
if (null == query) throw new IllegalArgumentException("query can't be null.");
addTypedParameters(QueryParameterType.UNION, query.getParameters());
}
public QueryParameters getParameters() {
return parameters_;
}
protected void addFieldParameter(String field) {
addTypedParameter(QueryParameterType.FIELD, field);
}
protected void clearWhereParameters() {
clearTypedParameters(QueryParameterType.WHERE);
}
protected void addWhereParameter(String field) {
addTypedParameter(QueryParameterType.WHERE, field);
}
protected List<String> getWhereParameters() {
return getTypedParameters(QueryParameterType.WHERE);
}
public void addWhereParameters(List<String> parameters) {
addTypedParameters(QueryParameterType.WHERE, parameters);
}
protected void setLimitParameter(String limitParameter) {
addTypedParameter(QueryParameterType.LIMIT, limitParameter);
}
public String getLimitParameter() {
return getTypedParameters(QueryParameterType.LIMIT);
}
protected void setOffsetParameter(String offsetParameter) {
addTypedParameter(QueryParameterType.OFFSET, offsetParameter);
}
public String getOffsetParameter() {
return getTypedParameters(QueryParameterType.OFFSET);
}
protected boolean isLimitBeforeOffset() {
return true;
}
public AbstractParametrizedQuery clone() {
AbstractParametrizedQuery new_instance = (AbstractParametrizedQuery) super.clone();
if (new_instance != null &&
parameters_ != null) {
new_instance.parameters_ = parameters_.clone();
}
return new_instance;
}
}

View file

@ -0,0 +1,58 @@
/*
* Copyright 2001-2022 Geert Bevin <gbevin[remove] at uwyn dot com>
* Licensed under the Apache License, Version 2.0 (the "License")
*/
package rife.database.queries;
import rife.database.Datasource;
abstract class AbstractQuery implements Query, Cloneable {
protected Datasource datasource_ = null;
protected String sql_ = null;
protected boolean excludeUnsupportedCapabilities_ = false;
private AbstractQuery() {
}
protected AbstractQuery(Datasource datasource) {
assert datasource != null;
datasource_ = datasource;
}
public Datasource getDatasource() {
return datasource_;
}
public QueryParameters getParameters() {
return null;
}
public void setExcludeUnsupportedCapabilities(boolean flag) {
excludeUnsupportedCapabilities_ = flag;
}
public void clear() {
sql_ = null;
}
protected void clearGenerated() {
sql_ = null;
}
public String toString() {
return getSql();
}
public AbstractQuery clone() {
AbstractQuery new_instance = null;
try {
new_instance = (AbstractQuery) super.clone();
} catch (CloneNotSupportedException e) {
new_instance = null;
}
return new_instance;
}
}

View file

@ -0,0 +1,320 @@
/*
* Copyright 2001-2022 Geert Bevin <gbevin[remove] at uwyn dot com> and
* JR Boyens <gnu-jrb[remove] at gmx dot net>
* Licensed under the Apache License, Version 2.0 (the "License")
*/
package rife.database.queries;
import rife.database.Datasource;
import rife.database.exceptions.DbQueryException;
import java.util.List;
public abstract class AbstractWhereDelegateQuery<QueryType extends AbstractWhereDelegateQuery, DelegateType extends AbstractWhereQuery> implements WhereQuery<QueryType> {
protected DelegateType delegate_ = null;
protected AbstractWhereDelegateQuery(DelegateType delegate) {
delegate_ = delegate;
}
public DelegateType getDelegate() {
return delegate_;
}
public Datasource getDatasource() {
return delegate_.getDatasource();
}
public WhereGroup<QueryType> startWhere() {
return new WhereGroup<QueryType>(getDatasource(), this);
}
public WhereGroupAnd<QueryType> startWhereAnd() {
return new WhereGroupAnd<QueryType>(getDatasource(), this);
}
public WhereGroupOr<QueryType> startWhereOr() {
return new WhereGroupOr<QueryType>(getDatasource(), this);
}
public QueryType where(String where) {
if (delegate_.getWhere().length() > 0) {
delegate_.whereAnd(where);
} else {
delegate_.where(where);
}
return (QueryType) this;
}
public QueryType where(String field, String operator, boolean value) {
if (delegate_.getWhere().length() > 0) {
delegate_.whereAnd(field, operator, value);
} else {
delegate_.where(field, operator, value);
}
return (QueryType) this;
}
public QueryType where(String field, String operator, byte value) {
if (delegate_.getWhere().length() > 0) {
delegate_.whereAnd(field, operator, value);
} else {
delegate_.where(field, operator, value);
}
return (QueryType) this;
}
public QueryType where(String field, String operator, char value) {
if (delegate_.getWhere().length() > 0) {
delegate_.whereAnd(field, operator, value);
} else {
delegate_.where(field, operator, value);
}
return (QueryType) this;
}
public QueryType where(String field, String operator, double value) {
if (delegate_.getWhere().length() > 0) {
delegate_.whereAnd(field, operator, value);
} else {
delegate_.where(field, operator, value);
}
return (QueryType) this;
}
public QueryType where(String field, String operator, float value) {
if (delegate_.getWhere().length() > 0) {
delegate_.whereAnd(field, operator, value);
} else {
delegate_.where(field, operator, value);
}
return (QueryType) this;
}
public QueryType where(String field, String operator, int value) {
if (delegate_.getWhere().length() > 0) {
delegate_.whereAnd(field, operator, value);
} else {
delegate_.where(field, operator, value);
}
return (QueryType) this;
}
public QueryType where(String field, String operator, long value) {
if (delegate_.getWhere().length() > 0) {
delegate_.whereAnd(field, operator, value);
} else {
delegate_.where(field, operator, value);
}
return (QueryType) this;
}
public QueryType where(String field, String operator, Select query) {
if (delegate_.getWhere().length() > 0) {
delegate_.whereAnd(field, operator, query);
} else {
delegate_.where(field, operator, query);
}
return (QueryType) this;
}
public QueryType where(String field, String operator, Object value) {
if (delegate_.getWhere().length() > 0) {
delegate_.whereAnd(field, operator, value);
} else {
delegate_.where(field, operator, value);
}
return (QueryType) this;
}
public QueryType where(String field, String operator, short value) {
if (delegate_.getWhere().length() > 0) {
delegate_.whereAnd(field, operator, value);
} else {
delegate_.where(field, operator, value);
}
return (QueryType) this;
}
public QueryType whereAnd(String where) {
delegate_.whereAnd(where);
return (QueryType) this;
}
public QueryType whereAnd(String field, String operator, boolean value) {
delegate_.whereAnd(field, operator, value);
return (QueryType) this;
}
public QueryType whereAnd(String field, String operator, byte value) {
delegate_.whereAnd(field, operator, value);
return (QueryType) this;
}
public QueryType whereAnd(String field, String operator, char value) {
delegate_.whereAnd(field, operator, value);
return (QueryType) this;
}
public QueryType whereAnd(String field, String operator, double value) {
delegate_.whereAnd(field, operator, value);
return (QueryType) this;
}
public QueryType whereAnd(String field, String operator, float value) {
delegate_.whereAnd(field, operator, value);
return (QueryType) this;
}
public QueryType whereAnd(String field, String operator, int value) {
delegate_.whereAnd(field, operator, value);
return (QueryType) this;
}
public QueryType whereAnd(String field, String operator, long value) {
delegate_.whereAnd(field, operator, value);
return (QueryType) this;
}
public QueryType whereAnd(String field, String operator, Select query) {
delegate_.whereAnd(field, operator, query);
return (QueryType) this;
}
public QueryType whereAnd(String field, String operator, Object value) {
delegate_.whereAnd(field, operator, value);
return (QueryType) this;
}
public QueryType whereAnd(String field, String operator, short value) {
delegate_.whereAnd(field, operator, value);
return (QueryType) this;
}
public QueryType whereOr(String where) {
delegate_.whereOr(where);
return (QueryType) this;
}
public QueryType whereOr(String field, String operator, boolean value) {
delegate_.whereOr(field, operator, value);
return (QueryType) this;
}
public QueryType whereOr(String field, String operator, byte value) {
delegate_.whereOr(field, operator, value);
return (QueryType) this;
}
public QueryType whereOr(String field, String operator, char value) {
delegate_.whereOr(field, operator, value);
return (QueryType) this;
}
public QueryType whereOr(String field, String operator, double value) {
delegate_.whereOr(field, operator, value);
return (QueryType) this;
}
public QueryType whereOr(String field, String operator, float value) {
delegate_.whereOr(field, operator, value);
return (QueryType) this;
}
public QueryType whereOr(String field, String operator, int value) {
delegate_.whereOr(field, operator, value);
return (QueryType) this;
}
public QueryType whereOr(String field, String operator, long value) {
delegate_.whereOr(field, operator, value);
return (QueryType) this;
}
public QueryType whereOr(String field, String operator, Select query) {
delegate_.whereOr(field, operator, query);
return (QueryType) this;
}
public QueryType whereOr(String field, String operator, Object value) {
delegate_.whereOr(field, operator, value);
return (QueryType) this;
}
public QueryType whereOr(String field, String operator, short value) {
delegate_.whereOr(field, operator, value);
return (QueryType) this;
}
public QueryType whereSubselect(Select query) {
delegate_.whereSubselect(query);
return (QueryType) this;
}
public QueryType where(Object bean)
throws DbQueryException {
delegate_.where(bean);
return (QueryType) this;
}
public QueryType whereIncluded(Object bean, String[] includedFields)
throws DbQueryException {
delegate_.whereIncluded(bean, includedFields);
return (QueryType) this;
}
public QueryType whereExcluded(Object bean, String[] excludedFields)
throws DbQueryException {
delegate_.whereExcluded(bean, excludedFields);
return (QueryType) this;
}
public QueryType whereFiltered(Object bean, String[] includedFields, String[] excludedFields)
throws DbQueryException {
delegate_.whereFiltered(bean, includedFields, excludedFields);
return (QueryType) this;
}
public void addWhereParameters(List<String> parameters) {
delegate_.addWhereParameters(parameters);
}
}

View file

@ -0,0 +1,34 @@
/*
* Copyright 2001-2022 Geert Bevin <gbevin[remove] at uwyn dot com>
* Licensed under the Apache License, Version 2.0 (the "License")
*/
package rife.database.queries;
import rife.database.Datasource;
public abstract class AbstractWhereGroup<ParentType extends WhereQuery>
extends AbstractWhereQuery<AbstractWhereGroup<ParentType>>
implements Cloneable {
protected WhereQuery parent_ = null;
protected AbstractWhereGroup(Datasource datasource, WhereQuery parent) {
super(datasource);
parent_ = parent;
}
public ParentType end() {
parent_.whereAnd("(" + getSql() + ")");
parent_.addWhereParameters(getWhereParameters());
return (ParentType) parent_;
}
public String getSql() {
return where_.toString();
}
public AbstractWhereGroup<ParentType> clone() {
return (AbstractWhereGroup<ParentType>) super.clone();
}
}

View file

@ -0,0 +1,369 @@
/*
* Copyright 2001-2022 Geert Bevin <gbevin[remove] at uwyn dot com>
* Licensed under the Apache License, Version 2.0 (the "License")
*/
package rife.database.queries;
import rife.database.Datasource;
import rife.database.exceptions.DbQueryException;
import rife.tools.StringUtils;
import java.util.ArrayList;
import java.util.Map;
import java.util.Set;
public abstract class AbstractWhereQuery<QueryType extends AbstractWhereQuery> extends AbstractParametrizedQuery implements WhereQuery<QueryType>, Cloneable {
protected StringBuilder where_ = null;
AbstractWhereQuery(Datasource datasource) {
super(datasource);
if (null == datasource) throw new IllegalArgumentException("datasource can't be null.");
clear();
}
public void clear() {
super.clear();
where_ = new StringBuilder();
}
public String getWhere() {
return where_.toString();
}
public QueryType whereSubselect(Select query) {
_whereSubselect(query);
return (QueryType) this;
}
public QueryType where(String where) {
if (null == where) throw new IllegalArgumentException("where can't be null.");
if (0 == where.length()) throw new IllegalArgumentException("where can't be empty.");
clearGenerated();
clearWhereParameters();
where_ = new StringBuilder(where);
return (QueryType) this;
}
public WhereGroup<QueryType> startWhere() {
return new WhereGroup<QueryType>(getDatasource(), this);
}
public QueryType whereAnd(String where) {
if (null == where) throw new IllegalArgumentException("where can't be null.");
if (0 == where.length()) throw new IllegalArgumentException("where can't be empty.");
if (0 == where_.length())
throw new IllegalArgumentException("can't perform whereAnd as initial where operation.");
clearGenerated();
where_.append(" AND ");
where_.append(where);
return (QueryType) this;
}
public WhereGroupAnd<QueryType> startWhereAnd() {
return new WhereGroupAnd<QueryType>(getDatasource(), this);
}
public QueryType whereOr(String where) {
if (null == where) throw new IllegalArgumentException("where can't be null.");
if (0 == where.length()) throw new IllegalArgumentException("where can't be empty.");
if (0 == where_.length())
throw new IllegalArgumentException("can't perform whereOr as initial where operation.");
clearGenerated();
where_.append(" OR ");
where_.append(where);
return (QueryType) this;
}
public WhereGroupOr<QueryType> startWhereOr() {
return new WhereGroupOr<QueryType>(getDatasource(), this);
}
public QueryType where(String field, String operator, boolean value) {
return where(field, operator, Boolean.valueOf(value));
}
public QueryType where(String field, String operator, Select query) {
if (null == query) throw new IllegalArgumentException("query can't be null.");
where_.append(field);
where_.append(" ");
where_.append(operator);
where_.append(" ");
where_.append("(");
where_.append(query.toString());
where_.append(")");
whereSubselect(query);
return (QueryType) this;
}
public QueryType where(String field, String operator, Object value) {
if (null == field) throw new IllegalArgumentException("field can't be null.");
if (0 == field.length()) throw new IllegalArgumentException("field can't be empty.");
if (null == operator) throw new IllegalArgumentException("operator can't be null.");
if (0 == operator.length()) throw new IllegalArgumentException("operator can't be empty.");
clearGenerated();
clearWhereParameters();
where_ = new StringBuilder();
_where(field, operator, value);
return (QueryType) this;
}
public QueryType whereAnd(String field, String operator, boolean value) {
return whereAnd(field, operator, Boolean.valueOf(value));
}
public QueryType whereAnd(String field, String operator, Select query) {
if (null == query) throw new IllegalArgumentException("query can't be null.");
where_.append(" AND ");
where_.append(field);
where_.append(" ");
where_.append(operator);
where_.append(" ");
where_.append("(");
where_.append(query.toString());
where_.append(")");
whereSubselect(query);
return (QueryType) this;
}
public QueryType whereAnd(String field, String operator, Object value) {
if (null == field) throw new IllegalArgumentException("field can't be null.");
if (0 == field.length()) throw new IllegalArgumentException("field can't be empty.");
if (null == operator) throw new IllegalArgumentException("operator can't be null.");
if (0 == operator.length()) throw new IllegalArgumentException("operator can't be empty.");
clearGenerated();
where_.append(" AND ");
_where(field, operator, value);
return (QueryType) this;
}
public QueryType whereOr(String field, String operator, boolean value) {
return whereOr(field, operator, Boolean.valueOf(value));
}
public QueryType whereOr(String field, String operator, Select query) {
if (null == query) throw new IllegalArgumentException("query can't be null.");
where_.append(" OR ");
where_.append(field);
where_.append(" ");
where_.append(operator);
where_.append(" ");
where_.append("(");
where_.append(query.toString());
where_.append(")");
whereSubselect(query);
return (QueryType) this;
}
public QueryType whereOr(String field, String operator, Object value) {
if (null == field) throw new IllegalArgumentException("field can't be null.");
if (0 == field.length()) throw new IllegalArgumentException("field can't be empty.");
if (null == operator) throw new IllegalArgumentException("operator can't be null.");
if (0 == operator.length()) throw new IllegalArgumentException("operator can't be empty.");
clearGenerated();
where_.append(" OR ");
_where(field, operator, value);
return (QueryType) this;
}
private void _where(String field, String operator, Object value) {
where_.append(field);
where_.append(" ");
where_.append(operator);
where_.append(" ");
where_.append(datasource_.getSqlConversion().getSqlValue(value));
}
public QueryType whereParameter(String field, String operator) {
return whereParameter(field, field, operator);
}
public QueryType whereParameter(String field, String alias, String operator) {
if (null == field) throw new IllegalArgumentException("field can't be null.");
if (0 == field.length()) throw new IllegalArgumentException("field can't be empty.");
if (null == alias) throw new IllegalArgumentException("alias can't be null.");
if (0 == alias.length()) throw new IllegalArgumentException("alias can't be empty.");
if (null == operator) throw new IllegalArgumentException("operator can't be null.");
if (0 == operator.length()) throw new IllegalArgumentException("operator can't be empty.");
clearGenerated();
clearWhereParameters();
where_ = new StringBuilder(field);
where_.append(" ");
where_.append(operator);
where_.append(" ?");
addWhereParameter(alias);
return (QueryType) this;
}
public QueryType whereParameterAnd(String field, String operator) {
return whereParameterAnd(field, field, operator);
}
public QueryType whereParameterAnd(String field, String alias, String operator) {
if (null == field) throw new IllegalArgumentException("field can't be null.");
if (0 == field.length()) throw new IllegalArgumentException("field can't be empty.");
if (null == alias) throw new IllegalArgumentException("alias can't be null.");
if (0 == alias.length()) throw new IllegalArgumentException("alias can't be empty.");
if (null == operator) throw new IllegalArgumentException("operator can't be null.");
if (0 == operator.length()) throw new IllegalArgumentException("operator can't be empty.");
if (0 == where_.length())
throw new IllegalArgumentException("can't perform whereParameterAnd as initial where operation.");
clearGenerated();
where_.append(" AND ");
where_.append(field);
where_.append(" ");
where_.append(operator);
where_.append(" ?");
addWhereParameter(alias);
return (QueryType) this;
}
public QueryType whereParameterOr(String field, String operator) {
return whereParameterOr(field, field, operator);
}
public QueryType whereParameterOr(String field, String alias, String operator) {
if (null == field) throw new IllegalArgumentException("field can't be null.");
if (0 == field.length()) throw new IllegalArgumentException("field can't be empty.");
if (null == alias) throw new IllegalArgumentException("alias can't be null.");
if (0 == alias.length()) throw new IllegalArgumentException("alias can't be empty.");
if (null == operator) throw new IllegalArgumentException("operator can't be null.");
if (0 == operator.length()) throw new IllegalArgumentException("operator can't be empty.");
if (0 == where_.length())
throw new IllegalArgumentException("can't perform whereParameterOr as initial where operation.");
clearGenerated();
where_.append(" OR ");
where_.append(field);
where_.append(" ");
where_.append(operator);
where_.append(" ?");
addWhereParameter(alias);
return (QueryType) this;
}
public QueryType where(Object bean)
throws DbQueryException {
return whereFiltered(bean, null, null);
}
public QueryType whereIncluded(Object bean, String[] includedFields)
throws DbQueryException {
return whereFiltered(bean, includedFields, null);
}
public QueryType whereExcluded(Object bean, String[] excludedFields)
throws DbQueryException {
return whereFiltered(bean, null, excludedFields);
}
public QueryType whereFiltered(Object bean, String[] includedFields, String[] excludedFields)
throws DbQueryException {
if (null == bean) throw new IllegalArgumentException("bean can't be null.");
// TODO
// Constrained constrained = ConstrainedUtils.makeConstrainedInstance(bean);
ArrayList<String> where_parts = new ArrayList<String>();
Map<String, String> property_values = QueryHelper.getBeanPropertyValues(bean, includedFields, excludedFields, getDatasource());
for (String property_name : property_values.keySet()) {
// if (!ConstrainedUtils.persistConstrainedProperty(constrained, property_name, null))
// {
// continue;
// }
where_parts.add(property_name + " = " + property_values.get(property_name));
}
where(StringUtils.join(where_parts, " AND "));
return (QueryType) this;
}
public QueryType whereParameters(Class beanClass)
throws DbQueryException {
return whereParametersExcluded(beanClass, null);
}
public QueryType whereParametersExcluded(Class beanClass, String[] excludedFields)
throws DbQueryException {
if (null == beanClass) throw new IllegalArgumentException("beanClass can't be null.");
clearGenerated();
// TODO
// Constrained constrained = ConstrainedUtils.getConstrainedInstance(beanClass);
Set<String> property_names = QueryHelper.getBeanPropertyNames(beanClass, excludedFields);
for (String property_name : property_names) {
// if (!ConstrainedUtils.persistConstrainedProperty(constrained, property_name, null))
// {
// continue;
// }
if (null == getWhereParameters()) {
whereParameter(property_name, "=");
} else {
whereParameterAnd(property_name, "=");
}
}
return (QueryType) this;
}
public QueryType clone() {
AbstractWhereQuery new_instance = (AbstractWhereQuery) super.clone();
if (new_instance != null) {
if (where_ != null) {
new_instance.where_ = new StringBuilder(where_.toString());
}
}
return (QueryType) new_instance;
}
}

View file

@ -0,0 +1,92 @@
/*
* Copyright 2001-2022 Geert Bevin <gbevin[remove] at uwyn dot com>
* Licensed under the Apache License, Version 2.0 (the "License")
*/
package rife.database.queries;
import rife.database.Datasource;
import rife.database.capabilities.Capabilities;
import rife.database.exceptions.DbQueryException;
import rife.database.exceptions.SequenceNameRequiredException;
import rife.database.exceptions.UnsupportedSqlFeatureException;
import rife.template.Template;
import rife.template.TemplateFactory;
import rife.tools.StringUtils;
/**
* Object representation of a SQL "CREATE SEQUENCE" query.
*
* <p>This object may be used to dynamically construct a SQL statement in a
* database-independent fashion. After it is finished, it may be executed using
* {@link rife.database.DbQueryManager#executeUpdate(Query)
* DbQueryManager.executeUpdate()}.
*
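 * <p>A minimal usage sketch, added for illustration: the {@code datasource} variable
 * and the {@code DbQueryManager(Datasource)} constructor are assumed here rather than
 * defined by this class.
 * <pre>{@code
 * // build the query and hand it to a query manager for execution
 * CreateSequence create = new CreateSequence(datasource)
 *     .name("seq_friend_id");
 * new DbQueryManager(datasource).executeUpdate(create);
 * }</pre>
 *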
* @author Geert Bevin <gbevin[remove] at uwyn dot com>
* @author Steven Grimm (koreth[remove] at midwinter dot com)
* @since 1.0
*/
public class CreateSequence extends AbstractQuery implements Cloneable {
private String name_ = null;
public CreateSequence(Datasource datasource) {
super(datasource);
if (null == datasource) throw new IllegalArgumentException("datasource can't be null.");
clear();
}
public void clear() {
super.clear();
name_ = null;
}
public String getName() {
return name_;
}
public Capabilities getCapabilities() {
return null;
}
public String getSql()
throws DbQueryException {
if (null == sql_) {
if (null == name_) {
throw new SequenceNameRequiredException("CreateSequence");
} else {
Template template = TemplateFactory.SQL.get("sql." + StringUtils.encodeClassname(datasource_.getAliasedDriver()) + ".create_sequence");
if (template.hasValueId("NAME")) {
template.setValue("NAME", name_);
}
sql_ = template.getBlock("QUERY");
if (0 == sql_.length()) {
throw new UnsupportedSqlFeatureException("CREATE SEQUENCE", datasource_.getAliasedDriver());
}
assert sql_ != null;
assert sql_.length() > 0;
}
}
return sql_;
}
public CreateSequence name(String name) {
if (null == name) throw new IllegalArgumentException("name can't be null.");
if (0 == name.length()) throw new IllegalArgumentException("name can't be empty.");
clearGenerated();
name_ = name;
return this;
}
public CreateSequence clone() {
return (CreateSequence) super.clone();
}
}

File diff suppressed because it is too large

View file

@ -0,0 +1,111 @@
/*
* Copyright 2001-2022 Geert Bevin <gbevin[remove] at uwyn dot com>
* Licensed under the Apache License, Version 2.0 (the "License")
*/
package rife.database.queries;
import rife.database.Datasource;
import rife.database.capabilities.Capabilities;
import rife.database.exceptions.TableNameRequiredException;
import rife.database.exceptions.UnsupportedSqlFeatureException;
import rife.template.Template;
import rife.template.TemplateFactory;
import rife.tools.StringUtils;
/**
* Object representation of a SQL "DELETE" query.
*
* <p>This object may be used to dynamically construct a SQL statement in a
* database-independent fashion. After it is finished, it may be executed using
* {@link rife.database.DbQueryManager#executeUpdate(Query)
* DbQueryManager.executeUpdate()}.
*
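 * <p>A minimal usage sketch for illustration only; the {@code datasource} variable and
 * the {@code DbQueryManager(Datasource)} constructor are assumptions of this example.
 * <pre>{@code
 * // delete a single row by name
 * Delete delete = new Delete(datasource)
 *     .from("friend")
 *     .where("name", "=", "John Doe");
 * new DbQueryManager(datasource).executeUpdate(delete);
 * }</pre>
 *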
* @author Geert Bevin <gbevin[remove] at uwyn dot com>
* @author Steven Grimm (koreth[remove] at midwinter dot com)
* @since 1.0
*/
public class Delete extends AbstractWhereQuery<Delete> implements Cloneable {
private String hint_ = null;
private String from_ = null;
public Delete(Datasource datasource) {
super(datasource);
if (null == datasource) throw new IllegalArgumentException("datasource can't be null.");
clear();
}
public String getHint() {
return hint_;
}
public void clear() {
super.clear();
hint_ = null;
from_ = null;
}
public String getFrom() {
return from_;
}
public Capabilities getCapabilities() {
return null;
}
public String getSql() {
if (null == sql_) {
if (null == from_) {
throw new TableNameRequiredException("Delete");
} else {
Template template = TemplateFactory.SQL.get("sql." + StringUtils.encodeClassname(datasource_.getAliasedDriver()) + ".delete");
if (hint_ != null) {
if (!template.hasValueId("HINT")) {
throw new UnsupportedSqlFeatureException("HINT", datasource_.getAliasedDriver());
}
template.setValue("EXPRESSION", hint_);
template.setBlock("HINT", "HINT");
}
template.setValue("TABLE", from_);
if (where_ != null &&
where_.length() > 0) {
template.setValue("CONDITION", where_);
template.setValue("WHERE", template.getBlock("WHERE"));
}
sql_ = template.getBlock("QUERY");
assert sql_ != null;
assert sql_.length() > 0;
}
}
return sql_;
}
public Delete hint(String hint) {
clearGenerated();
hint_ = hint;
return this;
}
public Delete from(String from) {
if (null == from) throw new IllegalArgumentException("from can't be null.");
if (0 == from.length()) throw new IllegalArgumentException("from can't be empty.");
clearGenerated();
from_ = from;
return this;
}
public Delete clone() {
return super.clone();
}
}

View file

@ -0,0 +1,91 @@
/*
* Copyright 2001-2022 Geert Bevin <gbevin[remove] at uwyn dot com>
* Licensed under the Apache License, Version 2.0 (the "License")
*/
package rife.database.queries;
import rife.database.Datasource;
import rife.database.capabilities.Capabilities;
import rife.database.exceptions.DbQueryException;
import rife.database.exceptions.SequenceNameRequiredException;
import rife.database.exceptions.UnsupportedSqlFeatureException;
import rife.template.Template;
import rife.template.TemplateFactory;
import rife.tools.StringUtils;
/**
* Object representation of a SQL "DROP SEQUENCE" query.
*
* <p>This object may be used to dynamically construct a SQL statement in a
* database-independent fashion. After it is finished, it may be executed using
* {@link rife.database.DbQueryManager#executeUpdate(Query)
* DbQueryManager.executeUpdate()}.
*
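 * <p>Illustrative sketch only; {@code datasource} and the
 * {@code DbQueryManager(Datasource)} constructor are assumed, not defined here.
 * <pre>{@code
 * // drop a previously created sequence
 * DropSequence drop = new DropSequence(datasource)
 *     .name("seq_friend_id");
 * new DbQueryManager(datasource).executeUpdate(drop);
 * }</pre>
 *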
* @author Geert Bevin <gbevin[remove] at uwyn dot com>
* @author Steven Grimm (koreth[remove] at midwinter dot com)
* @since 1.0
*/
public class DropSequence extends AbstractQuery implements Cloneable {
private String name_ = null;
public DropSequence(Datasource datasource) {
super(datasource);
if (null == datasource) throw new IllegalArgumentException("datasource can't be null.");
clear();
}
public void clear() {
super.clear();
name_ = null;
}
public String getName() {
return name_;
}
public Capabilities getCapabilities() {
return null;
}
public String getSql()
throws DbQueryException {
if (null == sql_) {
if (null == name_) {
throw new SequenceNameRequiredException("DropSequence");
} else {
Template template = TemplateFactory.SQL.get("sql." + StringUtils.encodeClassname(datasource_.getAliasedDriver()) + ".drop_sequence");
if (template.hasValueId("NAME")) {
template.setValue("NAME", name_);
}
sql_ = template.getBlock("QUERY");
if (0 == sql_.length()) {
throw new UnsupportedSqlFeatureException("DROP SEQUENCE", datasource_.getAliasedDriver());
}
assert sql_ != null;
assert sql_.length() > 0;
}
}
return sql_;
}
public DropSequence name(String name) {
if (null == name) throw new IllegalArgumentException("name can't be null.");
if (0 == name.length()) throw new IllegalArgumentException("name can't be empty.");
clearGenerated();
name_ = name;
return this;
}
public DropSequence clone() {
return (DropSequence) super.clone();
}
}

View file

@ -0,0 +1,117 @@
/*
* Copyright 2001-2022 Geert Bevin <gbevin[remove] at uwyn dot com>
* Licensed under the Apache License, Version 2.0 (the "License")
*/
package rife.database.queries;
import rife.database.Datasource;
import rife.database.capabilities.Capabilities;
import rife.database.exceptions.DbQueryException;
import rife.database.exceptions.TableNameRequiredException;
import rife.database.exceptions.UnsupportedSqlFeatureException;
import rife.template.Template;
import rife.template.TemplateFactory;
import rife.tools.StringUtils;
import java.util.ArrayList;
import java.util.List;
/**
* Object representation of a SQL "DROP TABLE" query.
*
* <p>This object may be used to dynamically construct a SQL statement in a
* database-independent fashion. After it is finished, it may be executed using
* {@link rife.database.DbQueryManager#executeUpdate(Query)
* DbQueryManager.executeUpdate()}.
*
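 * <p>Illustrative sketch only; {@code datasource} and the
 * {@code DbQueryManager(Datasource)} constructor are assumed, not defined here.
 * <pre>{@code
 * // drop a single table; table() can be called again to drop several at once,
 * // where the driver supports it
 * DropTable drop = new DropTable(datasource)
 *     .table("friend");
 * new DbQueryManager(datasource).executeUpdate(drop);
 * }</pre>
 *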
* @author Geert Bevin <gbevin[remove] at uwyn dot com>
* @author Steven Grimm (koreth[remove] at midwinter dot com)
* @since 1.0
*/
public class DropTable extends AbstractQuery implements Cloneable {
private List<String> tables_ = null;
public DropTable(Datasource datasource) {
super(datasource);
if (null == datasource) throw new IllegalArgumentException("datasource can't be null.");
clear();
}
public List<String> getTables() {
return tables_;
}
public void clear() {
super.clear();
tables_ = new ArrayList<String>();
assert 0 == tables_.size();
}
public Capabilities getCapabilities() {
return null;
}
public String getSql()
throws DbQueryException {
if (null == sql_) {
if (0 == tables_.size()) {
throw new TableNameRequiredException("DropTable");
} else {
Template template = TemplateFactory.SQL.get("sql." + StringUtils.encodeClassname(datasource_.getAliasedDriver()) + ".drop_table");
if (1 == tables_.size()) {
template.setValue("EXPRESSION", tables_.get(0));
} else {
if (template.hasValueId("TABLES")) {
template.setValue("TABLES", StringUtils.join(tables_, template.getBlock("SEPERATOR")));
}
String block = template.getBlock("TABLES");
if (0 == block.length()) {
throw new UnsupportedSqlFeatureException("MULTIPLE TABLE DROP", datasource_.getAliasedDriver());
}
template.setValue("EXPRESSION", block);
}
sql_ = template.getBlock("QUERY");
if (template.hasValueId("TABLES")) {
template.removeValue("TABLES");
}
template.removeValue("EXPRESSION");
assert sql_ != null;
assert sql_.length() > 0;
}
}
return sql_;
}
public DropTable table(String table) {
if (null == table) throw new IllegalArgumentException("table can't be null.");
if (0 == table.length()) throw new IllegalArgumentException("table can't be empty.");
tables_.add(table);
clearGenerated();
return this;
}
public DropTable clone() {
DropTable new_instance = (DropTable) super.clone();
if (new_instance != null) {
if (tables_ != null) {
new_instance.tables_ = new ArrayList<String>();
new_instance.tables_.addAll(tables_);
}
}
return new_instance;
}
}

View file

@ -0,0 +1,340 @@
/*
* Copyright 2001-2022 Geert Bevin <gbevin[remove] at uwyn dot com>
* Licensed under the Apache License, Version 2.0 (the "License")
*/
package rife.database.queries;
import rife.database.Datasource;
import rife.database.capabilities.Capabilities;
import rife.database.exceptions.DbQueryException;
import rife.database.exceptions.FieldsRequiredException;
import rife.database.exceptions.TableNameRequiredException;
import rife.database.exceptions.UnsupportedSqlFeatureException;
import rife.database.types.SqlNull;
import rife.template.Template;
import rife.template.TemplateFactory;
import rife.tools.StringUtils;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
* Object representation of a SQL "INSERT" query.
*
* <p>This object may be used to dynamically construct a SQL statement in a
* database-independent fashion. After it is finished, it may be executed using
* {@link rife.database.DbQueryManager#executeUpdate(Query)
* DbQueryManager.executeUpdate()}.
*
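 * <p>Illustrative sketch only; {@code datasource} and the
 * {@code DbQueryManager(Datasource)} constructor are assumed, not defined here.
 * <pre>{@code
 * // insert one row; calling field() again with the same column adds extra value rows
 * Insert insert = new Insert(datasource)
 *     .into("friend")
 *     .field("name", "John Doe")
 *     .field("city", "Antwerp");
 * new DbQueryManager(datasource).executeUpdate(insert);
 * }</pre>
 *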
* @author Geert Bevin <gbevin[remove] at uwyn dot com>
* @author Steven Grimm (koreth[remove] at midwinter dot com)
* @since 1.0
*/
public class Insert extends AbstractParametrizedQuery implements Cloneable {
private String hint_ = null;
private String into_ = null;
private Map<String, List<Object>> fields_ = null;
public Insert(Datasource datasource) {
super(datasource);
if (null == datasource) throw new IllegalArgumentException("datasource can't be null.");
clear();
}
public void clear() {
super.clear();
hint_ = null;
into_ = null;
fields_ = new LinkedHashMap<String, List<Object>>();
assert 0 == fields_.size();
}
public String getHint() {
return hint_;
}
public String getInto() {
return into_;
}
public Map<String, List<Object>> getFields() {
return fields_;
}
public Capabilities getCapabilities() {
return null;
}
public String getSql()
throws DbQueryException {
if (null == sql_) {
if (null == into_) {
throw new TableNameRequiredException("Insert");
} else if (0 == fields_.size()) {
throw new FieldsRequiredException("Insert");
} else {
Template template = TemplateFactory.SQL.get("sql." + StringUtils.encodeClassname(datasource_.getAliasedDriver()) + ".insert");
if (hint_ != null) {
if (!template.hasValueId("HINT")) {
throw new UnsupportedSqlFeatureException("HINT", datasource_.getAliasedDriver());
}
template.setValue("EXPRESSION", hint_);
template.setBlock("HINT", "HINT");
}
template.setValue("INTO", into_);
// obtain the maximum number of values that are present by counting those of each field
int maximum_number_of_value_rows = 0;
for (List<Object> values : fields_.values()) {
if (values.size() > maximum_number_of_value_rows) {
maximum_number_of_value_rows = values.size();
}
}
// create the different rows that will be inserted into the database
ArrayList<String> value_rows = new ArrayList<String>();
ArrayList<String> value_row = null;
Object[] column_names = fields_.keySet().toArray();
String column_name = null;
for (int current_value_row = 0; current_value_row < maximum_number_of_value_rows; current_value_row++) {
value_row = new ArrayList<String>();
for (int i = 0; i < column_names.length; i++) {
column_name = (String) column_names[i];
if (current_value_row <= fields_.get(column_name).size() - 1) {
value_row.add(fields_.get(column_name).get(current_value_row).toString());
} else {
value_row.add("NULL");
}
}
template.setValue("VALUES", StringUtils.join(value_row, template.getBlock("SEPERATOR")));
value_rows.add(template.getBlock("VALUE_ROW"));
}
// create the strings for the columns that values will be inserted into and for the value rows themselves
template.setValue("COLUMNS", StringUtils.join(column_names, template.getBlock("SEPERATOR")));
if (1 == value_rows.size()) {
template.setValue("DATA", value_rows.get(0));
} else {
if (template.hasValueId("VALUE_ROWS")) {
template.setValue("VALUE_ROWS", StringUtils.join(value_rows, template.getBlock("SEPERATOR")));
}
String block = template.getBlock("VALUE_ROWS");
if (0 == block.length()) {
throw new UnsupportedSqlFeatureException("MULTIPLE INSERT ROWS", datasource_.getAliasedDriver());
}
template.setValue("DATA", block);
}
sql_ = template.getBlock("QUERY");
assert sql_ != null;
assert sql_.length() > 0;
}
}
return sql_;
}
public Insert hint(String hint) {
clearGenerated();
hint_ = hint;
return this;
}
public Insert into(String into) {
if (null == into) throw new IllegalArgumentException("into can't be null.");
if (0 == into.length()) throw new IllegalArgumentException("into can't be empty.");
clearGenerated();
into_ = into;
return this;
}
public Insert fieldSubselect(Select query) {
_fieldSubselect(query);
return this;
}
protected Insert _field(String field, Object value) {
assert field != null;
assert field.length() > 0;
clearGenerated();
if (!fields_.containsKey(field)) {
fields_.put(field, new ArrayList<Object>());
}
if (null == value) {
fields_.get(field).add(SqlNull.NULL);
} else {
fields_.get(field).add(value);
}
return this;
}
public Insert fieldParameter(String field) {
return fieldParameter(field, field);
}
public Insert fieldParameter(String field, String alias) {
if (null == field) throw new IllegalArgumentException("field can't be null.");
if (0 == field.length()) throw new IllegalArgumentException("field can't be empty.");
if (null == alias) throw new IllegalArgumentException("alias can't be null.");
if (0 == alias.length()) throw new IllegalArgumentException("alias can't be empty.");
clearGenerated();
addFieldParameter(alias);
return _field(field, "?");
}
public Insert field(String field, boolean value) {
return field(field, Boolean.valueOf(value));
}
public Insert field(String field, Select query) {
if (null == query) throw new IllegalArgumentException("query can't be null.");
StringBuilder buffer = new StringBuilder();
buffer.append("(");
buffer.append(query.toString());
buffer.append(")");
fieldCustom(field, buffer.toString());
_fieldSubselect(query);
return this;
}
public Insert field(String field, Object value) {
if (null == field) throw new IllegalArgumentException("field can't be null.");
if (0 == field.length()) throw new IllegalArgumentException("field can't be empty.");
if (null == value) {
return _field(field, null);
} else {
return _field(field, datasource_.getSqlConversion().getSqlValue(value));
}
}
public Insert fieldCustom(String field, String expression) {
if (null == field) throw new IllegalArgumentException("field can't be null.");
if (0 == field.length()) throw new IllegalArgumentException("field can't be empty.");
if (null == expression) {
return _field(field, null);
} else {
return _field(field, expression);
}
}
public Insert fields(Object[] keyValues) {
if (null == keyValues) throw new IllegalArgumentException("keyValues can't be null.");
if (0 == keyValues.length) throw new IllegalArgumentException("keyValues can't be empty.");
for (int i = 0; i < keyValues.length; i += 2) {
if (null != keyValues[i]) {
field(keyValues[i].toString(), keyValues[i + 1]);
}
}
return this;
}
public Insert fields(Object bean)
throws DbQueryException {
return fieldsFiltered(bean, null, null);
}
public Insert fieldsIncluded(Object bean, String[] includedFields)
throws DbQueryException {
return fieldsFiltered(bean, includedFields, null);
}
public Insert fieldsExcluded(Object bean, String[] excludedFields)
throws DbQueryException {
return fieldsFiltered(bean, null, excludedFields);
}
public Insert fieldsFiltered(Object bean, String[] includedFields, String[] excludedFields)
throws DbQueryException {
if (null == bean) throw new IllegalArgumentException("bean can't be null.");
// Constrained constrained = ConstrainedUtils.makeConstrainedInstance(bean);
Map<String, String> property_values = QueryHelper.getBeanPropertyValues(bean, includedFields, excludedFields, getDatasource());
for (String property_name : property_values.keySet()) {
// TODO
// if (!ConstrainedUtils.saveConstrainedProperty(constrained, property_name, null))
// {
// continue;
// }
_field(property_name, property_values.get(property_name));
}
return this;
}
public Insert fieldsParameters(Class beanClass)
throws DbQueryException {
return fieldsParametersExcluded(beanClass, null);
}
public Insert fieldsParametersExcluded(Class beanClass, String[] excludedFields)
throws DbQueryException {
if (null == beanClass) throw new IllegalArgumentException("beanClass can't be null.");
clearGenerated();
// Constrained constrained = ConstrainedUtils.getConstrainedInstance(beanClass);
Set<String> property_names = QueryHelper.getBeanPropertyNames(beanClass, excludedFields);
for (String property_name : property_names) {
// TODO
// if (!ConstrainedUtils.saveConstrainedProperty(constrained, property_name, null))
// {
// continue;
// }
addFieldParameter(property_name);
_field(property_name, "?");
}
return this;
}
public Insert clone() {
Insert new_instance = (Insert) super.clone();
if (new_instance != null) {
if (fields_ != null) {
new_instance.fields_ = new LinkedHashMap<String, List<Object>>();
List<Object> values = null;
for (String field : fields_.keySet()) {
values = fields_.get(field);
if (values != null) {
values = new ArrayList<Object>(values);
}
new_instance.fields_.put(field, values);
}
}
}
return new_instance;
}
}

Some files were not shown because too many files have changed in this diff