Image size mismatch in postgres 9.1 - java

I am using PostgreSQL 9.1 and have a table with a column of type bytea. When I insert an image, the insert itself succeeds. The schema of the table is as follows:
CREATE TABLE emp
(
uname character varying(100) NOT NULL,
pass character varying(100) NOT NULL,
name character varying(100) NOT NULL,
dob date,
country character varying(20),
region character varying(20),
description character varying(3000),
role character varying(100),
photo bytea,
CONSTRAINT emp_pkey PRIMARY KEY (uname )
)
WITH (
OIDS=FALSE
);
ALTER TABLE emp
OWNER TO postgres;
The sample java code is as follows:
package com.q4;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.sql.*;
public class DataTest {
public void insert() throws Exception
{
Connection c = ConnectionHelper.getConnection();
FileInputStream fis = new FileInputStream("C:\\pics\\viticcio.jpg");
int counter = 0;
String sql ="insert into emp(uname, pass,name,photo) values (?,?,?,?)";
PreparedStatement pstmt = c.prepareStatement(sql);
pstmt.setString(1, "Senthil1");
pstmt.setString(2, "Password");
pstmt.setString(3, "Senthil1");
//pstmt.setBlob(4, fis);
while(fis.read( ) != -1) counter++;
byte[] b= new byte[counter];
fis.close();
fis = new FileInputStream("C:\\pics\\viticcio.jpg");
for(int i = 0;i<counter;i++)
{
b[i] = (byte)fis.read();
// System.out.print(b[i]);
}
System.out.println("Input File Size : " + counter); //Output 1
System.out.println(counter);
pstmt.setBytes(4, b);
// pstmt.setBlob(4, fis, counter);
pstmt.executeUpdate();
System.out.println("Successfully insertted ...");
}
public void select() throws Exception
{
String sql = "select * from emp where uname = ?";
Connection c = ConnectionHelper.getConnection();
PreparedStatement pstmt = c.prepareStatement(sql);
pstmt.setString(1, "Senthil1");
ResultSet set = pstmt.executeQuery();
if(set.next())
{
FileOutputStream fos = new FileOutputStream("C:\\test.jpg");
byte[] a = set.getBytes("photo");
System.out.println("Output Filesize : "+a.length); //Output 2
for(int i = 0; i <a.length;i++)
{
fos.write((byte)a[i]);
// System.out.print((byte)a[i]);
fos.flush();
}
fos.close();
}
pstmt.close();
c.close();
}
public static void main(String[]s) throws Exception
{
new DataTest().insert();
new DataTest().select();
}
}
The output I get when running this program is shown below. The file test.jpg is created in C:\, but its size is twice that of the file that was read.
Input File Size : 6455
6455
Successfully insertted ...
SELECT ......... 12909
Please clarify what could be the root cause of this problem.
Thanks in advance,
Senthil

fis.read(byte[]) returns the number of bytes read into the array. To get the data, initialize the byte array (in my code, this is b) to the file length and call read as follows:
File file = new File("C:\\pics\\viticcio.jpg");
byte[] b = new byte[(int) file.length()];
fis.read(b);
pstmt.setBytes(4, b);
By default, the result sets produced by a PreparedStatement are read-only. To make them updatable, create the statement as follows:
PreparedStatement pstmt = c.prepareStatement(sql, ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_UPDATABLE);
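As a footnote, here is a minimal sketch of the insert that avoids the byte-counting loop entirely by reading the whole file in one call with java.nio (Java 7+). It reuses the emp table, column names, and file path from the question, but treat it as an illustration rather than a drop-in fix:
import java.nio.file.Files;
import java.nio.file.Paths;
import java.sql.Connection;
import java.sql.PreparedStatement;

public class InsertPhotoSketch {
    // Sketch only: table, columns, and file path are taken from the question.
    public static void insertPhoto(Connection c) throws Exception {
        // Read the entire file into memory in a single call.
        byte[] photo = Files.readAllBytes(Paths.get("C:\\pics\\viticcio.jpg"));
        String sql = "insert into emp(uname, pass, name, photo) values (?,?,?,?)";
        try (PreparedStatement pstmt = c.prepareStatement(sql)) {
            pstmt.setString(1, "Senthil1");
            pstmt.setString(2, "Password");
            pstmt.setString(3, "Senthil1");
            pstmt.setBytes(4, photo); // bytea column bound as a byte array
            pstmt.executeUpdate();
        }
    }
}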

Related

Java Import Excel Data into MySQL about encoding

Here is my source:
String url = "jdbc:mysql://localhost/dahan?characterEncoding=euckr";
String user = "root";
String password = "pass";
try{
Class.forName("com.mysql.jdbc.Driver");
Connection con = (Connection) DriverManager.getConnection(url,user,password);
con.setAutoCommit(false);
PreparedStatement pstm = null ;
PreparedStatement pstm1 = null ;
PreparedStatement pstm2 = null ;
FileInputStream input = new FileInputStream("C:/Users/hyunwoo/Downloads/Personal Contacts.xls");
POIFSFileSystem fs = new POIFSFileSystem( input );
HSSFWorkbook wb = new HSSFWorkbook(fs);
HSSFSheet sheet = wb.getSheetAt(0);
Row row;
String del = "DROP TABLE IF EXISTS `dahanMail`";
String cre = "CREATE TABLE `dahanMail` (`전체이름` varchar(50),`성` varchar(50),`이름` varchar(50) ,`닉네임` varchar(50),"
+ "`회사` varchar(500),`부서` varchar(500),`직급` varchar(500),`메일주소1` varchar(50),"
+ "`메일주소2` varchar(50),`전화번호1` varchar(50),`전화번호2` varchar(50),`전화번호3` varchar(50),"
+ "`휴대폰1` varchar(50),`휴대폰2` varchar(50),`팩스1` varchar(50),`팩스2` varchar(50),"
+ "`주소` varchar(50),`웹사이트` varchar(50),`id` int NOT NULL ,PRIMARY KEY (`id`))";
pstm1 = (PreparedStatement) con.prepareStatement(del);
pstm2 = (PreparedStatement) con.prepareStatement(cre);
pstm1.execute();
pstm2.execute();
for(int i=0; i<=sheet.getLastRowNum(); i++){
row = sheet.getRow(i);
String fullname = row.getCell(0).getStringCellValue();
String lastname = row.getCell(1).getStringCellValue();
String firstname = row.getCell(2).getStringCellValue();
String nickname = row.getCell(3).getStringCellValue();
String company = row.getCell(4).getStringCellValue();
String sub = row.getCell(5).getStringCellValue();
String hei = row.getCell(6).getStringCellValue();
String mailaddress1 = row.getCell(7).getStringCellValue();
String mailaddress2 = row.getCell(8).getStringCellValue();
String cnumber1 = row.getCell(9).getStringCellValue();
String cnumber2 = row.getCell(10).getStringCellValue();
String cnumber3 = row.getCell(11).getStringCellValue();
String pnumber1 = row.getCell(12).getStringCellValue();
String pnumber2 = row.getCell(13).getStringCellValue();
String fax1 = row.getCell(14).getStringCellValue();
String fax2 = row.getCell(15).getStringCellValue();
String address = row.getCell(16).getStringCellValue();
String website = row.getCell(17).getStringCellValue();
int id = (int) row.getCell(18).getNumericCellValue();
String sql = "INSERT INTO dahanmail VALUES('"+fullname+"','"+lastname+"','"+firstname+"'"
+ ",'"+nickname+"','"+company+"','"+sub+"','"+hei+"','"+mailaddress1+"','"+mailaddress2+"','"+cnumber1+"','"+cnumber2+"','"+cnumber3+"'"
+ ",'"+pnumber1+"','"+pnumber2+"','"+fax1+"','"+fax2+"',"
+ "'"+address+"','"+website+"','"+id+"')";
pstm = (PreparedStatement) con.prepareStatement(sql);
pstm.execute();
System.out.println("Import rows "+i);
}
con.commit();
pstm.close();
con.close();
input.close();
System.out.println("Success import excel to mysql table");
I have saved the Excel data to MySQL using POI in Java,
and it works well.
But when I check the MySQL table in cmd,
the console window still shows broken characters like "???".
It seems to be an encoding problem.
How can I change the source to fix the encoding problem?
What you have looks like it's close to working, but there are some things I would change:
Firstly, declare your SQL before the loop, with ?'s for value placeholders:
String sql = "INSERT INTO dahanmail VALUES(?,?,?)";
PreparedStatement pstm = conn.prepareStatement(sql);
(I have not put enough ?s in the code above, there should be one for each value)
Then, in the loop, you can set the parameters:
pstm.setString(1,row.getCell(0).getStringCellValue());
pstm.setString(2,row.getCell(2).getStringCellValue());
//one for each value. You could use another loop.
Then (still in the loop), execute the statement:
pstm.execute();
Finally clear the parameters for next time round:
pstm.clearParameters();
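Putting those steps together, the loop might look roughly like the sketch below; the 19-column order and cell indexes are assumptions taken from the CREATE TABLE in the question:
String sql = "INSERT INTO dahanmail VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)";
PreparedStatement pstm = con.prepareStatement(sql);
for (int i = 0; i <= sheet.getLastRowNum(); i++) {
    Row row = sheet.getRow(i);
    // The first 18 cells are text columns, the last one is the numeric id.
    for (int col = 0; col < 18; col++) {
        pstm.setString(col + 1, row.getCell(col).getStringCellValue());
    }
    pstm.setInt(19, (int) row.getCell(18).getNumericCellValue());
    pstm.execute();
    pstm.clearParameters();
}
con.commit();
pstm.close();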

Find tables in oracle that dont have composite primary key using DatabaseMetaData Java

String [] tableTypes = { "TABLE" };
DatabaseMetaData md = (DatabaseMetaData) dbConnection.getMetaData();
ResultSet rs = md.getTables(null, null, "%", tableTypes);
while (rs.next()) {
System.out.println(rs.getString(3));
}
I'm using this part of the code to get all tables from my local Oracle database, but I need to change it so that it returns only the tables whose primary key consists of a single column. Any ideas?
You could use DatabaseMetaData.getPrimaryKeys() for each table in that loop:
String [] tableTypes = { "TABLE" };
DatabaseMetaData md = dbConnection.getMetaData(); // the cast is unnecessary!
ResultSet rs = md.getTables(null, null, "%", tableTypes);
while (rs.next())
{
String schema = rs.getString(2);
String table = rs.getString(3);
ResultSet pkRs = md.getPrimaryKeys(null, schema, table);
int colCount = 0;
while (pkRs.next())
{
colCount ++;
}
pkRs.close();
if (colCount == 1)
{
System.out.println("Table " + table + " has a single column primary key");
}
}
However, this will be awfully slow. Using a query that retrieves this information directly from user_constraints and user_cons_columns is going to be a lot faster:
select col.table_name, count(*)
from user_constraints uc
join user_cons_columns col
on col.table_name = uc.table_name
and col.constraint_name = uc.constraint_name
where constraint_type = 'P'
group by col.table_name
having count(*) = 1;
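If you want to run that query from JDBC, a minimal sketch (assuming the same open dbConnection as above) could be:
String sql = "select col.table_name "
           + "from user_constraints uc "
           + "join user_cons_columns col "
           + "  on col.table_name = uc.table_name "
           + " and col.constraint_name = uc.constraint_name "
           + "where uc.constraint_type = 'P' "
           + "group by col.table_name "
           + "having count(*) = 1";
try (PreparedStatement ps = dbConnection.prepareStatement(sql);
     ResultSet rs = ps.executeQuery()) {
    while (rs.next()) {
        // Each row is a table whose primary key has exactly one column.
        System.out.println(rs.getString(1));
    }
}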
You can use this code :
static Statement statement = null;
static ResultSet result = null;
/**
* @param args the command line arguments
*/
public static void main(String[] args) {
// TODO code application logic here
try {
Class.forName(driverClassName);
dbConnection = DriverManager.getConnection(url, username, passwd);
statement = dbConnection.createStatement();
String[] tableTypes = {"TABLE"};
DatabaseMetaData dbmd;
dbmd = dbConnection.getMetaData();
result = dbmd.getTables("%", username, "%", new String[]{tableTypes[0]});
while (result.next()) {
String tableName = result.getString("TABLE_NAME");
ResultSet tempSet = dbmd.getPrimaryKeys(null, username, tableName);
String keyName="";
int counter=0;
while (tempSet.next()) {
keyName = tempSet.getString("COLUMN_NAME");
counter++;
}
if(counter == 1) {
System.out.println(tableName);
}
}
} catch (Exception e) {
}
}
A table can have at most one primary key. That primary key can be compound, i.e. consist of multiple columns. Any other (second) key can only be UNIQUE (+ NOT NULL), which is not exactly the same as a primary key.
The best way to check the columns is to query the ALL_CONSTRAINTS view; the JDBC DatabaseMetaData API has only limited functionality.

How to pass an array of BLOB to a stored oracle procedure?

I am allowing users to upload multiple files to my database. The file contents must be stored in my Oracle database as BLOBs.
How can I write an Oracle procedure to do this? (I have little knowledge of Oracle stored procedures.)
Once this is done, how can I call the stored procedure from Java using JDBC's CallableStatement?
Please help.
First of all, you have to create the type that will contain the table of BLOB:
CREATE OR REPLACE TYPE tab_blobs AS TABLE OF BLOB;
In Java, you have to rely on the STRUCT type provided by Oracle sql.
You will create a STRUCT that will contain the array of BLOB to store into the DB.
The code would look like the following:
import java.sql.Connection;
import java.sql.SQLException;
import oracle.jdbc.OraclePreparedStatement;
import oracle.jdbc.driver.OracleDriver;
import oracle.sql.ARRAY;
import oracle.sql.ArrayDescriptor;
import oracle.sql.STRUCT;
import oracle.sql.StructDescriptor;
public class ArrayDemo
{
public static void passArray()
throws SQLException
{
Connection conn = new OracleDriver().defaultConnection();
byte[] fileInByteArray = "value".getBytes();
StructDescriptor itemDescriptor = StructDescriptor.createDescriptor("BLOB", conn);
Object[] itemAtributes = new Object[] {};
STRUCT itemObject1 = new STRUCT(itemDescriptor, conn, itemAtributes);
itemAtributes = new Object[] {};
STRUCT itemObject2 = new STRUCT(itemDescriptor, conn, itemAtributes);
STRUCT[] idsArray = { itemObject1, itemObject2 };
ArrayDescriptor descriptor = ArrayDescriptor.createDescriptor("IDS_TABLE", conn);
ARRAY array_to_pass = new ARRAY(descriptor, conn, idsArray);
OraclePreparedStatement ps = (OraclePreparedStatement) conn.prepareStatement("begin getInfo(:x); end;");
ps.setARRAY(1, array_to_pass);
ps.execute();
}
}
But why not simplify the handling by iterating over the files and inserting them one after the other:
public static void insererBlob(String name, String path) {
File file = new File(path);
try{
//link to DB
Connection connection = DriverManager.getConnection("url","user","password");
//link to file
FileInputStream stream = new FileInputStream(file);
//prepare the SQL instruction
String sql = "INSERT INTO file_table VALUES (?, ?)";
PreparedStatement statement = connection.prepareStatement(sql);
//blob insertion
statement.setString(1, name);
statement.setBinaryStream(2, stream, (int)file.length());
statement.executeUpdate();
}catch(Exception e){
//ERROR SQL, IO, etc .
}finally {
//close connection ?
}
}
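A hypothetical driver loop for that method could then be as simple as the following sketch (the file paths are made up for illustration):
// Hypothetical caller for the insererBlob method above.
String[] paths = { "C:\\docs\\a.pdf", "C:\\docs\\b.pdf" };
for (String path : paths) {
    File file = new File(path);
    insererBlob(file.getName(), path); // one row per uploaded file
}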
Here is another attempt to help you.
You can find more info from oracle here:
http://docs.oracle.com/cd/B28359_01/java.111/e10788/connect.htm#CHDGHFCG
http://docs.oracle.com/javase/tutorial/jdbc/basics/
Also, this example is taken from (a website that was of great help at the time I had to do this):
http://docs.oracle.com/javase/tutorial/jdbc/basics/blob.html
public void addRowToCoffeeDescriptions(
String coffeeName, String fileName)
throws SQLException {
PreparedStatement pstmt = null;
try {
Clob myClob = this.con.createClob();
Writer clobWriter = myClob.setCharacterStream(1);
String str = this.readFile(fileName, clobWriter);
System.out.println("Wrote the following: " +
clobWriter.toString());
if (this.settings.dbms.equals("mysql")) {
System.out.println(
"MySQL, setting String in Clob " +
"object with setString method");
myClob.setString(1, str);
}
System.out.println("Length of Clob: " + myClob.length());
String sql = "INSERT INTO COFFEE_DESCRIPTIONS " +
"VALUES(?,?)";
pstmt = this.con.prepareStatement(sql);
pstmt.setString(1, coffeeName);
pstmt.setClob(2, myClob);
pstmt.executeUpdate();
} catch (SQLException sqlex) {
JDBCTutorialUtilities.printSQLException(sqlex);
} catch (Exception ex) {
System.out.println("Unexpected exception: " + ex.toString());
} finally {
if (pstmt != null)pstmt.close();
}
}
Below is my code; I hope it gives you your answer.
The Java code:
try {
Class.forName("oracle.jdbc.driver.OracleDriver");
String url = "jdbc:oracle:thin:@localhost:1521:orcl";
Connection con = DriverManager.getConnection(url, db_user, password);
System.out.println("Connected to database");
SimpleDateFormat simpleDateFormat = new SimpleDateFormat("dd/MM/yyyy");
Date now = new java.sql.Date(simpleDateFormat.parse("12/02/2001").getTime());
String command2 = "{call USER1(?,?,?,?)}";
String path;
File[] roots = File.listRoots();
path=roots[0].getPath()+"dfg.jpg";
System.out.println("path: " + path);
//shows drives in you computer
for(int i = 0; i < roots.length ; i++){
System.out.println("drive: " + roots[i].getPath());
}
CallableStatement insertStatment = con.prepareCall(command2);
insertStatment.setInt(1, 18);
insertStatment.setString(2, "ankssaait");
insertStatment.setDate(3, now);
File file = new File(path);
//link to file
FileInputStream stream = new FileInputStream(file);
insertStatment.setBinaryStream(4, stream, (int) file.length());
System.out.println("onExecute: "+ insertStatment.executeUpdate());
insertStatment.close();
System.out.println("done");
} catch (Exception e) {
e.printStackTrace();
}
My table is:
CREATE TABLE "ANKIT"."O_USER"
( "NAME" VARCHAR2(20 BYTE),
"GENDER" CHAR(1 BYTE),
"DESIGNTION" VARCHAR2(20 BYTE),
"YEAR" INTERVAL YEAR (2) TO MONTH,
"ID" NUMBER NOT NULL ENABLE,
"DOB" DATE,
"PROOF" CLOB,
"ADDRESS" VARCHAR2(100 BYTE),
"TELEPHONE" NUMBER,
"RAW1" RAW(20),
"IMAGE" BLOB,
CONSTRAINT "O_USER_PK" PRIMARY KEY ("ID"));
My procedure is:
CREATE OR REPLACE PROCEDURE USER1 (
  U_ID    IN o_user.id%TYPE,
  U_NAME  IN o_user.name%TYPE,
  U_DOB   IN o_user.dob%TYPE,
  U_IMAGE IN o_user.image%TYPE
) AS
BEGIN
  insert into o_user(id, name, dob, image) values (u_id, u_name, u_dob, u_image);
END USER1;

Insert CLOB into Oracle database

My question is: How do you get around the ORA-01704: string literal too long error when inserting (or doing anything in queries) with CLOBs?
I want to have a query like this:
INSERT ALL
INTO mytable VALUES ('clob1')
INTO mytable VALUES ('clob2') --some of these clobs are more than 4000 characters...
INTO mytable VALUES ('clob3')
SELECT * FROM dual;
When I try it with actual values though I get ORA-01704: string literal too long back. This is pretty obvious, but how do I insert clobs (or execute any statement at all with a clob)?
I've tried looking at this question, but I don't think it has what I'm looking for. The clobs I have are in a List<String> and I iterate through them to make the statement. My code as it is follows:
private void insertQueries(String tempTableName) throws FileNotFoundException, DataException, SQLException, IOException {
String preQuery = " into " + tempTableName + " values ('";
String postQuery = "')" + StringHelper.newline;
StringBuilder inserts = new StringBuilder("insert all" + StringHelper.newline);
List<String> readQueries = getDomoQueries();
for (String query : readQueries) {
inserts.append(preQuery).append(query).append(postQuery);
}
inserts.append("select * from dual;");
DatabaseController.getInstance().executeQuery(databaseConnectionURL, inserts.toString());
}
public ResultSet executeQuery(String connection, String query) throws DataException, SQLException {
Connection conn = ConnectionPool.getInstance().get(connection);
Statement stmt = conn.createStatement();
ResultSet rs = stmt.executeQuery(query);
conn.commit();
ConnectionPool.getInstance().release(conn);
return rs;
}
You are making it way too complicated.
Use a PreparedStatement and addBatch() for each clob in your list:
String sql = "insert into " + tempTableName + " values (?)";
PreparedStatement stmt = connection.prepareStatement(sql);
for (String query : readQueries) {
stmt.setCharacterStream(1, new StringReader(query), query.length());
stmt.addBatch();
}
stmt.executeBatch();
No messing around with escaping strings, no problem with the length of the literals, no need to create temporary clobs. And most probably just as fast as using a single INSERT ALL statement.
If you are using a current driver (> 10.2) then I think the setCharacterStream() call and the creation of the Reader is not necessary either. A simple setString(1, query) will most probably work as well.
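In other words, under that assumption about the driver version, the batch loop above could shrink to something like this sketch:
String sql = "insert into " + tempTableName + " values (?)";
PreparedStatement stmt = connection.prepareStatement(sql);
for (String query : readQueries) {
    stmt.setString(1, query); // recent drivers switch to LOB binding for large values
    stmt.addBatch();
}
stmt.executeBatch();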
You'll need to use bind variables rather than building a SQL statement using string concatenation. This will be beneficial from a security, performance, and robustness standpoint as well since it will reduce the risk of SQL injection attacks, decrease the amount of time Oracle has to spend doing hard parses of the SQL statement, and will eliminate the potential that there is a special character in the string that causes an invalid SQL statement to get generated (i.e. a single quote).
I would expect that you want something like
private void insertQueries(String tempTableName) throws FileNotFoundException, DataException, SQLException, IOException {
String preQuery = " into " + tempTableName + " values (?)" + StringHelper.newline;
StringBuilder inserts = new StringBuilder("insert all" + StringHelper.newline);
List<String> readQueries = getDomoQueries();
for (String query : readQueries) {
inserts.append(preQuery);
}
inserts.append("select * from dual");
Connection conn = ConnectionPool.getInstance().get(connection);
PreparedStatement pstmt = conn.prepareStatement(inserts.toString());
int i = 1;
for (String query : readQueries) {
Clob clob = CLOB.createTemporary(conn, false, oracle.sql.CLOB.DURATION_SESSION);
clob.setString(1, query); // 1 is the position within the CLOB, not the bind index
pstmt.setClob(i, clob);
i = i + 1;
}
pstmt.executeUpdate();
}
BLOB (Binary Large Object) and CLOB (Character Large Object) are special data types that can hold large chunks of data as binary objects or text. Blob and Clob objects persist their data into the database as a stream.
An example piece of code:
public class TestDB {
public static void main(String[] args) {
try {
/** Loading the driver */
Class.forName("com.oracle.jdbc.Driver");
/** Getting Connection */
Connection con = DriverManager.getConnection("Driver URL","test","test");
PreparedStatement pstmt = con.prepareStatement("insert into Emp(id,name,description)values(?,?,?)");
pstmt.setInt(1,5);
pstmt.setString(2,"Das");
// Create a big CLOB value...AND inserting as a CLOB
StringBuffer sb = new StringBuffer(400000);
sb.append("This is the Example of CLOB ..");
String clobValue = sb.toString();
pstmt.setString(3, clobValue);
int i = pstmt.executeUpdate();
System.out.println("Done Inserted");
pstmt.close();
con.close();
// Retrieve CLOB values (reuse the existing variables; redeclaring them would not compile)
con = DriverManager.getConnection("Driver URL","test","test");
pstmt = con.prepareStatement("select * from Emp where id=5");
ResultSet rs = pstmt.executeQuery();
Reader instream = null;
int chunkSize;
if (rs.next()) {
String name = rs.getString("name");
java.sql.Clob clob = rs.getClob("description");
StringBuffer sb1 = new StringBuffer();
chunkSize = ((oracle.sql.CLOB)clob).getChunkSize();
instream = clob.getCharacterStream();
BufferedReader in = new BufferedReader(instream);
String line = null;
while ((line = in.readLine()) != null) {
sb1.append(line);
}
if (in != null) {
in.close();
}
// this is the clob data converted into string
String clobdata = sb1.toString();
}
} catch (Exception e) {
e.printStackTrace();
}
}
}
From the Oracle documentation:
You must bear in mind the following automatic switching of the input mode for large data.
There are three input modes as follows: Direct binding, Stream binding, and LOB binding.
For PL/SQL statements
The setBytes and setBinaryStream methods use direct binding for data smaller than 32767 bytes.
The setBytes and setBinaryStream methods use LOB binding for data larger than 32766 bytes.
The setString, setCharacterStream, and setAsciiStream methods use direct binding for data smaller than 32767 bytes in the database character set.
The setString, setCharacterStream, and setAsciiStream methods use LOB binding for data larger than 32766 bytes in the database character set.
The setBytesForBlob and setStringForClob methods, present in the oracle.jdbc.OraclePreparedStatement interface, use LOB binding for any data size.
The following is an example that puts file content into an input CLOB parameter of a PL/SQL procedure:
public void fileToClob( FileItem uploadFileItem ) throws SQLException, IOException
{
//for using stmt.setStringForClob method, turn the file to a big String
FileItem item = uploadFileItem;
InputStream inputStream = item.getInputStream();
InputStreamReader inputStreamReader = new InputStreamReader( inputStream );
BufferedReader bufferedReader = new BufferedReader( inputStreamReader );
StringBuffer stringBuffer = new StringBuffer();
String line = null;
while((line = bufferedReader.readLine()) != null) { //Read till end
stringBuffer.append(line);
stringBuffer.append("\n");
}
String fileString = stringBuffer.toString();
bufferedReader.close();
inputStreamReader.close();
inputStream.close();
item.delete();
OracleCallableStatement stmt;
String strFunction = "{ call p_file_to_clob( p_in_clob => ? )}";
stmt= (OracleCallableStatement)conn.prepareCall(strFunction);
try{
SasUtility servletUtility = sas.SasUtility.getInstance();
stmt.setStringForClob(1, fileString );
stmt.execute();
} finally {
stmt.close();
}
}
Personally, I like to use the classes from the java.sql.* package, not the oracle.* stuff. For me, the simple approach
Connection con = ...;
try (PreparedStatement pst = con.prepareStatement(
"insert into tbl (other_fld, clob_fld) values (?,?)", new String[]{"tbl_id"});
) {
Clob clob = con.createClob();
readIntoClob(clob, inputStream);
pst.setString(1, "other");
pst.setClob(2, clob);
pst.executeUpdate();
try (ResultSet rst = pst.getGeneratedKeys()) {
if (rst == null || !rst.next()) {
throw new Exception("error with getting auto-generated key");
}
id = rst.getBigDecimal(1);
}
}
stopped working when what I had tested (on a current Tomcat and JDBC driver) moved into production (stuck on Tomcat 6 for stupid reasons). con.createClob() returns null for reasons unknown in that version, so I had to do this double-take (it took me ages to figure out, so I'm sharing it here...):
try (PreparedStatement pst = con.prepareStatement(
"insert into tbl (other_fld) values (?)", new String[]{"tbl_id"});
PreparedStatement getClob= con.prepareStatement(
"select clob_fld from tbl where tbl_id = ? for update");
) {
Clob clob = con.createClob();
readIntoClob(clob, inputStream);
pst.setString(1, "other");
pst.executeUpdate();
try (ResultSet rst = pst.getGeneratedKeys()) {
if (rst == null || !rst.next()) {
throw new Exception("error with getting auto-generated key");
}
id = rst.getBigDecimal(1);
}
// fetch back fresh record, with the Clob
getClob.setBigDecimal(1, id);
getClob.execute();
try (ResultSet rst = getClob.getResultSet()) {
if (rst == null || !rst.next()) {
throw new Exception("error with fetching back clob");
}
Clob c = rst.getClob(1);
// Fill in data
readIntoClob(c, stream);
// that's all
}
} catch (SQLException e) {
...
}
For completeness, here's the helper:
// Read data from an input stream and insert it in to the clob column
private static void readIntoClob(Clob clob, InputStream stream) {
try (BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(stream))) {
char[] buffer = new char[CHUNK_BUFFER_SIZE];
int charsRead;
try (Writer wr = clob.setCharacterStream(1L)) {
// Loop for reading of chunk of data and then write into the clob.
while ((charsRead = bufferedReader.read(buffer)) != -1) {
wr.write(buffer, 0, charsRead);
}
} catch (SQLException | IOException ex) {
...
}
}
}
which is from elsewhere on SO, thanks.
Check out some CLOB-related samples on GitHub.

How to write / update Oracle blob in a reliable way?

I'm trying to write and update a PDF document in a BLOB column, but I am only able to update the BLOB when writing more data than was previously stored.
If I try to update the BLOB column with a smaller document, I get a corrupted PDF.
First the BLOB column is initialized using the empty_blob() function. I wrote the sample Java class below to test this behaviour. I run it the first time with 'true' as the first parameter of the main method, so the first row stores a document of about 31 kB and the second row a document of 278 kB.
Then I run it with 'false' as the parameter; this way the two rows should be updated, swapping the documents. The result is that I only get a correct result when I write more data than what already exists.
How can I write a method that writes and updates a BLOB in a reliable way, without worrying about the size of the binary data?
import static org.apache.commons.io.IOUtils.copy;
import java.io.FileInputStream;
import java.io.OutputStream;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import oracle.jdbc.OracleDriver;
import oracle.jdbc.OracleResultSet;
import oracle.sql.BLOB;
import org.apache.commons.lang.ArrayUtils;
/**
* Prerequisites:
* 1) a table named 'x' must exists [create table x (i number, j blob);]
* 2) that table should have two columns [insert into x (i, j) values (1, empty_blob()); insert into x (i, j) values (2, empty_blob()); commit;]
* 3) download lsp.pdf from http://www.objectmentor.com/resources/articles/lsp.pdf
* 4) download dotguide.pdf from http://www.graphviz.org/Documentation/dotguide.pdf
*/
public class UpdateBlob {
public static void main(String[] args) throws Exception {
processFiles(new String[]{"lsp.pdf", "dotguide.pdf"}, Boolean.valueOf(args[0]));
}
public static void processFiles(String [] fileNames, boolean forward) throws Exception {
if(!forward){
ArrayUtils.reverse(fileNames);
}
int idx = 1;
for(String fname : fileNames){
insert(idx++, fname);
}
}
private static void insert(int idx, String fname) throws Exception{
Connection conn = null;
PreparedStatement ps = null;
ResultSet rs = null;
try {
DriverManager.registerDriver(new OracleDriver());
conn = DriverManager.getConnection("jdbc:oracle:thin:@"+db+":"+port+":"+sid, user, pwd);
ps = conn.prepareStatement("select j from x where i = ? for update");
ps.setLong(1, idx);
rs = ps.executeQuery();
if (rs.next()) {
FileInputStream instream = new FileInputStream(fname);
BLOB blob = ((OracleResultSet)rs).getBLOB(1);
OutputStream outstream = blob.setBinaryStream(1L);
copy(instream, outstream);
instream.close();
outstream.close();
}
rs.close();
ps.close();
conn.close();
} catch (SQLException e) {
e.printStackTrace();
throw new Exception(e);
}
}
}
Oracle version: 11.1.0.7.0 - 64bit
I even tried the standard JDBC API without using Oracle's specific one (like in the example above) without any success.
It's a lot easier:
PreparedStatement pstmt =
conn.prepareStatement("update blob_table set blob = ? where id = ?");
File blob = new File("/path/to/picture.png");
FileInputStream in = new FileInputStream(blob);
// the cast to int is necessary because with JDBC 4 there is
// also a version of this method with a (int, long)
// but that is not implemented by Oracle
pstmt.setBinaryStream(1, in, (int)blob.length());
pstmt.setInt(2, 42); // set the PK value
pstmt.executeUpdate();
conn.commit();
pstmt.close();
It works the same when using an INSERT statement. No need for empty_blob() and a second update statement.
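For instance, the equivalent INSERT could look like this rough sketch, reusing the x(i, j) table from the question and an arbitrary new key value:
PreparedStatement ins = conn.prepareStatement("insert into x (i, j) values (?, ?)");
File blob = new File("/path/to/picture.png");
FileInputStream in = new FileInputStream(blob);
ins.setInt(1, 3); // new key value, chosen arbitrarily for this sketch
ins.setBinaryStream(2, in, (int) blob.length()); // BLOB column bound directly
ins.executeUpdate();
conn.commit();
ins.close();
in.close();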
In addition to a_horse_with_no_name's answer (which relies on PreparedStatement.setBinaryStream(...) API), there're at least two more options for BLOBs, and 3 more for CLOBs and NCLOBs:
Explicitly create a LOB, write to it, and use PreparedStatement.setBlob(int, Blob):
int insertBlobViaSetBlob(final Connection conn, final String tableName, final int id, final byte value[])
throws SQLException, IOException {
try (final PreparedStatement pstmt = conn.prepareStatement(String.format("INSERT INTO %s (ID, VALUE) VALUES (?, ?)", tableName))) {
final Blob blob = conn.createBlob();
try (final OutputStream out = new BufferedOutputStream(blob.setBinaryStream(1L))) {
out.write(value);
}
pstmt.setInt(1, id);
pstmt.setBlob(2, blob);
return pstmt.executeUpdate();
}
}
Update an empty LOB (inserted via EMPTY_BLOB() or EMPTY_CLOB()) via SELECT ... FOR UPDATE. This is Oracle-specific and requires two statements executed instead of one. Additionally, this is what you were trying to accomplish in the first place:
void insertBlobViaSelectForUpdate(final Connection conn, final String tableName, final int id, final byte value[])
throws SQLException, IOException {
try (final PreparedStatement pstmt = conn.prepareStatement(String.format("INSERT INTO %s (ID, VALUE) VALUES (?, EMPTY_BLOB())", tableName))) {
pstmt.setInt(1, id);
pstmt.executeUpdate();
}
try (final PreparedStatement pstmt = conn.prepareStatement(String.format("SELECT VALUE FROM %s WHERE ID = ? FOR UPDATE", tableName))) {
pstmt.setInt(1, id);
try (final ResultSet rset = pstmt.executeQuery()) {
while (rset.next()) {
final Blob blob = rset.getBlob(1);
try (final OutputStream out = new BufferedOutputStream(blob.setBinaryStream(1L))) {
out.write(value);
}
}
}
}
}
For CLOBs and NCLOBs, you can additionally use PreparedStatement.setString() and setNString(), respectively.
FWIW, for something that fits in memory, I found I could simply pass in a byte array as the prepared statement parameter, rather than going through the "stream" rigmarole (or, worse, Oracle-specific/suggested things).
Using a Spring "JDBC template" wrapper (org.springframework.jdbc.core.JdbcTemplate) to put the contents of a "large" (or not) string into a BLOB column, the code is something like the following:
jdbc.update( "insert into a_table ( clob_col ) values ( ? )", largeStr.getBytes() );
There is no step 2.
