SQLite does not store BLOB data.
-
Good day. I have some difficulties with my SQLite database.
If I'm correct, an SQLite database can contain QByteArray data in BLOB (Binary Large Object, obviously) fields. I've tried it, but maybe I'm doing something wrong: the database does not save my byte array. The insert appears to succeed, but when the program reads the table again the BLOB fields are empty. Interestingly, the rest of the data inserted along with the byte array is stored. How can that be?
Here is a code of inserting data:
@MyServer::~MyServer()
{
    // Persist the in-memory state (Users, Groups, Files) into the SQLite
    // database on shutdown: each table is emptied, compacted with VACUUM,
    // and then rewritten from the corresponding container.
    if(!ServerDataBase.isOpen())
        ServerDataBase.open();

    QSqlQuery query;

    // --- wipe and compact the 'users' table ---
    query.exec("DELETE FROM users");
    if(query.isActive())
        qDebug()<<"DELETE 'users' table was complited";
    else
        qDebug()<<"DELETE 'users' table was not complited, because:"<<query.lastError();

    query.exec("VACUUM");
    if(query.isActive())
        qDebug()<<"VACUUM 'users' table was complited";
    else
        qDebug()<<"VACUUM 'users' table was not complited, because:"<<query.lastError();

    // --- wipe and compact the 'Groups' table ---
    query.exec("DELETE FROM Groups");
    if(query.isActive())
        qDebug()<<"DELETE 'Groups' table was complited";
    else
        qDebug()<<"DELETE 'Groups' table was not complited, because:"<<query.lastError();

    query.exec("VACUUM");
    if(query.isActive())
        qDebug()<<"VACUUM 'Groups' table was complited";
    else
        qDebug()<<"VACUUM 'Groups' table was not complited, because:"<<query.lastError();

    // --- wipe and compact the 'Files' table ---
    query.exec("DELETE FROM Files");
    if(query.isActive())
        qDebug()<<"DELETE 'Files' table was complited";
    else
        qDebug()<<"DELETE 'Files' table was not complited, because:"<<query.lastError();

    query.exec("VACUUM");
    if(query.isActive())
        qDebug()<<"VACUUM 'Files' table was complited";
    else
        qDebug()<<"VACUUM 'Files' table was not complited, because:"<<query.lastError();

    // --- rewrite 'users' from the in-memory list ---
    for(int i=0;i<Users.count();i++)
    {
        query.prepare("INSERT INTO users(login,password,post,listOfUsers,groups) VALUES(:login,:password,:post,:listOfUsers,:groups)");
        query.bindValue(":login",Users.at(i).login);
        query.bindValue(":password",Users.at(i).password);
        query.bindValue(":post",Users.at(i).post);
        query.bindValue(":listOfUsers",Users.at(i).listOfUsers);
        query.bindValue(":groups",Users.at(i).groups);
        query.exec();
        if(!query.isActive())
            qDebug()<<"In table 'users' inserting was not complite, because:"<<query.lastError();
    }
    ServerDataBase.commit();

    // --- rewrite 'Groups' from the in-memory list ---
    QSqlQuery query2;
    for(int i=0; i<Groups.count();i++)
    {
        query2.prepare("INSERT INTO Groups(name, creator, exhibitors, files, password) VALUES(:name,:creator,:exhibitors,:files,:password)");
        query2.bindValue(":name",Groups.at(i).name);
        query2.bindValue(":creator",Groups.at(i).creator);
        query2.bindValue(":exhibitors",Groups.at(i).exhibitors);
        query2.bindValue(":files",Groups.at(i).files);
        query2.bindValue(":password",Groups.at(i).password);
        query2.exec();
        if(!query2.isActive())
            qDebug()<<"In table 'Groups' inserting was not complite, because:"<<query2.lastError();
        else // was a redundant "else if(query2.isActive())" -- same behaviour
            qDebug()<<"In Groups was inserted:"<<Groups.at(i).name<<Groups.at(i).creator<<Groups.at(i).exhibitors<<Groups.at(i).files<<Groups.at(i).password;
    }
    ServerDataBase.commit();

    // --- rewrite 'Files' from the in-memory list ---
    QSqlQuery query3;
    if(Files.count()>0)
    {
        for(int i=0;i<Files.count();i++)
        {
            query3.prepare("INSERT INTO Files (nameOfFile, version, numberOfCurrentUsingCopy, sizeOfFile, groupOfUsers, byteFileArray, lastUser) VALUES(:nameOfFile, :version, :numberOfCurrentUsingCopy, :sizeOfFile, :groupOfUsers, :byteFileArray, :lastUser)");
            query3.bindValue(":nameOfFile",Files.at(i).nameOfFile);
            query3.bindValue(":version",Files.at(i).version);
            // Reset the usage counter to 0 before persisting (original behaviour).
            if(Files[i].numberOfCurrentUsingCopy>0)
                Files[i].numberOfCurrentUsingCopy=0;
            query3.bindValue(":numberOfCurrentUsingCopy",Files.at(i).numberOfCurrentUsingCopy);
            query3.bindValue(":sizeOfFile",Files.at(i).sizeOfFile);
            query3.bindValue(":groupOfUsers",Files.at(i).Group);
            query3.bindValue(":byteFileArray",Files.at(i).byteFileArray);
            query3.bindValue(":lastUser",Files.at(i).lastUser);
            query3.exec();
            if(query3.isActive())
                qDebug()<<"In 'Files'' was inserted"<<Files.at(i).byteFileArray<<Files.at(i).nameOfFile<<Files.at(i).version<<Files.at(i).sizeOfFile<<Files.at(i).Group<<Files.at(i).lastUser;
            else
                // BUG FIX: this branch previously logged query.lastError() --
                // the WRONG query object -- so a failed 'Files' insert would
                // report a stale/empty error and hide the real cause.
                qDebug()<<"In table 'Files' inserting was not complite, because:"<<query3.lastError();
        }
        ServerDataBase.commit();
    }
    ServerDataBase.close();
}@
-
Take a look "here":http://sqlite.org/limits.html .
-
Unless you are writing byte arrays larger than 1 billion bytes you are not hitting the default limits of Sqlite.
Assuming that the column 'byteFileArray' is supposed to contain the blob then either:
You are inserting an empty byte array. We do not see how you created the byte array, so we cannot verify that.
You are incorrectly retrieving the byte array. Since you do not show us how you have read the blob field we have no way of verifying that.
In the Sqlite command line tool (or something similar) execute:
@SELECT length(byteFileArray) from Files;
@to see if anything actually made it into the database. If the lengths are zero then see point 1, else see point 2.
-
OK, now let's try to understand the situation. Here is the procedure for reading data from the database.
@
// One row of the 'Files' table mirrored in memory. Field order matches the
// table's column order (see the schema comment next to the SELECT below):
// nameOfFile TEXT PRIMARY KEY, version INTEGER, numberOfCurrentUsingCopy
// INTEGER, sizeOfFile INTEGER, Group TEXT, byteFileArray BLOB, lastUser TEXT,
// contentOfFile TEXT.
struct TableOfFiles
{
QString nameOfFile;           // column 0 -- TEXT PRIMARY KEY
int version;                  // column 1 -- INTEGER
int numberOfCurrentUsingCopy; // column 2 -- INTEGER
int sizeOfFile;               // column 3 -- INTEGER
QString Group;                // column 4 -- TEXT
QByteArray byteFileArray;     // column 5 -- BLOB (raw file bytes)
QString lastUser;             // column 6 -- TEXT
QString contentOfFile;        // column 7 -- TEXT
};...
TableOfFiles tableFiles;
...
QSqlQuery query3;
query3.exec("SELECT * FROM Files");//The sequence of fields in a table //nameOfFile TEXT PRIMARY KEY, //version INTEGER, numberOfCurrentUsingCopy //INTEGER, sizeOfFile INTEGER, //Group TEXT, //byteFileArray BLOB, //lastUser TEXT, //contentOfFile TEXT while(query3.next()) { tableFiles.nameOfFile=query3.value(0).toString(); tableFiles.version=query3.value(1).toInt(); tableFiles.numberOfCurrentUsingCopy=query3.value(2).toInt(); tableFiles.sizeOfFile=query3.value(3).toInt(); tableFiles.Group=query3.value(4).toString(); tableFiles.byteFileArray=query.value(5).toByteArray(); tableFiles.lastUser=query.value(6).toString(); tableFiles.contentOfFile=query.value(7).toString(); if(query3.isActive()) qDebug()<<"From table 'Files' was readed:"<<tableFiles.byteFileArray<<tableFiles.nameOfFile<<tableFiles.version<<tableFiles.numberOfCurrentUsingCopy<<tableFiles.sizeOfFile<<tableFiles.Group<<tableFiles.lastUser; else qDebug()<<"From table 'Files' nothing was readed, because:"<<query3.lastError(); Files.append(tableFiles);
@
Results of procedure:
@
QSqlQuery::value: not positioned on a valid record
QSqlQuery::value: not positioned on a valid record
QSqlQuery::value: not positioned on a valid record
From table 'Files' was readed: "" "TestFile.doc" 0 0 26112 "TestGroup" ""
Check out byteFileArray from database ""
QSqlQuery::value: not positioned on a valid record
QSqlQuery::value: not positioned on a valid record
QSqlQuery::value: not positioned on a valid record
From table 'Files' was readed: "" "SecondTestGroup_version_0_TestFile.doc" 1 0 26112 "SecondTestGroup" ""
@And, as you may notice, in the procedure for inserting data into the database, after checking whether the query is active, we have this code:
@
if(query3.isActive())
qDebug()<<"In 'Files'' was inserted"<<Files.at(i).byteFileArray<<Files.at(i).nameOfFile<<Files.at(i).version<<Files.at(i).sizeOfFile<<Files.at(i).Group<<Files.at(i).lastUser
@The results tell us that byteFileArray is definitely not empty:
@
//In case, when we added in databse a new .doc file.
In 'Files'' was inserted "?Ia?±a//Old files that have lost content due to an error
In 'Files'' was inserted "" "TestFile.doc" 0 26112 "TestGroup" ""
@@
//After adding a new file .txt
In 'Files'' was inserted "This is a test file txt" "Тест.txt" 0 23 "SecondTestGroup" "client1"
@In the case of .doc files there is a problem with reading it, but it's not empty. Also, the size of the .txt file is 23 bytes and the size of the .doc file is 26112 bytes — definitely less than 1000000 bytes.
Receiving bytearray:
@
// Client/receive side (fragment -- the poster elided parts with "...").
// NOTE(review): `block` is ASSIGNED, not appended, on each loop pass, so if
// readAll() delivers the payload in more than one pass only the last chunk
// survives, while sizeReceiviedFileForGroups keeps counting every chunk.
// Verify against the full source -- this is a classic way to end up with a
// truncated or partial blob when data arrives in several TCP segments.
QByteArray block;... while(!in.atEnd()) { block= in.device()->readAll(); quint64 toFile=block.size(); sizeReceiviedFileForGroups+=toFile; } ... if(sizeReceiviedFileForGroups>0&&filesizeForAddingFileInGroup!=0) { _serv->tableFiles.byteFileArray+=block; }
@
The procedure for sending data is similar on the server and the client:
@
// Sending side (fragment). NOTE(review): the first line looks like two
// declarations fused by a bad paste -- "QVector<QFile*> &sendFileVector" and
// "QByteArray data;" -- confirm against the real source.
QVector<QFile*> &sendFileVectorQByteArray data;
data.clear();
if(sendFileVector[Flowing_in_turn]->open(QIODevice::ReadOnly))
{
//sendFileVector[Flowing_in_turn]->seek(0);
while(!sendFileVector[Flowing_in_turn]->atEnd())
{
// NOTE(review): `data` is overwritten (not appended) on every pass; with
// atEnd()/readAll() on a local file this loop body runs once, but the
// pattern would drop data for sources that deliver it in chunks.
data=(*sendFileVector[Flowing_in_turn]).readAll();
}_sok->write(data); _sok->flush(); sendFileVector[Flowing_in_turn]->close(); data.clear(); }
@
-
There is a "wiki":http://qt-project.org/wiki/How_to_Store_and_Retrieve_Image_on_SQLite too
-
Thank you for your expansive reply. Did you run the simple query I asked?
Here's a complete working example
@
// main.cpp -- self-contained demonstration that the QSQLITE driver
// round-trips QByteArray BLOBs: the program uses its own source file as the
// payload, stores it five times (doubling it each round for variety), then
// reads every blob back and reports the retrieved sizes.
#include <QtCore>
#include <QtSql>
#include <QDebug>

int main(int argc, char *argv[])
{
    QCoreApplication app(argc, argv);

    // Load this very source file as the blob payload.
    QByteArray payload;
    QFile sourceFile("main.cpp");
    if (sourceFile.open(QIODevice::ReadOnly))
        payload = sourceFile.readAll();
    qDebug() << "Original data" << payload.size() << "bytes";

    // An in-memory database is enough for the demonstration.
    QSqlDatabase database = QSqlDatabase::addDatabase("QSQLITE");
    database.setDatabaseName(":memory:");
    if (database.open()) {
        QSqlQuery query(database);
        query.prepare("CREATE TABLE test (id INTEGER PRIMARY KEY, size INTEGER, data BLOB)");
        if (!query.exec())
            qWarning() << query.lastError();

        // Store the blob five times through a prepared statement.
        query.prepare("INSERT INTO test (id, size, data) VALUES (?, ?, ?)");
        for (int row = 0; row < 5; ++row) {
            query.bindValue(0, row);
            query.bindValue(1, payload.size());
            query.bindValue(2, payload);
            if (!query.exec())
                qWarning() << query.lastError();
            payload += payload; // double the payload each round
        }

        // Read every row back and print how many bytes each blob holds.
        query.prepare("SELECT id, size, data FROM test");
        if (query.exec()) {
            while (query.next()) {
                qDebug() << query.value(0).toLongLong()
                         << query.value(1).toLongLong()
                         << "stored"
                         << query.value(2).toByteArray().size()
                         << "bytes retrieved";
            }
        } else {
            qWarning() << query.lastError();
        }
    }
    return 0;
}
@
If that works (and it will), then the problem is elsewhere in your code. The size of the file you are writing to the database and the actual size of the byte array of data may never have been the same.
The stuff at the end about "receiving bytearray" has nothing to do with your thread title and question. However, incorrectly handling network data is a common way to not have the complete file contents you think you have. The usual mistake is to assume that all the data arrives at the same time.