C++ Qml integration

General and Desktop
    Naveen_D
    wrote on 1 Dec 2016, 09:43
    #1

    Hi everyone,

    I have a QML application where I am integrating C++ code by registering the class and calling its function from a MouseArea in the QML code. The code builds without any errors, I can run it, and I get the expected output. But when I click on the respective rectangle, the function is called internally, and after some time the output I got loses its color and turns black. Why does this happen?
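
    For context, the class is exposed to QML with qmlRegisterType. The snippet below is only a minimal sketch of what that registration in main.cpp presumably looks like; the URI, version and type name are taken from the QML import further down, while the header name, constructor and main.qml path are assumptions:

    #include <QGuiApplication>
    #include <QQmlApplicationEngine>
    #include <QtQml>
    #include "voicerecognition.h"   // assumed header name for the VoiceRecognition class

    int main(int argc, char *argv[])
    {
        QGuiApplication app(argc, argv);

        // Register the C++ type so "import com.voicerecognition 1.0" resolves in QML
        // and VoiceRecognition can be instantiated there.
        qmlRegisterType<VoiceRecognition>("com.voicerecognition", 1, 0, "VoiceRecognition");

        QQmlApplicationEngine engine;
        engine.load(QUrl(QStringLiteral("qrc:/main.qml")));   // assumed main QML file

        return app.exec();
    }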

    Here is the code.
    QML code (I am calling the function in the MouseArea of the rectangle with id: voicerecRect):

    import QtQuick 2.0
    import QtQuick.Layouts 1.2
    import QtQuick.Controls 1.0
    import QtQuick.Dialogs 1.1
    import QtQuick.Controls 2.0
    
    import com.voicerecognition 1.0
    
    Rectangle {
        id: settings_main_rect
        width: parent.width
        height: parent.height
        color: "#e9ebf8"
    
        Image {
            id: back_img
            source: "qrc:/HomeScreen.png"
            anchors.centerIn: parent
        }
    
    
        Popup {
            id: popup
            width: parent.width/2
            height: parent.height/2
            x:width/2
            y:height/2
            modal: true
            focus: true
            closePolicy: Popup.CloseOnEscape | Popup.CloseOnPressOutsideParent
        }
    
        RowLayout{
            id: firstrowsettings
            spacing: 50
            anchors.centerIn: parent
            // wifi icon
            Rectangle{
                id:wifiRect
                width: settings_main_rect.width/5
                height: settings_main_rect.height/4
                color: "transparent"
                Image {
                    id: wifi_image
                    source: "qrc:/wifi-circle-icon12.png"
                    width: parent.width-10
                    height: parent.height-10
                    smooth: true
                    fillMode: Image.PreserveAspectFit
                    anchors.centerIn: parent
    
    
                    Text {
                        id: wifitext
                        anchors.top: parent.bottom
                        anchors.horizontalCenter: wifi_image.horizontalCenter
                        text: qsTr("Wifi")
                        color: "white"
                        font.pixelSize: parent.height * (2 / 9)
    
                    }
                }
            }
    
            // bluetooth icon
            Rectangle{
                id:bluetoothRect
                width: settings_main_rect.width/5
                height: settings_main_rect.height/4
                color: "transparent"
                Image {
                    id: bluetooth_image
                    source: "qrc:/App-Bluetooth-iconnew.png"
                    width: parent.width-10
                    height: parent.height-10
                    smooth: true
                    fillMode: Image.PreserveAspectFit
                    anchors.centerIn: parent
    
                    Text {
                        id: bluetoothtext
                        anchors.top: parent.bottom
                        anchors.horizontalCenter: bluetooth_image.horizontalCenter
                        text: qsTr("Bluetooth")
                        color: "white"
                        font.pixelSize: parent.height * (2 / 9)
    
                    }
                }
            }
    
            // voice recorder
            Rectangle{
                id:voicerecRect
                width: settings_main_rect.width/5
                height: settings_main_rect.height/4
                color: "transparent"
    
                VoiceRecognition {
                    id: voiceRecognizer
                }
    
                Image {
                    id: vr_image
                    source: "qrc:/AppbarIcon.png"
                    //source: "qrc:/Voice-Recoder-icon.png"
                    width: parent.width-10
                    height: parent.height-10
                    smooth: true
                    fillMode: Image.PreserveAspectFit
                    anchors.centerIn: parent
    
                    Text {
                        id: vrtext
                        anchors.top: parent.bottom
                        anchors.horizontalCenter: vr_image.horizontalCenter
                        text: qsTr("Voice Recorder")
                        color: "white"
                        font.pixelSize: parent.height * (2 / 9)
    
                    }
                    MouseArea {
                        anchors.fill: parent
                        onClicked: {
                            //popup.open()
                            voicerecRect.color = 'green'
                            voiceRecognizer.vstartVoiceRecognition();
                        }
                    }
                }
            }
        }
    
        // To go menu screen page
        Rectangle{
            id: back_page_rect
            width: parent.width/6
            height: parent.height/12
            color:"Transparent"
            anchors.right: parent.right
            anchors.bottom: parent.bottom
            anchors.rightMargin: 15
            anchors.bottomMargin: 8
            Image {
                id: back_image
                source: "qrc:/back_arrow.png"
                width: parent.width
                height: parent.height
    
                smooth: true
                fillMode: Image.PreserveAspectFit
                antialiasing: true
                MouseArea{
                    id: back_page_rect_mousearea
                    anchors.fill: parent
                    onClicked: {
                        settings_main_rect.visible= false
                        menusrcn.visible= true
                        voicerecRect.color = 'transparent'
    
                        //main_menurect.visible = false//
    
    
                    }
                }
            }
        }
    }
    

    The C++ (.cpp) code:

    int VoiceRecognition::vstartVoiceRecognition()
    {
        /*********************************
         * converting file name to char* *
         *********************************/
        QString file= "/home/ubuntu/Documents/Sample_Examples_Qt_Qml/JuliusTest2/my.jconf";
    
        QByteArray b_file= file.toLatin1();
    
        char *fileName= b_file.data();
    
        /********************************************
         *                Start up                  *
         ********************************************/
        /********************************************
         * 1. Load configurations from .jconf file  *
         ********************************************/
    
        m_fileConfig= j_config_load_file_new(fileName);
    
        qDebug()<<"loaded file >>>>>>>"<<m_fileConfig<<endl;
    
        if(m_fileConfig== NULL)
        {
            qDebug()<<"Try `-help' for more information."<<endl;
            return -1;
        }
    
        /******************************************************
         * 2. Create recognition instance according to jconf **
         * It loads models, Setup final parameters, build *****
         * lexicon and setup work area for recognition ********
         ******************************************************/
    
        m_recog=j_create_instance_from_jconf(m_fileConfig);
        qDebug()<<"Recognition instance >>>>>"<<m_recog<<endl;
        if(m_recog== NULL)
        {
            qDebug()<<"Error in startup"<<endl;
            return -1;
        }
    
        callback_add(m_recog,CALLBACK_EVENT_SPEECH_READY, status_recready, NULL);
        callback_add(m_recog,CALLBACK_EVENT_SPEECH_START,status_recstart,NULL);
        callback_add(m_recog,CALLBACK_RESULT,output_result,NULL);
    
        if(j_adin_init(m_recog)== FALSE)
        {
            return -1;
        }
    
        j_recog_info(m_recog);
    
        static char m_speechfilename[MAXPATHLEN];
    
        if(m_fileConfig->input.speech_input == SP_MFCFILE)
        {
            while(get_line_from_stdin(m_speechfilename,MAXPATHLEN,"enter MFCC filename ->")!= NULL)
            {
                if(verbose_flag)
                {
                    qDebug()<<endl<<"input MFCC file:"<<m_speechfilename<<endl;
                }
    
                m_ret= j_open_stream(m_recog,m_speechfilename);
                qDebug()<<"value of m_ret is>>>>"<<m_ret<<endl;
    
                switch(m_ret)
                {
                case 0 :
                    break;
                case -1 :
                    continue;
                case -2:
                    return 0;
                }
    
                m_ret=j_recognize_stream(m_recog);
                qDebug()<<"Value of m_ret for j_recognize_stream >>>>"<<m_ret<<endl;
    
                if(m_ret==-1)
                    return -1;
            }
        }
        else
        {
            switch(j_open_stream(m_recog,NULL))
            {
            case 0 :
                break;
            case -1 :
                qDebug()<<"Error in input stream"<<endl;
                return 0;
            case -2:
                qDebug()<<"Failed to begin input stream"<<endl;
                return 0;
            }
    
            m_ret=j_recognize_stream(m_recog);
            qDebug()<<"Value of m_ret for j_recognize_stream >>>>"<<m_ret<<endl;
    
            if(m_ret==-1)
                return -1;
        }
        j_close_stream(m_recog);
    
        j_recog_free(m_recog);
    
        return (0);
    }
    
    void VoiceRecognition:: status_recready(Recog *recog, void *dummy)
    {
        if (recog->jconf->input.speech_input == SP_MIC || recog->jconf->input.speech_input == SP_NETAUDIO)
        {
            qDebug()<<endl<< "<<< please speak >>>"<<endl;
        }
    }
    void VoiceRecognition:: status_recstart(Recog *recog, void *dummy)
    {
        if (recog->jconf->input.speech_input == SP_MIC || recog->jconf->input.speech_input == SP_NETAUDIO)
        {
            qDebug()<<"  "<<endl;
        }
    }
    
    void VoiceRecognition:: put_hypo_phoneme(WORD_ID *seq, int n, WORD_INFO *winfo)
    {
        int i,j;
        WORD_ID w;
        static char buf[MAX_HMMNAME_LEN];
    
        if (seq != NULL) {
            for (i=0;i<n;i++) {
                if (i > 0) { /*printf(" |");*/ }
                w = seq[i];
                for (j=0;j<winfo->wlen[w];j++) {
                    center_name(winfo->wseq[w][j]->name, buf);
                    //                qDebug()<<"Value od buffer >>>>"<<buf<<endl;
                }
            }
        }
    }
    
    void VoiceRecognition:: output_result(Recog *recog, void *dummy)
    {
        int i;
        WORD_INFO *winfo;
        WORD_ID *seq;
        int seqnum;
        int n;
        Sentence *s;
        RecogProcess *r;
        HMM_Logical *p;
        SentenceAlign *align;
    
        /* all recognition results are stored at each recognition process
         instance */
        for(r=recog->process_list;r;r=r->next) {
    
            /* skip the process if the process is not alive */
            if (! r->live) continue;
    
            /* result are in r->result.  See recog.h for details */
    
            /* check result status */
            if (r->result.status < 0) {      /* no results obtained */
                /* output message according to the status code */
                switch(r->result.status) {
                case J_RESULT_STATUS_REJECT_POWER:
                    qDebug()<<"<input rejected by power>"<<endl;
                    break;
                case J_RESULT_STATUS_TERMINATE:
                    qDebug()<<"<input teminated by request>"<<endl;
                    break;
                case J_RESULT_STATUS_ONLY_SILENCE:
                    qDebug()<<"<input rejected by decoder (silence input result)>"<<endl;
                    break;
                case J_RESULT_STATUS_REJECT_GMM:
                    qDebug()<<"<input rejected by GMM>"<<endl;
                    break;
                case J_RESULT_STATUS_REJECT_SHORT:
                    qDebug()<<"<input rejected by short input>"<<endl;
                    break;
                    //      case J_RESULT_STATUS_REJECT_LONG:
                    //    printf("<input rejected by long input>\n");
                    //    break;
                case J_RESULT_STATUS_FAIL:
                    qDebug()<<"<search failed>"<<endl;
                    break;
                }
                /* continue to next process instance */
                continue;
            }
    
            /* output results for all the obtained sentences */
            winfo = r->lm->winfo;
    
            for(n = 0; n < r->result.sentnum; n++) { /* for all sentences */
    
                s = &(r->result.sent[n]);
                seq = s->word;
                seqnum = s->word_num;
    
                /* output word sequence like Julius */
                qDebug()<<"Sentence :";
    //            QStringList WordList;
                for(i=0;i<seqnum;i++)
                {
                      qDebug()<<winfo->woutput[seq[i]];
    //                char *data1= winfo->woutput[seq[i]];
    //                qDebug()<<"char *data>>>>>>>"<<data1<<endl;
    //                QString strData= data1;
    //                WordList.append(strData);
                }
    //            qDebug()<<"String list is >>>>>"<<WordList<<endl;
    //            qDebug()<<"Wordlist length is>>>"<<WordList.length()<<endl;
    //            if(!WordList.isEmpty())
    //            {
    //                WordList.removeFirst();
    //                WordList.removeLast();
    //                QMessageBox m_popupmsgbox;
    //                m_popupmsgbox.setWindowTitle("Voice Recognizer");
    //                QString Phoneme=WordList.join(" ");
    //                qDebug()<<"firstword"<<Phoneme<<endl;
    //                QSpacerItem* horizontalSpacer = new QSpacerItem(500, 100, QSizePolicy::Minimum, QSizePolicy::Expanding);
    //                m_popupmsgbox.setText( Phoneme);
    //                QGridLayout* layout = (QGridLayout*)m_popupmsgbox.layout();
    //                layout->addItem(horizontalSpacer, layout->rowCount(), 0, 1, layout->columnCount());
    //                m_popupmsgbox.exec();
    //            }
    //            else
    //            {
    //                qDebug()<<"List is empty"<<endl;
    //            }
                /* LM entry sequence */
                //            qDebug()<<"wseq :"<< n+1<<endl;
                //            for(i=0;i<seqnum;i++)
                //                qDebug()<<winfo->wname[seq[i]]<<endl;

                /* phoneme sequence */
                //            qDebug()<<"phseq :"<< n+1<<endl;
                put_hypo_phoneme(seq, seqnum, winfo);

                /* confidence scores */
                //            qDebug()<<"cmscore :"<< n+1<<endl;
                //            for (i=0;i<seqnum; i++)
                //                qDebug()<<s->confidence[i]<<endl;

                /* AM and LM scores */
                //            qDebug()<<"Score :"<<n+1<<" "<<s->score<<endl;
                if (r->lmtype == LM_PROB) { /* if this process uses N-gram */
                    qDebug()<<"AM :"<<s->score_am<<"LM :"<<s->score_lm<<endl;
                }
                if (r->lmtype == LM_DFA) { /* if this process uses DFA grammar */
                    /* output which grammar the hypothesis belongs to
           when using multiple grammars */
                    if (multigram_get_all_num(r->lm) > 1) {
                        //                    printf("grammar%d: %d\n", n+1, s->gram_id);
                        qDebug()<<"grammar :"<<n+1<<" "<<s->gram_id<<endl;
                    }
                }
    
                /* output alignment result if exist */
                for (align = s->align; align; align = align->next) {
                    //                printf("=== begin forced alignment ===\n");
                    qDebug()<<"=== begin forced alignment ==="<<endl;
    
                    switch(align->unittype) {
                    case PER_WORD:
                        //                    printf("-- word alignment --\n");
                        qDebug()<<"-- word alignment --"<<endl;
                        break;
                    case PER_PHONEME:
                        //                    printf("-- phoneme alignment --\n");
                        qDebug()<<"-- phoneme alignment --"<<endl;
                        break;
                    case PER_STATE:
                        //                    printf("-- state alignment --\n");
                        qDebug()<<"-- state alignment --"<<endl;
                        break;
                    }
                    //                printf(" id: from  to    n_score    unit\n");
                    qDebug()<<" id: from  to    n_score    unit"<<endl;
                    //                printf(" ----------------------------------------\n");
                    qDebug()<<" ----------------------------------------"<<endl;
                    for(i=0;i<align->num;i++) {
                        qDebug("[%4d %4d]  %f  ", align->begin_frame[i], align->end_frame[i], align->avgscore[i]);
                        switch(align->unittype) {
                        case PER_WORD:
                            qDebug("%s\t[%s]\n", winfo->wname[align->w[i]], winfo->woutput[align->w[i]]);
                            break;
                        case PER_PHONEME:
                            p = align->ph[i];
                            if (p->is_pseudo) {
                                qDebug("{%s}\n", p->name);
                            } else if (strmatch(p->name, p->body.defined->name)) {
                                qDebug("%s\n", p->name);
                            } else {
                                qDebug("%s[%s]\n", p->name, p->body.defined->name);
                            }
                            break;
                        case PER_STATE:
                            p = align->ph[i];
                            if (p->is_pseudo) {
                                qDebug("{%s}", p->name);
                            } else if (strmatch(p->name, p->body.defined->name)) {
                                qDebug("%s", p->name);
                            } else {
                                qDebug("%s[%s]", p->name, p->body.defined->name);
                            }
                            if (r->am->hmminfo->multipath) {
                                if (align->is_iwsp[i]) {
                                    qDebug(" #%d (sp)\n", align->loc[i]);
                                } else {
                                    qDebug(" #%d\n", align->loc[i]);
                                }
                            } else {
                                qDebug(" #%d\n", align->loc[i]);
                            }
                            break;
                        }
                    }
    
                    qDebug("re-computed AM score: %f\n", align->allscore);
    
                    qDebug("=== end forced alignment ===\n");
                }
            }
        }
    }
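
    For completeness, the class declaration that the .cpp above compiles against would be roughly as follows. This is only a sketch reconstructed from the .cpp (member names are taken from it, everything else is assumed): the Julius callbacks need to be static members because callback_add takes plain function pointers, and vstartVoiceRecognition() has to be Q_INVOKABLE (or a slot) so the MouseArea's onClicked handler can call it.

    // voicerecognition.h (assumed file name)
    #ifndef VOICERECOGNITION_H
    #define VOICERECOGNITION_H

    #include <QObject>
    #include <julius/juliuslib.h>   // Julius headers; include path may differ

    class VoiceRecognition : public QObject
    {
        Q_OBJECT
    public:
        explicit VoiceRecognition(QObject *parent = nullptr) : QObject(parent) {}

        // Called from QML in the MouseArea's onClicked handler.
        Q_INVOKABLE int vstartVoiceRecognition();

    private:
        // Julius expects plain C function pointers, so the callbacks are static.
        static void status_recready(Recog *recog, void *dummy);
        static void status_recstart(Recog *recog, void *dummy);
        static void put_hypo_phoneme(WORD_ID *seq, int n, WORD_INFO *winfo);
        static void output_result(Recog *recog, void *dummy);

        Jconf *m_fileConfig = nullptr;
        Recog *m_recog = nullptr;
        int m_ret = 0;
    };

    #endif // VOICERECOGNITION_H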
    
