kio_recoll.h

#ifndef _RECOLL_H
#define _RECOLL_H
/* Copyright (C) 2005 J.F.Dockes
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the
 * Free Software Foundation, Inc.,
 * 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
 */

#include <string>
using std::string;

#include <qglobal.h>
#include <qstring.h>

#include <kurl.h>
#include <kio/global.h>
#include <kio/slavebase.h>
#include <kdeversion.h>

#include "rclconfig.h"
#include "rcldb.h"
#include "reslistpager.h"
#include "docseq.h"
#include "refcntr.h"

class RecollProtocol;
/** Specialize the recoll html pager for the kind of links we use etc. */
class RecollKioPager : public ResListPager {
public:
    RecollKioPager() : m_parent(0) {}
    void setParent(RecollProtocol *proto) {m_parent = proto;}

    virtual bool append(const string& data);
    virtual bool append(const string& data, int, const Rcl::Doc&)
    {return append(data);}
    virtual string detailsLink();
    virtual const string &parFormat();
    virtual string nextUrl();
    virtual string prevUrl();
    virtual string pageTop();

private:
    RecollProtocol *m_parent;
};
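
// A note on the hooks above (as far as can be inferred from the interface):
// ResListPager calls back into this subclass while building the html result
// list. append() accumulates the generated output, nextUrl()/prevUrl()
// presumably produce the navigation links for the adjacent result pages, and
// detailsLink(), parFormat() and pageTop() customize the page layout for
// this slave.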

class QueryDesc {
public:
    QueryDesc() : opt("l"), page(0), isDetReq(false) {}
    QString query;
    QString opt;
    int page;
    bool isDetReq;
    bool sameQuery(const QueryDesc& o) const {
        return !opt.compare(o.opt) && !query.compare(o.query);
    }
};
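
// Note that sameQuery() compares only the query text and the options: the
// page number and the details-request flag are ignored, presumably so that
// paging through an existing result set is not treated as a new search.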

// Our virtual tree is a bit complicated, so we use a dedicated class to
// analyze an incoming URL and decide what should be done with it.
class UrlIngester {
public:
    UrlIngester(RecollProtocol *p, const KUrl& url);
    enum RootEntryType {UIRET_NONE, UIRET_ROOT, UIRET_HELP, UIRET_SEARCH};
    bool isRootEntry(RootEntryType *tp) {
        if (m_type != UIMT_ROOTENTRY)
            return false;
        *tp = m_retType;
        return true;
    }
    bool isQuery(QueryDesc *q) {
        if (m_type != UIMT_QUERY)
            return false;
        *q = m_query;
        return true;
    }
    bool isResult(QueryDesc *q, int *num) {
        if (m_type != UIMT_QUERYRESULT)
            return false;
        *q = m_query;
        *num = m_resnum;
        return true;
    }
    bool isPreview(QueryDesc *q, int *num) {
        if (m_type != UIMT_PREVIEW)
            return false;
        *q = m_query;
        *num = m_resnum;
        return true;
    }
    bool endSlashQuery() {return m_slashend;}
    bool alwaysDir() {return m_alwaysdir;}

private:
    RecollProtocol *m_parent;
    QueryDesc m_query;
    bool m_slashend;
    bool m_alwaysdir;
    RootEntryType m_retType;
    int m_resnum;
    enum MyType {UIMT_NONE, UIMT_ROOTENTRY, UIMT_QUERY, UIMT_QUERYRESULT,
                 UIMT_PREVIEW};
    MyType m_type;
};
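
// Illustrative sketch of the classification performed by UrlIngester. The
// URL forms below are hypothetical examples, not the normative syntax
// (which is defined in the implementation file):
//
//     recoll:/                         -> isRootEntry(), UIRET_ROOT
//     recoll:/search.html              -> isRootEntry(), UIRET_SEARCH
//     recoll:/<query terms>            -> isQuery()
//     recoll:/<query terms>/<resnum>   -> isResult() or isPreview()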

/**
 * A KIO slave to execute and display Recoll searches.
 *
 * Things are made a little complicated because KIO slaves cannot
 * expect their internal state to remain consistent with the state of
 * the user application: slaves die, are restarted and are reused at
 * random between requests.
 * In our case, this means that every request has to be processed
 * without reference to the last operation performed. That is, if the
 * search parameters differ from those of the last request, the search
 * must be restarted from scratch. This happens, for example, with
 * different searches in two konqueror windows: typically only one
 * kio slave will be used for both.
 * Checking whether the search is the same as the previous one, to
 * avoid restarting it, is only an optimization, not the base
 * mechanism (contrary to what was initially assumed, which may have
 * left a few crumbs around).
 *
 * We have two modes of operation: one based on html forms and result
 * pages, which could potentially be developed to cover the full
 * Recoll functionality, and one based on a directory listing model,
 * which will always be more limited but may be useful in some cases,
 * e.g. to allow easy copying of files. Which one is used is decided
 * by the form of the URL.
 */
class RecollProtocol : public KIO::SlaveBase {
public:
    RecollProtocol(const QByteArray &pool, const QByteArray &app);
    virtual ~RecollProtocol();
    virtual void mimetype(const KUrl& url);
    virtual void get(const KUrl& url);
    // The directory mode is not available with KDE 4.0; I could find
    // no way to avoid crashing kdirmodel.
#if KDE_IS_VERSION(4,1,0)
    virtual void stat(const KUrl& url);
    virtual void listDir(const KUrl& url);
#endif

    static RclConfig *o_rclconfig;

    friend class RecollKioPager;
    friend class UrlIngester;

private:
    bool maybeOpenDb(string& reason);
    bool URLToQuery(const KUrl& url, QString& q, QString& opt, int *page = 0);
    bool doSearch(const QueryDesc& qd);
    void searchPage();
    void queryDetails();
    string makeQueryUrl(int page, bool isdet = false);
    bool syncSearch(const QueryDesc& qd);
    void htmlDoSearch(const QueryDesc& qd);
    void showPreview(const Rcl::Doc& doc);
    bool isRecollResult(const KUrl& url, int *num, QString* q);

    bool m_initok;
    Rcl::Db *m_rcldb;
    string m_reason;
    bool m_alwaysdir;

    // Search state: because of how KIO slaves are used and reused, we
    // can't be sure that the next request will be for the same search,
    // so we need to check and restart the search if the parameters have
    // changed. This is very wasteful but hopefully won't happen too
    // often in actual use. One possible workaround for some scenarios
    // (one slave, several konqueror windows) would be to keep a small
    // cache of recent searches open.
    RecollKioPager m_pager;
    RefCntr<DocSequence> m_source;
    // Note: the page field is not used here; the current page always
    // comes from m_pager.
    QueryDesc m_query;
};
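
// Rough sketch, assumed rather than taken from the implementation, of how a
// get() request for a query URL would flow through the members declared
// above:
//
//     void RecollProtocol::get(const KUrl& url)
//     {
//         UrlIngester ingest(this, url);
//         QueryDesc qd;
//         if (ingest.isQuery(&qd)) {
//             if (!syncSearch(qd))   // restart the search only if it changed
//                 return;
//             htmlDoSearch(qd);      // generate an html result page
//         }
//         // ... root entries, results and previews are handled similarly
//     }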
extern "C" {int kdemain(int, char**);}
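
// Minimal sketch, following the usual KDE 4 KIO slave conventions, of what
// the kdemain() entry point declared above typically looks like in the
// implementation file (an illustration, not the actual code):
//
//     extern "C" int kdemain(int argc, char **argv)
//     {
//         KComponentData instance("kio_recoll");
//         if (argc != 4)
//             exit(1);
//         RecollProtocol slave(argv[2], argv[3]);
//         slave.dispatchLoop();
//         return 0;
//     }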
#endif // _RECOLL_H