import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TopScoreDocCollector;
import org.apache.lucene.search.highlight.Fragmenter;
import org.apache.lucene.search.highlight.Highlighter;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.gitblit.Constants.SearchObjectType;
import com.gitblit.models.IssueModel;
import com.gitblit.models.IssueModel.Attachment;
import com.gitblit.models.PathModel.PathChangeModel;
import com.gitblit.models.RefModel;
import com.gitblit.models.RepositoryModel;
import com.gitblit.models.SearchResult;
import com.gitblit.utils.ArrayUtils;
import com.gitblit.utils.IssueUtils;
import com.gitblit.utils.JGitUtils;
import com.gitblit.utils.StringUtils;

/**
 * The Lucene executor handles indexing and searching repositories.
 * 
 * @author James Moger
 * 
 */
public class LuceneExecutor implements Runnable {

    private static final int INDEX_VERSION = 5;

    private static final String FIELD_OBJECT_TYPE = "type";
    private static final String FIELD_ISSUE = "issue";
    private static final String FIELD_PATH = "path";
    private static final String FIELD_COMMIT = "commit";
    private static final String FIELD_BRANCH = "branch";
    private static final String FIELD_SUMMARY = "summary";
    private static final String FIELD_CONTENT = "content";
    private static final String FIELD_AUTHOR = "author";
    private static final String FIELD_COMMITTER = "committer";
    private static final String FIELD_DATE = "date";
    private static final String FIELD_TAG = "tag";
    private static final String FIELD_LABEL = "label";
    private static final String FIELD_ATTACHMENT = "attachment";

    private static final String CONF_FILE = "lucene.conf";
    private static final String LUCENE_DIR = "lucene";
    private static final String CONF_INDEX = "index";
    private static final String CONF_VERSION = "version";
    private static final String CONF_ALIAS = "aliases";
    private static final String CONF_BRANCH = "branches";

    private static final Version LUCENE_VERSION = Version.LUCENE_35;

    private final Logger logger = LoggerFactory.getLogger(LuceneExecutor.class);

    private final IStoredSettings storedSettings;
    private final File repositoriesFolder;

    private final Map<String, IndexSearcher> searchers = new ConcurrentHashMap<String, IndexSearcher>();
    private final Map<String, IndexWriter> writers = new ConcurrentHashMap<String, IndexWriter>();

    private final String luceneIgnoreExtensions = "7z arc arj bin bmp dll doc docx exe gif gz jar jpg lib lzh odg odf odt pdf ppt png so swf xcf xls xlsx zip";
    private Set<String> excludedExtensions;

    public LuceneExecutor(IStoredSettings settings, File repositoriesFolder) {
        this.storedSettings = settings;
        this.repositoriesFolder = repositoriesFolder;
        this.excludedExtensions = new TreeSet<String>(StringUtils.getStringsFromValue(luceneIgnoreExtensions));
    }

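    // Illustrative usage, assuming the executor is driven by a scheduled service
    // (the settings and repositoriesFolder names below are placeholders, not
    // taken from this file):
    //
    //   LuceneExecutor lucene = new LuceneExecutor(settings, repositoriesFolder);
    //   ScheduledExecutorService scheduler = Executors.newScheduledThreadPool(1);
    //   scheduler.scheduleAtFixedRate(lucene, 1, 2, TimeUnit.MINUTES);
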
    /**
     * Run is executed by the Gitblit executor service. Because this is called
     * by an executor service, calls will queue - i.e. there can never be
     * concurrent execution of repository index updates.
     */
    @Override
    public void run() {
        if (GitBlit.self().isCollectingGarbage()) {
            // busy collecting garbage, try again later
            return;
        }

        for (String repositoryName : GitBlit.self().getRepositoryList()) {
            RepositoryModel model = GitBlit.self().getRepositoryModel(repositoryName);
            if (model.hasCommits && !ArrayUtils.isEmpty(model.indexedBranches)) {
                Repository repository = GitBlit.self().getRepository(model.name);
                if (repository == null) {
                    // the repository could not be opened, try again on the next run
                    continue;
                }
                index(model, repository);
                repository.close();
                System.gc();
            }
        }
    }

    /**
     * Synchronously indexes a repository. This may build a complete index of a
     * repository or it may update an existing index.
     * 
     * @param model
     *            the model of the repository
     * @param repository
     *            the repository instance
     */
    public void index(RepositoryModel model, Repository repository) {
        try {
            if (shouldReindex(repository)) {
                // (re)build the entire index
                reindex(model, repository);
            } else {
                // update the index with the latest commits
                updateIndex(model, repository);
            }
        } catch (Throwable t) {
            logger.error(MessageFormat.format("Lucene indexing failure for {0}", model.name), t);
        }
    }

    /**
     * Close the writer/searcher objects for a repository.
     * 
     * @param repositoryName
     */
    public synchronized void close(String repositoryName) {
        try {
            IndexSearcher searcher = searchers.remove(repositoryName);
            if (searcher != null) {
                searcher.close();
            }
        } catch (Exception e) {
            logger.error("Failed to close index searcher for " + repositoryName, e);
        }

        try {
            IndexWriter writer = writers.remove(repositoryName);
            if (writer != null) {
                writer.close();
            }
        } catch (Exception e) {
            logger.error("Failed to close index writer for " + repositoryName, e);
        }
    }

    /**
     * Close all Lucene indexers.
     * 
     */
    public synchronized void close() {
        // close all writers and searchers
        for (String repository : writers.keySet()) {
            close(repository);
        }
        for (String repository : searchers.keySet()) {
            close(repository);
        }
        writers.clear();
        searchers.clear();
    }

    /**
     * Deletes the Lucene index for the specified repository.
     * 
     * @param repositoryName
     * @return true, if successful
     */
    public boolean deleteIndex(String repositoryName) {
        try {
            // close any cached writer/searcher and delete the index folder
            close(repositoryName);
            File repositoryFolder = FileKey.resolve(new File(repositoriesFolder, repositoryName), FS.DETECTED);
            File luceneIndex = new File(repositoryFolder, LUCENE_DIR);
            if (luceneIndex.exists()) {
                org.eclipse.jgit.util.FileUtils.delete(luceneIndex, org.eclipse.jgit.util.FileUtils.RECURSIVE);
            }
            return true;
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Returns the author for the commit, if this information is available.
     * 
     * @param commit
     * @return an author or unknown
     */
    private String getAuthor(RevCommit commit) {
        String name = "unknown";
        try {
            name = commit.getAuthorIdent().getName();
            if (StringUtils.isEmpty(name)) {
                name = commit.getAuthorIdent().getEmailAddress();
            }
        } catch (NullPointerException n) {
        }
        return name;
    }

    /**
     * Returns the committer for the commit, if this information is available.
     * 
     * @param commit
     * @return a committer or unknown
     */
    private String getCommitter(RevCommit commit) {
        String name = "unknown";
        try {
            name = commit.getCommitterIdent().getName();
            if (StringUtils.isEmpty(name)) {
                name = commit.getCommitterIdent().getEmailAddress();
            }
        } catch (NullPointerException n) {
        }
        return name;
    }

    /**
     * Get the tree associated with the given commit.
     * 
     * @param walk
     * @param commit
     * @return the commit tree
     * @throws IOException
     */
    private RevTree getTree(final RevWalk walk, final RevCommit commit) throws IOException {
        final RevTree tree = commit.getTree();
        if (tree != null) {
            return tree;
        }
        return walk.parseTree(commit.getTreeId());
    }

    /**
     * Construct a keyname from the branch.
     * 
     * @param branchName
     * @return a keyname appropriate for the Git config file format
     */
    private String getBranchKey(String branchName) {
        return StringUtils.getSHA1(branchName);
    }

    /**
     * Returns the Lucene configuration for the specified repository.
     * 
     * @param repository
     * @return a config object
     */
    private FileBasedConfig getConfig(Repository repository) {
        File file = new File(repository.getDirectory(), CONF_FILE);
        return new FileBasedConfig(file, FS.detect());
    }

    /**
     * Reads the Lucene config file for the repository to check the index
     * version. If the index version is different, then rebuild the repository
     * index.
     * 
     * @param repository
     * @return true if the on-disk index format is different than INDEX_VERSION
     */
    private boolean shouldReindex(Repository repository) {
        try {
            FileBasedConfig config = getConfig(repository);
            config.load();
            int indexVersion = config.getInt(CONF_INDEX, CONF_VERSION, 0);
            // reindex if the on-disk index version does not match INDEX_VERSION
            return indexVersion != INDEX_VERSION;
        } catch (Throwable t) {
        }
        return true;
    }

    /**
     * This completely indexes the repository and will destroy any existing
     * index.
     * 
     * @param model
     * @param repository
     * @return IndexResult
     */
    public IndexResult reindex(RepositoryModel model, Repository repository) {
        IndexResult result = new IndexResult();
        if (!deleteIndex(model.name)) {
            return result;
        }
        try {
            String[] encodings = storedSettings.getStrings(Keys.web.blobEncodings).toArray(new String[0]);
            FileBasedConfig config = getConfig(repository);
            IndexWriter writer = getIndexWriter(model.name);

            // build a quick lookup of annotated tags, keyed by tagged commit id
            Map<String, List<String>> tags = new HashMap<String, List<String>>();
            for (RefModel tag : JGitUtils.getTags(repository, false, -1)) {
                if (!tag.isAnnotatedTag()) {
                    // skip lightweight tags
                    continue;
                }
                String commitId = tag.getReferencedObjectId().getName();
                if (!tags.containsKey(commitId)) {
                    tags.put(commitId, new ArrayList<String>());
                }
                tags.get(tag.getReferencedObjectId().getName()).add(tag.displayName);
            }

            ObjectReader reader = repository.newObjectReader();

            // get the local branches
            List<RefModel> branches = JGitUtils.getLocalBranches(repository, true, -1);

            // sort them by most recently updated
            Collections.sort(branches, new Comparator<RefModel>() {
                @Override
                public int compare(RefModel ref1, RefModel ref2) {
                    return ref2.getDate().compareTo(ref1.getDate());
                }
            });

            // reorder default branch to first position
            RefModel defaultBranch = null;
            ObjectId defaultBranchId = JGitUtils.getDefaultBranch(repository);
            for (RefModel branch : branches) {
                if (branch.getObjectId().equals(defaultBranchId)) {
                    defaultBranch = branch;
                    break;
                }
            }
            branches.remove(defaultBranch);
            branches.add(0, defaultBranch);

            // walk through each branch
            for (RefModel branch : branches) {

                boolean indexBranch = false;
                if (model.indexedBranches.contains(com.gitblit.Constants.DEFAULT_BRANCH)
                        && branch.equals(defaultBranch)) {
                    // indexing "default" branch
                    indexBranch = true;
                } else if (IssueUtils.GB_ISSUES.equals(branch)) {
                    // skip the GB_ISSUES branch because it is indexed later
                    // note: this is different than updateIndex
                    indexBranch = false;
                } else if (branch.getName().startsWith(com.gitblit.Constants.R_GITBLIT)) {
                    // skip Gitblit internal branches
                    indexBranch = false;
                } else {
                    // normal explicit branch check
                    indexBranch = model.indexedBranches.contains(branch.getName());
                }

                // if this branch is not specifically indexed then skip
                if (!indexBranch) {
                    continue;
                }

                String branchName = branch.getName();
                RevWalk revWalk = new RevWalk(reader);
                RevCommit tip = revWalk.parseCommit(branch.getObjectId());

                // index the blob contents of the tree
                TreeWalk treeWalk = new TreeWalk(repository);
                treeWalk.addTree(tip.getTree());
                treeWalk.setRecursive(true);

                Map<String, ObjectId> paths = new TreeMap<String, ObjectId>();
                while (treeWalk.next()) {
                    // ensure path is not in a submodule
                    if (treeWalk.getFileMode(0) != FileMode.GITLINK) {
                        paths.put(treeWalk.getPathString(), treeWalk.getObjectId(0));
                    }
                }

                ByteArrayOutputStream os = new ByteArrayOutputStream();
                byte[] tmp = new byte[32767];

                RevWalk commitWalk = new RevWalk(reader);
                commitWalk.markStart(tip);

                RevCommit commit;
                while ((paths.size() > 0) && (commit = commitWalk.next()) != null) {
                    // walk the paths changed by this commit so each blob is
                    // attributed to the most recent commit that touched it
                    TreeWalk diffWalk = new TreeWalk(reader);
                    if (commit.getParentCount() > 0) {
                        diffWalk.addTree(getTree(commitWalk, commit.getParent(0)));
                    } else {
                        diffWalk.addTree(new EmptyTreeIterator());
                    }
                    diffWalk.addTree(getTree(commitWalk, commit));
                    diffWalk.setFilter(TreeFilter.ANY_DIFF);
                    diffWalk.setRecursive(true);
                    while ((paths.size() > 0) && diffWalk.next()) {
                        String path = diffWalk.getPathString();
                        if (!paths.containsKey(path)) {
                            continue;
                        }

                        // remove path from set
                        ObjectId blobId = paths.remove(path);
                        result.blobCount++;

                        // index the blob metadata
                        String blobAuthor = getAuthor(commit);
                        String blobCommitter = getCommitter(commit);
                        String blobDate = DateTools.timeToString(commit.getCommitTime() * 1000L,
                                Resolution.MINUTE);

                        Document doc = new Document();
                        doc.add(new Field(FIELD_OBJECT_TYPE, SearchObjectType.blob.name(), Store.YES, Index.NOT_ANALYZED_NO_NORMS));
                        doc.add(new Field(FIELD_BRANCH, branchName, Store.YES, Index.ANALYZED));
                        doc.add(new Field(FIELD_COMMIT, commit.getName(), Store.YES, Index.ANALYZED));
                        doc.add(new Field(FIELD_PATH, path, Store.YES, Index.ANALYZED));
                        doc.add(new Field(FIELD_DATE, blobDate, Store.YES, Index.NO));
                        doc.add(new Field(FIELD_AUTHOR, blobAuthor, Store.YES, Index.ANALYZED));
                        doc.add(new Field(FIELD_COMMITTER, blobCommitter, Store.YES, Index.ANALYZED));

                        // determine extension to compare to the extension
                        // blacklist
                        String ext = null;
                        String name = path.toLowerCase();
                        if (name.indexOf('.') > -1) {
                            ext = name.substring(name.lastIndexOf('.') + 1);
                        }

                        // index the blob content
                        if (StringUtils.isEmpty(ext) || !excludedExtensions.contains(ext)) {
                            ObjectLoader ldr = repository.open(blobId, Constants.OBJ_BLOB);
                            InputStream in = ldr.openStream();
                            int n;
                            while ((n = in.read(tmp)) > 0) {
                                os.write(tmp, 0, n);
                            }
                            in.close();
                            byte[] content = os.toByteArray();
                            String str = StringUtils.decodeString(content, encodings);
                            doc.add(new Field(FIELD_CONTENT, str, Store.YES, Index.ANALYZED));
                            os.reset();
                        }

                        // add the blob to the index
                        writer.addDocument(doc);
                    }
                }

            }

            // finished
            reader.release();

            // this repository has a gb-issues branch, index all issues
            if (IssueUtils.getIssuesBranch(repository) != null) {
                List<IssueModel> issues = IssueUtils.getIssues(repository, null);
                if (issues.size() > 0) {
                    result.branchCount += 1;
                }
                for (IssueModel issue : issues) {
                    result.issueCount++;
                    Document doc = createDocument(issue);
                    writer.addDocument(doc);
                }
            }

            // commit all changes and reset the searcher
            config.setInt(CONF_INDEX, null, CONF_VERSION, INDEX_VERSION);
            config.save();
            writer.commit();
            result.success();
        } catch (Exception e) {
            logger.error("Exception while reindexing " + model.name, e);
        }
        return result;
    }

    /**
     * Incrementally update the index with the specified commit for the
     * repository.
     * 
     * @param repositoryName
     * @param repository
     * @param branch
     *            the fully qualified branch name (e.g. refs/heads/master)
     * @param commit
     * @return the index result
     */
    private IndexResult index(String repositoryName, Repository repository,
            String branch, RevCommit commit) {
        IndexResult result = new IndexResult();
        try {
            // delete and re-index the blobs changed by this commit
            List<PathChangeModel> changedPaths = JGitUtils.getFilesInCommit(repository, commit);
            String revDate = DateTools.timeToString(commit.getCommitTime() * 1000L, Resolution.MINUTE);
            IndexWriter writer = getIndexWriter(repositoryName);
            for (PathChangeModel path : changedPaths) {
                // delete the indexed blob
                deleteBlob(repositoryName, branch, path.path);

                // re-index the blob
                if (ChangeType.DELETE != path.changeType) {
                    result.blobCount++;
                    Document doc = new Document();
                    doc.add(new Field(FIELD_OBJECT_TYPE, SearchObjectType.blob.name(), Store.YES, Index.NOT_ANALYZED_NO_NORMS));
                    doc.add(new Field(FIELD_BRANCH, branch, Store.YES, Index.ANALYZED));
                    doc.add(new Field(FIELD_COMMIT, commit.getName(), Store.YES, Index.ANALYZED));
                    doc.add(new Field(FIELD_PATH, path.path, Store.YES, Index.ANALYZED));
                    doc.add(new Field(FIELD_DATE, revDate, Store.YES, Index.NO));
                    doc.add(new Field(FIELD_AUTHOR, getAuthor(commit), Store.YES, Index.ANALYZED));
                    doc.add(new Field(FIELD_COMMITTER, getCommitter(commit), Store.YES, Index.ANALYZED));
                    String str = JGitUtils.getStringContent(repository, commit.getTree(), path.path);
                    doc.add(new Field(FIELD_CONTENT, str, Store.YES, Index.ANALYZED));
                    writer.addDocument(doc);
                }
            }
            writer.commit();

            // get any annotated commit tags
            List<String> commitTags = new ArrayList<String>();
            for (RefModel ref : JGitUtils.getTags(repository, false, -1)) {
                if (ref.isAnnotatedTag() && ref.getReferencedObjectId().equals(commit.getId())) {
                    commitTags.add(ref.displayName);
                }
            }

            // create and write the Lucene document for the commit
            Document doc = createDocument(commit, commitTags);
            doc.add(new Field(FIELD_BRANCH, branch, Store.YES, Index.ANALYZED));
            result.commitCount++;
            result.success = index(repositoryName, doc);
        } catch (Exception e) {
            logger.error(MessageFormat.format("Exception while indexing commit {0} in {1}",
                    commit.getId().getName(), repositoryName), e);
        }
        return result;
    }

    /**
     * Incrementally update the index with the specified issue for the
     * repository.
     * 
     * @param repositoryName
     * @param issue
     * @return true, if successful
     */
    public boolean index(String repositoryName, IssueModel issue) {
        try {
            // delete the old issue from the index, if it exists
            deleteIssue(repositoryName, issue.id);
            Document doc = createDocument(issue);
            return index(repositoryName, doc);
        } catch (Exception e) {
            logger.error(MessageFormat.format("Error while indexing issue {0} in {1}", issue.id, repositoryName), e);
        }
        return false;
    }

    /**
     * Delete an issue from the repository index.
     * 
     * @param repositoryName
     * @param issueId
     * @throws Exception
     * @return true, if deleted, false if no record was deleted
     */
    private boolean deleteIssue(String repositoryName, String issueId) throws Exception {
        BooleanQuery query = new BooleanQuery();
        Term objectTerm = new Term(FIELD_OBJECT_TYPE, SearchObjectType.issue.name());
        query.add(new TermQuery(objectTerm), Occur.MUST);
        Term issueidTerm = new Term(FIELD_ISSUE, issueId);
        query.add(new TermQuery(issueidTerm), Occur.MUST);

        IndexWriter writer = getIndexWriter(repositoryName);
        int numDocsBefore = writer.numDocs();
        writer.deleteDocuments(query);
        writer.commit();
        int numDocsAfter = writer.numDocs();
        if (numDocsBefore == numDocsAfter) {
            logger.debug(MessageFormat.format("no records found to delete {0}", query.toString()));
            return false;
        } else {
            logger.debug(MessageFormat.format("deleted {0} records with {1}", numDocsBefore - numDocsAfter, query.toString()));
            return true;
        }
    }

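    // For example, deleting issue "0123456789ab" (a placeholder id) builds the
    // boolean query below; both clauses are MUST, so only documents of type
    // "issue" with that exact id are removed:
    //
    //   +type:issue +issue:0123456789ab
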
    /**
     * Delete a blob from the specified branch of the repository index.
     * 
     * @param repositoryName
     * @param branch
     * @param path
     * @throws Exception
     * @return true, if deleted, false if no record was deleted
     */
    public boolean deleteBlob(String repositoryName, String branch, String path) throws Exception {
        String pattern = MessageFormat.format("{0}:'{'0} AND {1}:\"'{'1'}'\" AND {2}:\"'{'2'}'\"", FIELD_OBJECT_TYPE, FIELD_BRANCH, FIELD_PATH);
        String q = MessageFormat.format(pattern, SearchObjectType.blob.name(), branch, path);

        BooleanQuery query = new BooleanQuery();
        StandardAnalyzer analyzer = new StandardAnalyzer(LUCENE_VERSION);
        QueryParser qp = new QueryParser(LUCENE_VERSION, FIELD_SUMMARY, analyzer);
        query.add(qp.parse(q), Occur.MUST);

        IndexWriter writer = getIndexWriter(repositoryName);
        int numDocsBefore = writer.numDocs();
        writer.deleteDocuments(query);
        writer.commit();
        int numDocsAfter = writer.numDocs();
        if (numDocsBefore == numDocsAfter) {
            logger.debug(MessageFormat.format("no records found to delete {0}", query.toString()));
            return false;
        } else {
            logger.debug(MessageFormat.format("deleted {0} records with {1}", numDocsBefore - numDocsAfter, query.toString()));
            return true;
        }
    }

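    // For example, deleteBlob("myrepo.git", "refs/heads/master", "src/Foo.java")
    // (placeholder values) parses and deletes documents matching:
    //
    //   type:blob AND branch:"refs/heads/master" AND path:"src/Foo.java"
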
    /**
     * Updates a repository index incrementally from the last indexed commits.
     * 
     * @param model
     * @param repository
     * @return IndexResult
     */
    private IndexResult updateIndex(RepositoryModel model, Repository repository) {
        IndexResult result = new IndexResult();
        try {
            FileBasedConfig config = getConfig(repository);
            config.load();

            // determine the branches that were previously indexed
            Set<String> deletedBranches = new TreeSet<String>();
            for (String alias : config.getNames(CONF_ALIAS)) {
                String branch = config.getString(CONF_ALIAS, null, alias);
                deletedBranches.add(branch);
            }

            // get the local branches
            List<RefModel> branches = JGitUtils.getLocalBranches(repository, true, -1);

            // sort them by most recently updated
            Collections.sort(branches, new Comparator<RefModel>() {
                @Override
                public int compare(RefModel ref1, RefModel ref2) {
                    return ref2.getDate().compareTo(ref1.getDate());
                }
            });

            // reorder default branch to first position
            RefModel defaultBranch = null;
            ObjectId defaultBranchId = JGitUtils.getDefaultBranch(repository);
            for (RefModel branch : branches) {
                if (branch.getObjectId().equals(defaultBranchId)) {
                    defaultBranch = branch;
                    break;
                }
            }
            branches.remove(defaultBranch);
            branches.add(0, defaultBranch);

            // walk through each branch
            for (RefModel branch : branches) {
                String branchName = branch.getName();

                boolean indexBranch = false;
                if (model.indexedBranches.contains(com.gitblit.Constants.DEFAULT_BRANCH)
                        && branch.equals(defaultBranch)) {
                    // indexing "default" branch
                    indexBranch = true;
                } else if (IssueUtils.GB_ISSUES.equals(branch)) {
                    // update issues modified on the GB_ISSUES branch
                    // note: this is different than reindex
                    indexBranch = true;
                } else if (branch.getName().startsWith(com.gitblit.Constants.R_GITBLIT)) {
                    // ignore internal Gitblit branches
                    indexBranch = false;
                } else {
                    // normal explicit branch check
                    indexBranch = model.indexedBranches.contains(branch.getName());
                }

                // if this branch is not specifically indexed then skip
                if (!indexBranch) {
                    continue;
                }

                // remove this branch from the deletedBranches set
                deletedBranches.remove(branchName);

                // determine last commit
                String keyName = getBranchKey(branchName);
                String lastCommit = config.getString(CONF_BRANCH, null, keyName);

                // get the commits added to the branch since the last indexed commit
                List<RevCommit> revs;
                if (StringUtils.isEmpty(lastCommit)) {
                    // new or previously unindexed branch, get all commits on the branch
                    revs = JGitUtils.getRevLog(repository, branchName, null, 0, -1);
                } else {
                    // pre-existing branch, get changes since the last indexed commit
                    revs = JGitUtils.getRevLog(repository, lastCommit, branchName);
                }

                if (revs.size() > 0) {
                    result.branchCount += 1;
                }

                // track the issue ids that we have already indexed
                Set<String> indexedIssues = new TreeSet<String>();

                // reverse the list of commits so we start with the first commit
                Collections.reverse(revs);
                for (RevCommit commit : revs) {
                    if (IssueUtils.GB_ISSUES.equals(branch)) {
                        // only index an issue once during updateIndex
                        String issueId = commit.getShortMessage().substring(2).trim();
                        if (indexedIssues.contains(issueId)) {
                            continue;
                        }
                        indexedIssues.add(issueId);

                        IssueModel issue = IssueUtils.getIssue(repository, issueId);
                        if (issue == null) {
                            // issue was deleted, remove from index
                            if (!deleteIssue(model.name, issueId)) {
                                logger.error(MessageFormat.format("Failed to delete issue {0} from Lucene index!", issueId));
                            }
                        } else {
                            // issue was updated
                            index(model.name, issue);
                            result.issueCount++;
                        }
                    } else {
                        // index a commit
                        result.add(index(model.name, repository, branchName, commit));
                    }
                }

                // update the config with the last indexed commit for this branch
                config.setString(CONF_ALIAS, null, keyName, branchName);
                config.setString(CONF_BRANCH, null, keyName, branch.getObjectId().getName());
                config.save();
            }

            result.success();
        } catch (Exception e) {
            logger.error(MessageFormat.format("Exception while updating {0} Lucene index", model.name), e);
        }
        return result;
    }

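    // After an update, the per-repository lucene.conf roughly resembles the
    // sketch below (keys and values are illustrative); the aliases section maps
    // a branch key to its name and the branches section records the last
    // indexed commit:
    //
    //   [index]
    //     version = 5
    //   [aliases]
    //     <branchKey> = refs/heads/master
    //   [branches]
    //     <branchKey> = <last indexed commit id>
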
    /**
     * Creates a Lucene document from an issue.
     * 
     * @param issue
     * @return a Lucene document
     */
    private Document createDocument(IssueModel issue) {
        Document doc = new Document();
        doc.add(new Field(FIELD_OBJECT_TYPE, SearchObjectType.issue.name(), Store.YES,
                Field.Index.NOT_ANALYZED));
        doc.add(new Field(FIELD_ISSUE, issue.id, Store.YES, Index.ANALYZED));
        doc.add(new Field(FIELD_BRANCH, IssueUtils.GB_ISSUES, Store.YES, Index.ANALYZED));
        doc.add(new Field(FIELD_DATE, DateTools.dateToString(issue.created, Resolution.MINUTE),
                Store.YES, Field.Index.NO));
        doc.add(new Field(FIELD_AUTHOR, issue.reporter, Store.YES, Index.ANALYZED));
        List<String> attachments = new ArrayList<String>();
        for (Attachment attachment : issue.getAttachments()) {
            attachments.add(attachment.name.toLowerCase());
        }
        doc.add(new Field(FIELD_ATTACHMENT, StringUtils.flattenStrings(attachments), Store.YES,
                Index.ANALYZED));
        doc.add(new Field(FIELD_SUMMARY, issue.summary, Store.YES, Index.ANALYZED));
        doc.add(new Field(FIELD_CONTENT, issue.toString(), Store.YES, Index.ANALYZED));
        doc.add(new Field(FIELD_LABEL, StringUtils.flattenStrings(issue.getLabels()), Store.YES,
                Index.ANALYZED));
        return doc;
    }

    /**
     * Creates a Lucene document for a commit.
     * 
     * @param commit
     * @param tags
     * @return a Lucene document
     */
    private Document createDocument(RevCommit commit, List<String> tags) {
        Document doc = new Document();
        doc.add(new Field(FIELD_OBJECT_TYPE, SearchObjectType.commit.name(), Store.YES,
                Index.NOT_ANALYZED));
        doc.add(new Field(FIELD_COMMIT, commit.getName(), Store.YES, Index.ANALYZED));
        doc.add(new Field(FIELD_DATE, DateTools.timeToString(commit.getCommitTime() * 1000L,
                Resolution.MINUTE), Store.YES, Index.NO));
        doc.add(new Field(FIELD_AUTHOR, getAuthor(commit), Store.YES, Index.ANALYZED));
        doc.add(new Field(FIELD_COMMITTER, getCommitter(commit), Store.YES, Index.ANALYZED));
        doc.add(new Field(FIELD_SUMMARY, commit.getShortMessage(), Store.YES, Index.ANALYZED));
        doc.add(new Field(FIELD_CONTENT, commit.getFullMessage(), Store.YES, Index.ANALYZED));
        if (!ArrayUtils.isEmpty(tags)) {
            doc.add(new Field(FIELD_TAG, StringUtils.flattenStrings(tags), Store.YES, Index.ANALYZED));
        }
        return doc;
    }

    /**
     * Incrementally index an object for the repository.
     * 
     * @param repositoryName
     * @param doc
     * @return true, if successful
     */
    private boolean index(String repositoryName, Document doc) {
        try {
            IndexWriter writer = getIndexWriter(repositoryName);
            writer.addDocument(doc);
            writer.commit();
            return true;
        } catch (Exception e) {
            logger.error(MessageFormat.format("Exception while incrementally updating {0} Lucene index", repositoryName), e);
        }
        return false;
    }

    /**
     * Creates a search result from a Lucene document.
     * 
     * @param doc
     * @param score
     * @param totalHits
     * @return a search result
     * @throws ParseException
     */
    private SearchResult createSearchResult(Document doc, float score, int totalHits) throws ParseException {
        SearchResult result = new SearchResult();
        result.totalHits = totalHits;
        result.score = score;
        result.date = DateTools.stringToDate(doc.get(FIELD_DATE));
        result.summary = doc.get(FIELD_SUMMARY);
        result.author = doc.get(FIELD_AUTHOR);
        result.committer = doc.get(FIELD_COMMITTER);
        result.type = SearchObjectType.fromName(doc.get(FIELD_OBJECT_TYPE));
        result.branch = doc.get(FIELD_BRANCH);
        result.commitId = doc.get(FIELD_COMMIT);
        result.issueId = doc.get(FIELD_ISSUE);
        result.path = doc.get(FIELD_PATH);
        if (doc.get(FIELD_TAG) != null) {
            result.tags = StringUtils.getStringsFromValue(doc.get(FIELD_TAG));
        }
        if (doc.get(FIELD_LABEL) != null) {
            result.labels = StringUtils.getStringsFromValue(doc.get(FIELD_LABEL));
        }
        return result;
    }

    /**
     * Gets an index searcher for the repository.
     * 
     * @param repository
     * @return an IndexSearcher
     * @throws IOException
     */
    private IndexSearcher getIndexSearcher(String repository) throws IOException {
        IndexSearcher searcher = searchers.get(repository);
        if (searcher == null) {
            IndexWriter writer = getIndexWriter(repository);
            searcher = new IndexSearcher(IndexReader.open(writer, true));
            searchers.put(repository, searcher);
        }
        return searcher;
    }

    /**
     * Gets an index writer for the repository. The index will be created if it
     * does not already exist.
     * 
     * @param repository
     * @return an IndexWriter
     * @throws IOException
     */
    private IndexWriter getIndexWriter(String repository) throws IOException {
        IndexWriter indexWriter = writers.get(repository);
        File repositoryFolder = FileKey.resolve(new File(repositoriesFolder, repository), FS.DETECTED);
        File indexFolder = new File(repositoryFolder, LUCENE_DIR);
        Directory directory = FSDirectory.open(indexFolder);

        if (indexWriter == null) {
            if (!indexFolder.exists()) {
                indexFolder.mkdirs();
            }
            StandardAnalyzer analyzer = new StandardAnalyzer(LUCENE_VERSION);
            IndexWriterConfig config = new IndexWriterConfig(LUCENE_VERSION, analyzer);
            config.setOpenMode(OpenMode.CREATE_OR_APPEND);
            indexWriter = new IndexWriter(directory, config);
            writers.put(repository, indexWriter);
        }
        return indexWriter;
    }

    /**
     * Searches the specified repositories for the given text or query.
     * 
     * @param text
     *            if the text is null or empty, null is returned
     * @param page
     *            the page number
     * @param pageSize
     *            the number of elements to return for this page
     * @param repositories
     *            a list of repositories to search. if no repositories are
     *            specified null is returned.
     * @return a list of SearchResults in order from highest to the lowest score
     * 
     */
    public List<SearchResult> search(String text, int page, int pageSize, List<String> repositories) {
        if (ArrayUtils.isEmpty(repositories)) {
            return null;
        }
        return search(text, page, pageSize, repositories.toArray(new String[0]));
    }

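    // Illustrative usage (repository names are placeholders): page 1 with up to
    // 25 results, ordered from highest to lowest score:
    //
    //   List<SearchResult> matches = lucene.search("treemap", 1, 25,
    //           Arrays.asList("myrepo.git", "otherrepo.git"));
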
    /**
     * Searches the specified repositories for the given text or query.
     * 
     * @param text
     *            if the text is null or empty, null is returned
     * @param page
     *            the page number
     * @param pageSize
     *            the number of elements to return for this page
     * @param repositories
     *            a list of repositories to search. if no repositories are
     *            specified null is returned.
     * @return a list of SearchResults in order from highest to the lowest score
     * 
     */
    public List<SearchResult> search(String text, int page, int pageSize, String... repositories) {
        if (StringUtils.isEmpty(text)) {
            return null;
        }
        if (ArrayUtils.isEmpty(repositories)) {
            return null;
        }
        Set<SearchResult> results = new LinkedHashSet<SearchResult>();
        StandardAnalyzer analyzer = new StandardAnalyzer(LUCENE_VERSION);
        try {
            // the default search checks both summary and content
            BooleanQuery query = new BooleanQuery();
            QueryParser qp;

            qp = new QueryParser(LUCENE_VERSION, FIELD_SUMMARY, analyzer);
            qp.setAllowLeadingWildcard(true);
            query.add(qp.parse(text), Occur.SHOULD);

            qp = new QueryParser(LUCENE_VERSION, FIELD_CONTENT, analyzer);
            qp.setAllowLeadingWildcard(true);
            query.add(qp.parse(text), Occur.SHOULD);

            IndexSearcher searcher;
            MultiSourceReader reader = null;
            if (repositories.length == 1) {
                // single repository search
                searcher = getIndexSearcher(repositories[0]);
            } else {
                // multiple repository search
                List<IndexReader> readers = new ArrayList<IndexReader>();
                for (String repository : repositories) {
                    IndexSearcher repositoryIndex = getIndexSearcher(repository);
                    readers.add(repositoryIndex.getIndexReader());
                }
                IndexReader[] rdrs = readers.toArray(new IndexReader[readers.size()]);
                reader = new MultiSourceReader(rdrs);
                searcher = new IndexSearcher(reader);
            }

            Query rewrittenQuery = searcher.rewrite(query);
            logger.debug(rewrittenQuery.toString());

            TopScoreDocCollector collector = TopScoreDocCollector.create(5000, true);
            searcher.search(rewrittenQuery, collector);
            int offset = Math.max(0, (page - 1) * pageSize);
            ScoreDoc[] hits = collector.topDocs(offset, pageSize).scoreDocs;
            for (ScoreDoc hit : hits) {
                int docId = hit.doc;
                Document doc = searcher.doc(docId);
                SearchResult result = createSearchResult(doc, hit.score, collector.getTotalHits());
                if (repositories.length == 1) {
                    // single repository search
                    result.repository = repositories[0];
                } else {
                    // determine the source repository from the doc id
                    int index = reader.getSourceIndex(docId);
                    result.repository = repositories[index];
                }
                String content = doc.get(FIELD_CONTENT);
                result.fragment = getHighlightedFragment(analyzer, query, content, result);
                results.add(result);
            }
        } catch (Exception e) {
            logger.error(MessageFormat.format("Exception while searching for {0}", text), e);
        }
        return new ArrayList<SearchResult>(results);
    }

    /**
     * Generates a highlighted fragment for the search result.
     * 
     * @param analyzer
     * @param query
     * @param content
     * @param result
     * @return a highlighted fragment of the matched content
     * @throws IOException
     * @throws InvalidTokenOffsetsException
     */
    private String getHighlightedFragment(Analyzer analyzer, Query query,
            String content, SearchResult result) throws IOException, InvalidTokenOffsetsException {
        if (content == null) {
            content = "";
        }

        int fragmentLength = SearchObjectType.commit == result.type ? 512 : 150;

        QueryScorer scorer = new QueryScorer(query, "content");
        Fragmenter fragmenter = new SimpleSpanFragmenter(scorer, fragmentLength);

        // use an artificial delimiter for the token
        String termTag = "!!--[";
        String termTagEnd = "]--!!";
        SimpleHTMLFormatter formatter = new SimpleHTMLFormatter(termTag, termTagEnd);
        Highlighter highlighter = new Highlighter(formatter, scorer);
        highlighter.setTextFragmenter(fragmenter);

        String[] fragments = highlighter.getBestFragments(analyzer, "content", content, 3);
        if (ArrayUtils.isEmpty(fragments)) {
            if (SearchObjectType.blob == result.type) {
                return "";
            }
            // clip the commit message
            String fragment = content;
            if (fragment.length() > fragmentLength) {
                fragment = fragment.substring(0, fragmentLength) + "...";
            }
            return "<pre class=\"text\">" + StringUtils.escapeForHtml(fragment, true) + "</pre>";
        }

        // make sure we have unique fragments
        Set<String> uniqueFragments = new LinkedHashSet<String>();
        for (String fragment : fragments) {
            uniqueFragments.add(fragment);
        }
        fragments = uniqueFragments.toArray(new String[uniqueFragments.size()]);

        StringBuilder sb = new StringBuilder();
        for (int i = 0, len = fragments.length; i < len; i++) {
            String fragment = fragments[i];
            String tag = "<pre class=\"text\">";

            // resurrect the raw fragment by removing the artificial delimiters
            String raw = fragment.replace(termTag, "").replace(termTagEnd, "");

            // determine position of the raw fragment in the content
            int pos = content.indexOf(raw);

            // restore complete first line of fragment
            int c = pos;
            while (c > 0) {
                c--;
                if (content.charAt(c) == '\n') {
                    break;
                }
            }
            if (c > 0) {
                // inject leading chunk of first fragment line
                fragment = content.substring(c + 1, pos) + fragment;
            }

            if (SearchObjectType.blob == result.type) {
                // count lines as offset into the content for this fragment
                int line = Math.max(1, StringUtils.countLines(content.substring(0, pos)));

                // create fragment tag with line number and language
                String lang = "";
                String ext = StringUtils.getFileExtension(result.path).toLowerCase();
                if (!StringUtils.isEmpty(ext)) {
                    // maintain the leading space!
                    lang = " lang-" + ext;
                }
                tag = MessageFormat.format("<pre class=\"prettyprint linenums:{0,number,0}{1}\">", line, lang);
            }

            sb.append(tag);

            // replace the artificial delimiter with html tags
            String html = StringUtils.escapeForHtml(fragment, false);
            html = html.replace(termTag, "<span class=\"highlight\">").replace(termTagEnd, "</span>");
            sb.append(html);
            sb.append("</pre>");
            if (i < len - 1) {
                sb.append("<span class=\"ellipses\">...</span><br/>");
            }
        }
        return sb.toString();
    }

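    // For a blob match the fragment is wrapped for client-side pretty-printing
    // with the first matched line number and, when the path has an extension, a
    // language hint, e.g. (illustrative): <pre class="prettyprint linenums:42 lang-java">.
    // Matched terms inside the fragment are wrapped in highlight span tags.
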
    /**
     * Simple class to track the results of an index update.
     */
    private class IndexResult {
        long startTime = System.currentTimeMillis();
        long endTime = startTime;
        boolean success;
        int branchCount;
        int commitCount;
        int blobCount;
        int issueCount;

        void add(IndexResult result) {
            this.branchCount += result.branchCount;
            this.commitCount += result.commitCount;
            this.blobCount += result.blobCount;
            this.issueCount += result.issueCount;
        }

        void success() {
            success = true;
            endTime = System.currentTimeMillis();
        }

        float duration() {
            return (endTime - startTime) / 1000f;
        }
    }

    /**
     * Custom subclass of MultiReader to identify the source index for a given
     * doc id. This would not be necessary if there were a public method to
     * obtain this information.
     * 
     */
    private class MultiSourceReader extends MultiReader {

        final Method method;

        MultiSourceReader(IndexReader[] subReaders) {
            super(subReaders);
            Method m = null;
            try {
                // readerIndex(int) is not public, so access it reflectively
                m = MultiReader.class.getDeclaredMethod("readerIndex", int.class);
                m.setAccessible(true);
            } catch (Exception e) {
                logger.error("Failed to access MultiReader.readerIndex", e);
            }
            method = m;
        }

        int getSourceIndex(int docId) {
            int index = -1;
            try {