@@ -5,8 +5,11 @@
 package code
 
 import (
+	"bufio"
 	"context"
 	"fmt"
+	"io"
+	"io/ioutil"
 	"strconv"
 	"strings"
 	"time"
@@ -172,7 +175,7 @@ func (b *ElasticSearchIndexer) init() (bool, error) {
 	return exists, nil
 }
 
-func (b *ElasticSearchIndexer) addUpdate(sha string, update fileUpdate, repo *models.Repository) ([]elastic.BulkableRequest, error) {
+func (b *ElasticSearchIndexer) addUpdate(batchWriter *io.PipeWriter, batchReader *bufio.Reader, sha string, update fileUpdate, repo *models.Repository) ([]elastic.BulkableRequest, error) {
 	// Ignore vendored files in code search
 	if setting.Indexer.ExcludeVendored && enry.IsVendor(update.Filename) {
 		return nil, nil
@@ -195,8 +198,16 @@ func (b *ElasticSearchIndexer) addUpdate(sha string, update fileUpdate, repo *models.Repository) ([]elastic.BulkableRequest, error) {
 		return []elastic.BulkableRequest{b.addDelete(update.Filename, repo)}, nil
 	}
 
-	fileContents, err := git.NewCommand("cat-file", "blob", update.BlobSha).
-		RunInDirBytes(repo.RepoPath())
+	if _, err := batchWriter.Write([]byte(update.BlobSha + "\n")); err != nil {
+		return nil, err
+	}
+
+	_, _, size, err := git.ReadBatchLine(batchReader)
+	if err != nil {
+		return nil, err
+	}
+
+	fileContents, err := ioutil.ReadAll(io.LimitReader(batchReader, size))
 	if err != nil {
 		return nil, err
 	} else if !base.IsTextFile(fileContents) {
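The hunk above replaces a per-file "git cat-file blob" invocation with requests to one long-running "git cat-file --batch" process: each request is an object name plus a newline written to the child's stdin, and each response is a header line of the form "<sha> <type> <size>" followed by exactly <size> bytes of content and a terminating newline. The sketch below illustrates that raw protocol with plain os/exec and bufio; it is a stand-alone example under assumed names (readBlobViaBatch, repoPath, blobSha), not Gitea's git.CatFileBatch / git.ReadBatchLine implementation.

package example

import (
	"bufio"
	"fmt"
	"io"
	"io/ioutil"
	"os/exec"
	"strconv"
	"strings"
)

// readBlobViaBatch is an illustrative sketch of the "git cat-file --batch"
// request/response protocol used in the hunk above.
func readBlobViaBatch(repoPath, blobSha string) ([]byte, error) {
	cmd := exec.Command("git", "-C", repoPath, "cat-file", "--batch")
	stdin, err := cmd.StdinPipe()
	if err != nil {
		return nil, err
	}
	stdout, err := cmd.StdoutPipe()
	if err != nil {
		return nil, err
	}
	if err := cmd.Start(); err != nil {
		return nil, err
	}
	defer func() { _ = stdin.Close(); _ = cmd.Wait() }()

	reader := bufio.NewReader(stdout)

	// Request: one object name per line on stdin.
	if _, err := stdin.Write([]byte(blobSha + "\n")); err != nil {
		return nil, err
	}

	// Response header: "<sha> <type> <size>\n" (or "<sha> missing\n").
	header, err := reader.ReadString('\n')
	if err != nil {
		return nil, err
	}
	fields := strings.Fields(header)
	if len(fields) < 3 {
		return nil, fmt.Errorf("unexpected cat-file header: %q", header)
	}
	size, err := strconv.ParseInt(fields[2], 10, 64)
	if err != nil {
		return nil, err
	}

	// Body: exactly <size> bytes of content, then a trailing LF that has to be
	// consumed before the next request is written.
	contents, err := ioutil.ReadAll(io.LimitReader(reader, size))
	if err != nil {
		return nil, err
	}
	if _, err := reader.Discard(1); err != nil {
		return nil, err
	}
	return contents, nil
}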
@@ -230,8 +241,13 @@ func (b *ElasticSearchIndexer) addDelete(filename string, repo *models.Repository) elastic.BulkableRequest {
 // Index will save the index data
 func (b *ElasticSearchIndexer) Index(repo *models.Repository, sha string, changes *repoChanges) error {
 	reqs := make([]elastic.BulkableRequest, 0)
+	if len(changes.Updates) > 0 {
+
+		batchWriter, batchReader, cancel := git.CatFileBatch(repo.RepoPath())
+		defer cancel()
+
 		for _, update := range changes.Updates {
-		updateReqs, err := b.addUpdate(sha, update, repo)
+			updateReqs, err := b.addUpdate(batchWriter, batchReader, sha, update, repo)
 			if err != nil {
 				return err
 			}
@@ -239,6 +255,8 @@ func (b *ElasticSearchIndexer) Index(repo *models.Repository, sha string, changes *repoChanges) error {
 				reqs = append(reqs, updateReqs...)
 			}
 		}
+		cancel()
+	}
 
 	for _, filename := range changes.RemovedFilenames {
 		reqs = append(reqs, b.addDelete(filename, repo))
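In the two hunks above, git.CatFileBatch(repo.RepoPath()) returns the request writer, a buffered response reader and a cancel function, so a single git process serves every update in the loop; the explicit cancel() after the loop, on top of the defer, releases that process as soon as the file contents have been read. A helper with that shape could be wired up roughly as follows; this is a sketch under assumed semantics (pipe wiring, shutdown behaviour), not the implementation in Gitea's modules/git.

package example

import (
	"bufio"
	"io"
	"os/exec"
)

// catFileBatch starts one long-lived "git cat-file --batch" process for the
// repository and returns the request writer, the buffered response reader and
// a cancel function that is safe to call more than once.
func catFileBatch(repoPath string) (*io.PipeWriter, *bufio.Reader, func()) {
	// Requests flow: caller -> batchWriter -> pipe -> git stdin.
	batchStdinReader, batchWriter := io.Pipe()
	// Responses flow: git stdout -> pipe -> batchReader -> caller.
	batchStdoutReader, batchStdoutWriter := io.Pipe()

	cmd := exec.Command("git", "-C", repoPath, "cat-file", "--batch")
	cmd.Stdin = batchStdinReader
	cmd.Stdout = batchStdoutWriter

	done := make(chan struct{})
	go func() {
		err := cmd.Run()
		// Propagate termination to both pipe ends so blocked callers wake up.
		_ = batchStdoutWriter.CloseWithError(err)
		_ = batchStdinReader.CloseWithError(err)
		close(done)
	}()

	cancel := func() {
		// Closing the request writer sends EOF to git, which then exits.
		_ = batchWriter.Close()
		_ = batchStdoutReader.Close()
		<-done
	}

	return batchWriter, bufio.NewReader(batchStdoutReader), cancel
}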