linter: some fields from RootWalker have been moved to the workspace.File structure (#875)

This change reduces the number of fields in RootWalker and simplifies the signatures of several functions, which now accept a single argument of type `*workspace.File`.
i582 authored Jan 15, 2021
1 parent e867a23 commit 40b2296
Showing 16 changed files with 278 additions and 190 deletions.
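
For orientation before the per-file diffs: the call sites below construct the file with workspace.NewFile and read everything that used to live in separate RootWalker/openedFile fields (fileContents, Lines, LinesPositions) through its methods. A rough sketch of that usage pattern follows; the method names are taken from the changed call sites in this commit, but the exact signatures are assumptions, not the real definitions.

package main

import (
	"fmt"

	"github.com/VKCOM/noverify/src/workspace"
)

func describeFile(filename string, contents []byte) {
	// One *workspace.File now carries what used to be separate fields.
	file := workspace.NewFile(filename, contents)

	fmt.Println(len(file.Contents())) // raw source bytes, formerly fileContents
	if file.NumLines() > 0 {
		line := file.Line(0)           // formerly Lines[0]
		offset := file.LinePosition(0) // formerly LinesPositions[0]
		fmt.Println(string(line), offset)
	}
}
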
6 changes: 3 additions & 3 deletions src/cmd/main.go
@@ -419,15 +419,15 @@ func LoadEmbeddedStubs(filenames []string) error {

readStubs := func(ch chan workspace.FileInfo) {
for _, filename := range filenames {
- data, err := stubs.Asset(filename)
+ contents, err := stubs.Asset(filename)
if err != nil {
log.Printf("Failed to read embedded %q file: %v", filename, err)
atomic.AddInt64(&errorsCount, 1)
continue
}
ch <- workspace.FileInfo{
- Filename: filename,
- Contents: data,
+ Name: filename,
+ Contents: contents,
}
}
}
8 changes: 4 additions & 4 deletions src/cmd/php-guru/dupcode/main.go
@@ -100,16 +100,16 @@ func Main(ctx *guru.Context) (int, error) {
irConverter := irconv.NewConverter(nil)
workerResult := make(funcSet)
for f := range filenamesCh {
- data, err := ioutil.ReadFile(f.Filename)
+ data, err := ioutil.ReadFile(f.Name)
if err != nil {
- log.Printf("read %s file: %v", f.Filename, err)
+ log.Printf("read %s file: %v", f.Name, err)
}
if !args.checkAutogen && workspace.FileIsAutoGenerated(data) {
continue
}
root, err := parseutil.ParseFile(data)
if err != nil {
- log.Printf("parse %s file: %v", f.Filename, err)
+ log.Printf("parse %s file: %v", f.Name, err)
continue
}
rootIR := irConverter.ConvertRoot(root)
@@ -118,7 +118,7 @@ func Main(ctx *guru.Context) (int, error) {
st: &meta.ClassParseState{},
funcs: workerResult,
fileContents: data,
- filename: f.Filename,
+ filename: f.Name,
args: &args,
normLevel: normLevel,
}
25 changes: 13 additions & 12 deletions src/langsrv/langsrv.go
@@ -15,6 +15,8 @@ import (
"sync"
"time"

+ "go.lsp.dev/uri"
+
"github.com/VKCOM/noverify/src/ir"
"github.com/VKCOM/noverify/src/ir/irconv"
"github.com/VKCOM/noverify/src/lintdebug"
@@ -24,7 +26,6 @@ import (
"github.com/VKCOM/noverify/src/solver"
"github.com/VKCOM/noverify/src/vscode"
"github.com/VKCOM/noverify/src/workspace"
- "go.lsp.dev/uri"
)

const maxLength = 16 << 20
@@ -177,7 +178,7 @@ func handleInitialize(req *baseRequest) error {
// other files are not analyzed fully at all
openMapMutex.Lock()
for filename, op := range openMap {
- go openFile(filename, op.contents)
+ go openFile(filename, string(op.file.Contents()))
}
openMapMutex.Unlock()
}()
@@ -383,9 +384,9 @@ func handleTextDocumentDefinition(req *baseRequest) error {

result := make([]vscode.Location, 0)

- if params.Position.Line < len(f.linesPositions) {
+ if params.Position.Line < f.file.NumLinesPosition() {
w := &definitionWalker{
- position: f.linesPositions[params.Position.Line] + params.Position.Character,
+ position: f.file.LinePosition(params.Position.Line) + params.Position.Character,
scopes: f.scopes,
}
f.rootNode.Walk(w)
@@ -422,9 +423,9 @@ func handleTextDocumentReferences(req *baseRequest) error {

result := make([]vscode.Location, 0)

- if params.Position.Line < len(f.linesPositions) {
+ if params.Position.Line < f.file.NumLinesPosition() {
w := &referencesWalker{
- position: f.linesPositions[params.Position.Line] + params.Position.Character,
+ position: f.file.LinePosition(params.Position.Line) + params.Position.Character,
}
f.rootNode.Walk(w)
if len(w.result) > 0 {
@@ -491,20 +492,20 @@ func handleTextDocumentHover(req *baseRequest) (finalErr error) {
lnPos := params.Position.Line
chPos := params.Position.Character - 1

- if lnPos >= len(f.lines) {
+ if lnPos >= f.file.NumLines() {
lintdebug.Send("Line out of range for file %s: %d", filename, lnPos)
return nil
}

- ln := f.lines[lnPos]
+ ln := f.file.Line(lnPos)

if chPos < 0 || chPos >= len(ln) {
lintdebug.Send("Char out of range for file %s: line '%s', char %d", filename, ln, chPos)
return nil
}

compl := &completionWalker{
- position: f.linesPositions[params.Position.Line] + params.Position.Character,
+ position: f.file.LinePosition(params.Position.Line) + params.Position.Character,
scopes: f.scopes,
}

@@ -634,19 +635,19 @@ func handleTextDocumentCompletion(req *baseRequest) error {
var ln []byte
var position int

- if lnPos >= len(f.lines) {
+ if lnPos >= f.file.NumLines() {
lintdebug.Send("Line out of range for file %s: %d", filename, lnPos)
return nil
}

- ln = f.lines[lnPos]
+ ln = f.file.Line(lnPos)

if chPos < 0 || chPos >= len(ln) {
lintdebug.Send("Char out of range for file %s: line '%s', char %d", filename, ln, chPos)
return nil
}

- position = f.linesPositions[params.Position.Line] + params.Position.Character
+ position = f.file.LinePosition(params.Position.Line) + params.Position.Character

compl := &completionWalker{
position: position,
13 changes: 7 additions & 6 deletions src/langsrv/refs.go
@@ -4,6 +4,8 @@ import (
"bytes"
"sync"

+ "go.lsp.dev/uri"
+
"github.com/VKCOM/noverify/src/ir"
"github.com/VKCOM/noverify/src/ir/irconv"
"github.com/VKCOM/noverify/src/lintdebug"
@@ -15,7 +17,6 @@ import (
"github.com/VKCOM/noverify/src/state"
"github.com/VKCOM/noverify/src/vscode"
"github.com/VKCOM/noverify/src/workspace"
- "go.lsp.dev/uri"
)

type referencesWalker struct {
@@ -159,7 +160,7 @@ func copyOpenMap() map[string]string {
openMapMutex.Lock()
res := make(map[string]string, len(openMap))
for filename, info := range openMap {
- res[filename] = info.contents
+ res[filename] = string(info.file.Contents())
}
openMapMutex.Unlock()

@@ -214,10 +215,10 @@ func findReferences(substr string, parse parseFn) []vscode.Location {
wg.Add(1)
go func() {
for fi := range ch {
- contents, err := readFile(openMapCopy, fi.Filename)
+ contents, err := readFile(openMapCopy, fi.Name)
if err == nil && bytes.Contains(contents, substrBytes) {
func() {
- waiter := linter.BeforeParse(len(contents), fi.Filename)
+ waiter := linter.BeforeParse(len(contents), fi.Name)
defer waiter.Finish()

parser := php7.NewParser(contents)
@@ -230,11 +231,11 @@ func findReferences(substr string, parse parseFn) []vscode.Location {
func() {
defer func() {
if r := recover(); r != nil {
- lintdebug.Send("Panic while processing %s: %v", fi.Filename, r)
+ lintdebug.Send("Panic while processing %s: %v", fi.Name, r)
}
}()

- found = parse(fi.Filename, rootIR, contents, parser)
+ found = parse(fi.Name, rootIR, contents, parser)
}()
resultMutex.Lock()
result = append(result, found...)
4 changes: 2 additions & 2 deletions src/langsrv/refsvisitors.go
@@ -81,7 +81,7 @@ func findMethodReferences(className string, methodName string) []vscode.Location
},
)

- rootWalker.InitFromParser(contents, parser)
+ rootWalker.InitCustomFileData(filename, contents)

rootNode.Walk(rootWalker)
linter.AnalyzeFileRootLevel(rootNode, rootWalker)
@@ -108,7 +108,7 @@ func findPropertyReferences(className string, propName string) []vscode.Location
},
)

- rootWalker.InitFromParser(contents, parser)
+ rootWalker.InitCustomFileData(filename, contents)

rootNode.Walk(rootWalker)
linter.AnalyzeFileRootLevel(rootNode, rootWalker)
23 changes: 15 additions & 8 deletions src/langsrv/state.go
@@ -7,20 +7,20 @@ import (
"sync"
"time"

+ "go.lsp.dev/uri"
+
"github.com/VKCOM/noverify/src/ir"
"github.com/VKCOM/noverify/src/lintdebug"
"github.com/VKCOM/noverify/src/linter"
"github.com/VKCOM/noverify/src/meta"
"github.com/VKCOM/noverify/src/vscode"
- "go.lsp.dev/uri"
+ "github.com/VKCOM/noverify/src/workspace"
)

type openedFile struct {
- rootNode ir.Node
- contents string
- scopes map[ir.Node]*meta.Scope
- lines [][]byte
- linesPositions []int
+ rootNode ir.Node
+ scopes map[ir.Node]*meta.Scope
+ file *workspace.File
}

var (
@@ -48,7 +48,10 @@ func openFile(filename, contents string) {
}

openMapMutex.Lock()
- openMap[filename] = openedFile{rootNode: rootNode, contents: contents}
+ openMap[filename] = openedFile{
+ rootNode: rootNode,
+ file: workspace.NewFile(filename, []byte(contents)),
+ }
openMapMutex.Unlock()
}

@@ -86,7 +89,11 @@ func changeFileNonLocked(filename, contents string) {
linter.AnalyzeFileRootLevel(rootNode, newWalker)

openMapMutex.Lock()
- f := openedFile{rootNode, contents, w.Scopes, w.Lines, w.LinesPositions}
+ f := openedFile{
+ rootNode: rootNode,
+ scopes: w.Scopes,
+ file: w.File(),
+ }
openMap[filename] = f
openMapMutex.Unlock()

2 changes: 1 addition & 1 deletion src/linter/block_linter.go
@@ -576,7 +576,7 @@ func (b *blockLinter) addFixForArray(arr *ir.ArrayExpr) {

from := arr.Position.StartPos
to := arr.Position.EndPos
- have := b.walker.r.fileContents[from:to]
+ have := b.walker.r.file.Contents()[from:to]
have = bytes.TrimPrefix(have, []byte("array("))
have = bytes.TrimSuffix(have, []byte(")"))

14 changes: 7 additions & 7 deletions src/linter/custom.go
@@ -12,6 +12,7 @@ import (
"github.com/VKCOM/noverify/src/quickfix"
"github.com/VKCOM/noverify/src/rules"
"github.com/VKCOM/noverify/src/vscode"
+ "github.com/VKCOM/noverify/src/workspace"
)

// MetaCacher is an interface for integrating checker-specific
@@ -163,12 +164,11 @@ func (ctx *RootContext) Filename() string {
return ctx.w.ctx.st.CurrentFile
}

- // FileContents returns analyzed file source code.
- // Caller should not modify the returned slice.
+ // File returns analyzed file.
//
// Experimental API.
- func (ctx *RootContext) FileContents() []byte {
- return ctx.w.fileContents
+ func (ctx *RootContext) File() *workspace.File {
+ return ctx.w.file
}

// BlockContext is the context for block checker.
@@ -233,9 +233,9 @@ func (ctx *BlockContext) Filename() string {
return ctx.w.r.ctx.st.CurrentFile
}

- // FileContents returns the content of the file being analyzed.
- func (ctx *BlockContext) FileContents() []byte {
- return ctx.w.r.fileContents
+ // File returns the file being analyzed.
+ func (ctx *BlockContext) File() *workspace.File {
+ return ctx.w.r.file
}

// AddQuickfix adds a new quick fix.
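
For custom checkers built on these contexts, the migration is mechanical: code that called FileContents() now goes through File() and its Contents() accessor. A minimal hedged sketch; the helper below is hypothetical, and only the ctx.File().Contents() call reflects this commit.

package main

import (
	"bytes"

	"github.com/VKCOM/noverify/src/linter"
)

// startsWithPHPTag is a hypothetical helper for a custom check.
func startsWithPHPTag(ctx *linter.RootContext) bool {
	// Old API (removed above): contents := ctx.FileContents()
	contents := ctx.File().Contents() // source bytes now live behind *workspace.File
	return bytes.HasPrefix(contents, []byte("<?php"))
}
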
13 changes: 11 additions & 2 deletions src/linter/parser.go
@@ -20,14 +20,23 @@ import (
func ParseContents(filename string, contents []byte, lineRanges []git.LineRange, allowDisabled *regexp.Regexp) (root *ir.Root, walker *RootWalker, err error) {
w := NewLintingWorker(0)
w.AllowDisable = allowDisabled
- return w.ParseContents(filename, contents, lineRanges)
+ file := workspace.FileInfo{
+ Name: filename,
+ Contents: contents,
+ LineRanges: lineRanges,
+ }
+ return w.ParseContents(file)
}

// IndexFile is a legacy way of indexing files.
// Deprecated: use Worker.IndexFile instead.
func IndexFile(filename string, contents []byte) error {
w := NewIndexingWorker(0)
- return w.IndexFile(filename, contents)
+ file := workspace.FileInfo{
+ Name: filename,
+ Contents: contents,
+ }
+ return w.IndexFile(file)
}

func cloneRulesForFile(filename string, ruleSet *rules.ScopedSet) *rules.ScopedSet {
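
The legacy wrappers keep their old signatures; only their bodies change to build a workspace.FileInfo and hand it to the Worker methods. A small sketch of calling the Worker API directly, mirroring what the updated IndexFile wrapper does internally; the file name and contents are placeholders.

package main

import (
	"log"

	"github.com/VKCOM/noverify/src/linter"
	"github.com/VKCOM/noverify/src/workspace"
)

func main() {
	w := linter.NewIndexingWorker(0)
	err := w.IndexFile(workspace.FileInfo{
		Name:     "example.php",           // placeholder name
		Contents: []byte("<?php echo 1;"), // placeholder contents
	})
	if err != nil {
		log.Printf("index example.php: %v", err)
	}
}
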