Merge branch 'master' into feature/internal-msan

Mauri de Souza Meneguzzo 2024-02-16 20:20:35 -03:00
commit 0a644bd6f1
221 changed files with 2931 additions and 3293 deletions

View File

@@ -1,2 +1,8 @@
## Ports {#ports}
### Darwin {#darwin}
<!-- go.dev/issue/64207 -->
As [announced](go1.22#darwin) in the Go 1.22 release notes,
Go 1.23 requires macOS 11 Big Sur or later;
support for previous versions has been discontinued.

View File

@@ -16,8 +16,10 @@ import (
"encoding/json"
"errors"
"fmt"
"internal/testenv"
"os"
"os/exec"
"os/user"
"path/filepath"
"regexp"
"strconv"
@@ -266,12 +268,28 @@ func compilerSupportsLocation() bool {
case "gcc":
return compiler.major >= 10
case "clang":
// TODO(65606): The clang toolchain on the LUCI builders is not built against
// zlib, the ASAN runtime can't actually symbolize its own stack trace. Once
// this is resolved, one way or another, switch this back to 'true'. We still
// have coverage from the 'gcc' case above.
if inLUCIBuild() {
return false
}
return true
default:
return false
}
}
// inLUCIBuild returns true if we're currently executing in a LUCI build.
func inLUCIBuild() bool {
u, err := user.Current()
if err != nil {
return false
}
return testenv.Builder() != "" && u.Username == "swarming"
}
// compilerRequiredTsanVersion reports whether the compiler is the version required by Tsan.
// Only restrictions for ppc64le are known; otherwise return true.
func compilerRequiredTsanVersion(goos, goarch string) bool {

View File

@@ -141,38 +141,29 @@ func CanInlineFuncs(funcs []*ir.Func, profile *pgo.Profile) {
PGOInlinePrologue(profile)
}
ir.VisitFuncsBottomUp(funcs, func(list []*ir.Func, recursive bool) {
CanInlineSCC(list, recursive, profile)
})
}
// CanInlineSCC computes the inlinability of functions within an SCC
// (strongly connected component).
//
// CanInlineSCC is designed to be used by ir.VisitFuncsBottomUp
// callbacks.
func CanInlineSCC(funcs []*ir.Func, recursive bool, profile *pgo.Profile) {
if base.Flag.LowerL == 0 {
return
}
numfns := numNonClosures(funcs)
ir.VisitFuncsBottomUp(funcs, func(funcs []*ir.Func, recursive bool) {
numfns := numNonClosures(funcs)
for _, fn := range funcs {
if !recursive || numfns > 1 {
// We allow inlining if there is no
// recursion, or the recursion cycle is
// across more than one function.
CanInline(fn, profile)
} else {
if base.Flag.LowerM > 1 && fn.OClosure == nil {
fmt.Printf("%v: cannot inline %v: recursive\n", ir.Line(fn), fn.Nname)
for _, fn := range funcs {
if !recursive || numfns > 1 {
// We allow inlining if there is no
// recursion, or the recursion cycle is
// across more than one function.
CanInline(fn, profile)
} else {
if base.Flag.LowerM > 1 && fn.OClosure == nil {
fmt.Printf("%v: cannot inline %v: recursive\n", ir.Line(fn), fn.Nname)
}
}
if inlheur.Enabled() {
analyzeFuncProps(fn, profile)
}
}
if inlheur.Enabled() {
analyzeFuncProps(fn, profile)
}
}
})
}
// GarbageCollectUnreferencedHiddenClosures makes a pass over all the

View File

@@ -38,27 +38,16 @@ func DevirtualizeAndInlinePackage(pkg *ir.Package, profile *pgo.Profile) {
if base.Debug.PGOInline != 0 {
inlProfile = profile
}
if inlProfile != nil {
inline.PGOInlinePrologue(inlProfile)
// First compute inlinability of all functions in the package.
inline.CanInlineFuncs(pkg.Funcs, inlProfile)
// Now we make a second pass to do devirtualization and inlining of
// calls. Order here should not matter.
for _, fn := range pkg.Funcs {
DevirtualizeAndInlineFunc(fn, inlProfile)
}
ir.VisitFuncsBottomUp(pkg.Funcs, func(funcs []*ir.Func, recursive bool) {
// We visit functions within an SCC in fairly arbitrary order,
// so by computing inlinability for all functions in the SCC
// before performing any inlining, the results are less
// sensitive to the order within the SCC (see #58905 for an
// example).
// First compute inlinability for all functions in the SCC ...
inline.CanInlineSCC(funcs, recursive, inlProfile)
// ... then make a second pass to do devirtualization and inlining
// of calls.
for _, fn := range funcs {
DevirtualizeAndInlineFunc(fn, inlProfile)
}
})
if base.Flag.LowerL != 0 {
// Perform a garbage collection of hidden closures functions that
// are no longer reachable from top-level functions following

View File

@@ -663,9 +663,24 @@ func (pr *pkgReader) objInstIdx(info objInfo, dict *readerDict, shaped bool) ir.
}
// objIdx returns the specified object, instantiated with the given
// type arguments, if any. If shaped is true, then the shaped variant
// of the object is returned instead.
// type arguments, if any.
// If shaped is true, then the shaped variant of the object is returned
// instead.
func (pr *pkgReader) objIdx(idx pkgbits.Index, implicits, explicits []*types.Type, shaped bool) ir.Node {
n, err := pr.objIdxMayFail(idx, implicits, explicits, shaped)
if err != nil {
base.Fatalf("%v", err)
}
return n
}
// objIdxMayFail is equivalent to objIdx, but returns an error rather than
// failing the build if this object requires type arguments and the incorrect
// number of type arguments were passed.
//
// Other sources of internal failure (such as duplicate definitions) still fail
// the build.
func (pr *pkgReader) objIdxMayFail(idx pkgbits.Index, implicits, explicits []*types.Type, shaped bool) (ir.Node, error) {
rname := pr.newReader(pkgbits.RelocName, idx, pkgbits.SyncObject1)
_, sym := rname.qualifiedIdent()
tag := pkgbits.CodeObj(rname.Code(pkgbits.SyncCodeObj))
@@ -674,22 +689,25 @@ func (pr *pkgReader) objIdx(idx pkgbits.Index, implicits, explicits []*types.Typ
assert(!sym.IsBlank())
switch sym.Pkg {
case types.BuiltinPkg, types.UnsafePkg:
return sym.Def.(ir.Node)
return sym.Def.(ir.Node), nil
}
if pri, ok := objReader[sym]; ok {
return pri.pr.objIdx(pri.idx, nil, explicits, shaped)
return pri.pr.objIdxMayFail(pri.idx, nil, explicits, shaped)
}
if sym.Pkg.Path == "runtime" {
return typecheck.LookupRuntime(sym.Name)
return typecheck.LookupRuntime(sym.Name), nil
}
base.Fatalf("unresolved stub: %v", sym)
}
dict := pr.objDictIdx(sym, idx, implicits, explicits, shaped)
dict, err := pr.objDictIdx(sym, idx, implicits, explicits, shaped)
if err != nil {
return nil, err
}
sym = dict.baseSym
if !sym.IsBlank() && sym.Def != nil {
return sym.Def.(*ir.Name)
return sym.Def.(*ir.Name), nil
}
r := pr.newReader(pkgbits.RelocObj, idx, pkgbits.SyncObject1)
@@ -725,7 +743,7 @@ func (pr *pkgReader) objIdx(idx pkgbits.Index, implicits, explicits []*types.Typ
name := do(ir.OTYPE, false)
setType(name, r.typ())
name.SetAlias(true)
return name
return name, nil
case pkgbits.ObjConst:
name := do(ir.OLITERAL, false)
@@ -733,7 +751,7 @@ func (pr *pkgReader) objIdx(idx pkgbits.Index, implicits, explicits []*types.Typ
val := FixValue(typ, r.Value())
setType(name, typ)
setValue(name, val)
return name
return name, nil
case pkgbits.ObjFunc:
if sym.Name == "init" {
@@ -768,7 +786,7 @@ func (pr *pkgReader) objIdx(idx pkgbits.Index, implicits, explicits []*types.Typ
}
rext.funcExt(name, nil)
return name
return name, nil
case pkgbits.ObjType:
name := do(ir.OTYPE, true)
@@ -805,13 +823,13 @@ func (pr *pkgReader) objIdx(idx pkgbits.Index, implicits, explicits []*types.Typ
r.needWrapper(typ)
}
return name
return name, nil
case pkgbits.ObjVar:
name := do(ir.ONAME, false)
setType(name, r.typ())
rext.varExt(name)
return name
return name, nil
}
}
@@ -908,7 +926,7 @@ func shapify(targ *types.Type, basic bool) *types.Type {
}
// objDictIdx reads and returns the specified object dictionary.
func (pr *pkgReader) objDictIdx(sym *types.Sym, idx pkgbits.Index, implicits, explicits []*types.Type, shaped bool) *readerDict {
func (pr *pkgReader) objDictIdx(sym *types.Sym, idx pkgbits.Index, implicits, explicits []*types.Type, shaped bool) (*readerDict, error) {
r := pr.newReader(pkgbits.RelocObjDict, idx, pkgbits.SyncObject1)
dict := readerDict{
@@ -919,7 +937,7 @@ func (pr *pkgReader) objDictIdx(sym *types.Sym, idx pkgbits.Index, implicits, ex
nexplicits := r.Len()
if nimplicits > len(implicits) || nexplicits != len(explicits) {
base.Fatalf("%v has %v+%v params, but instantiated with %v+%v args", sym, nimplicits, nexplicits, len(implicits), len(explicits))
return nil, fmt.Errorf("%v has %v+%v params, but instantiated with %v+%v args", sym, nimplicits, nexplicits, len(implicits), len(explicits))
}
dict.targs = append(implicits[:nimplicits:nimplicits], explicits...)
@@ -984,7 +1002,7 @@ func (pr *pkgReader) objDictIdx(sym *types.Sym, idx pkgbits.Index, implicits, ex
dict.itabs[i] = itabInfo{typ: r.typInfo(), iface: r.typInfo()}
}
return &dict
return &dict, nil
}
func (r *reader) typeParamNames() {
@@ -2529,7 +2547,10 @@ func (pr *pkgReader) objDictName(idx pkgbits.Index, implicits, explicits []*type
base.Fatalf("unresolved stub: %v", sym)
}
dict := pr.objDictIdx(sym, idx, implicits, explicits, false)
dict, err := pr.objDictIdx(sym, idx, implicits, explicits, false)
if err != nil {
base.Fatalf("%v", err)
}
return pr.dictNameOf(dict)
}
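The objIdx/objIdxMayFail split above is an instance of a general idiom: keep one fallible core that returns an error, plus a thin wrapper that turns any error into a fatal exit for callers that cannot recover. A minimal stand-alone sketch of that idiom, using illustrative names rather than the compiler's own:

package main

import (
	"errors"
	"fmt"
	"log"
)

// errBadArgs stands in for the "wrong number of type arguments" condition
// that objIdxMayFail reports instead of crashing the build.
var errBadArgs = errors.New("wrong number of type arguments")

// lookupMayFail is the fallible core: callers that can recover, such as a
// PGO lookup of a symbol whose shape may have changed, inspect the error.
func lookupMayFail(nargs, nparams int) (string, error) {
	if nargs != nparams {
		return "", fmt.Errorf("lookup: %w (got %d, want %d)", errBadArgs, nargs, nparams)
	}
	return "object", nil
}

// lookup is the thin wrapper used on paths where failure is a bug:
// it converts any error into a fatal exit, mirroring base.Fatalf.
func lookup(nargs, nparams int) string {
	obj, err := lookupMayFail(nargs, nparams)
	if err != nil {
		log.Fatalf("%v", err)
	}
	return obj
}

func main() {
	fmt.Println(lookup(2, 2))
	if _, err := lookupMayFail(1, 2); err != nil {
		fmt.Println("recovered:", err) // the caller chooses not to crash
	}
}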

View File

@@ -80,7 +80,11 @@ func lookupFunction(pkg *types.Pkg, symName string) (*ir.Func, error) {
return nil, fmt.Errorf("func sym %v missing objReader", sym)
}
name := pri.pr.objIdx(pri.idx, nil, nil, false).(*ir.Name)
node, err := pri.pr.objIdxMayFail(pri.idx, nil, nil, false)
if err != nil {
return nil, fmt.Errorf("func sym %v lookup error: %w", sym, err)
}
name := node.(*ir.Name)
if name.Op() != ir.ONAME || name.Class != ir.PFUNC {
return nil, fmt.Errorf("func sym %v refers to non-function name: %v", sym, name)
}
@@ -105,7 +109,11 @@ func lookupMethod(pkg *types.Pkg, symName string) (*ir.Func, error) {
return nil, fmt.Errorf("type sym %v missing objReader", typ)
}
name := pri.pr.objIdx(pri.idx, nil, nil, false).(*ir.Name)
node, err := pri.pr.objIdxMayFail(pri.idx, nil, nil, false)
if err != nil {
return nil, fmt.Errorf("func sym %v lookup error: %w", typ, err)
}
name := node.(*ir.Name)
if name.Op() != ir.OTYPE {
return nil, fmt.Errorf("type sym %v refers to non-type name: %v", typ, name)
}

View File

@@ -108,7 +108,6 @@ type NamedCallEdge struct {
CallerName string
CalleeName string
CallSiteOffset int // Line offset from function start line.
CallStartLine int // Start line of the function. Can be 0 which means missing.
}
// NamedEdgeMap contains all unique call edges in the profile and their
@@ -336,20 +335,19 @@ func createNamedEdgeMapFromPreprocess(r io.Reader) (edgeMap NamedEdgeMap, totalW
split := strings.Split(readStr, " ")
if len(split) != 5 {
return NamedEdgeMap{}, 0, fmt.Errorf("preprocessed profile entry got %v want 5 fields", split)
if len(split) != 2 {
return NamedEdgeMap{}, 0, fmt.Errorf("preprocessed profile entry got %v want 2 fields", split)
}
co, _ := strconv.Atoi(split[0])
cs, _ := strconv.Atoi(split[1])
namedEdge := NamedCallEdge{
CallerName: callerName,
CallSiteOffset: co - cs,
CalleeName: calleeName,
CallSiteOffset: co,
}
namedEdge.CalleeName = calleeName
EWeight, _ := strconv.ParseInt(split[4], 10, 64)
EWeight, _ := strconv.ParseInt(split[1], 10, 64)
weight[namedEdge] += EWeight
totalWeight += EWeight
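For reference, each preprocessed edge entry consumed above now carries two fields, the call-site offset and the edge weight, with the caller and callee names on the two preceding lines (as in the node_map files later in this diff). A simplified stand-alone parsing sketch under that assumption, not the real createNamedEdgeMapFromPreprocess:

package main

import (
	"fmt"
	"strconv"
	"strings"
)

// edge mirrors the shape of one preprocessed profile entry: caller and
// callee names on their own lines, then "<call site offset> <edge weight>".
type edge struct {
	caller, callee string
	callSiteOffset int
}

func parseEntry(caller, callee, line string) (edge, int64, error) {
	fields := strings.Split(line, " ")
	if len(fields) != 2 {
		return edge{}, 0, fmt.Errorf("preprocessed profile entry got %v want 2 fields", fields)
	}
	co, err := strconv.Atoi(fields[0])
	if err != nil {
		return edge{}, 0, err
	}
	w, err := strconv.ParseInt(fields[1], 10, 64)
	if err != nil {
		return edge{}, 0, err
	}
	return edge{caller: caller, callee: callee, callSiteOffset: co}, w, nil
}

func main() {
	// One entry in the style of devirt.pprof.node_map.
	e, w, err := parseEntry(
		"example.com/pgo/devirtualize.ExerciseIface",
		"example.com/pgo/devirtualize.Add.Add",
		"49 55",
	)
	if err != nil {
		panic(err)
	}
	fmt.Printf("%s -> %s at offset %d, weight %d\n", e.caller, e.callee, e.callSiteOffset, w)
}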

View File

@@ -1331,20 +1331,25 @@ func writeITab(lsym *obj.LSym, typ, iface *types.Type, allowNonImplement bool) {
// _ [4]byte
// fun [1]uintptr // variable sized. fun[0]==0 means _type does not implement inter.
// }
o := objw.SymPtr(lsym, 0, writeType(iface), 0)
o = objw.SymPtr(lsym, o, writeType(typ), 0)
o = objw.Uint32(lsym, o, types.TypeHash(typ)) // copy of type hash
o += 4 // skip unused field
c := rttype.NewCursor(lsym, 0, rttype.ITab)
c.Field("Inter").WritePtr(writeType(iface))
c.Field("Type").WritePtr(writeType(typ))
c.Field("Hash").WriteUint32(types.TypeHash(typ)) // copy of type hash
var delta int64
c = c.Field("Fun")
if !completeItab {
// If typ doesn't implement iface, make method entries be zero.
o = objw.Uintptr(lsym, o, 0)
entries = entries[:0]
}
for _, fn := range entries {
o = objw.SymPtrWeak(lsym, o, fn, 0) // method pointer for each method
c.Elem(0).WriteUintptr(0)
} else {
var a rttype.ArrayCursor
a, delta = c.ModifyArray(len(entries))
for i, fn := range entries {
a.Elem(i).WritePtrWeak(fn) // method pointer for each method
}
}
// Nothing writes static itabs, so they are read only.
objw.Global(lsym, int32(o), int16(obj.DUPOK|obj.RODATA))
objw.Global(lsym, int32(rttype.ITab.Size()+delta), int16(obj.DUPOK|obj.RODATA))
lsym.Set(obj.AttrContentAddressable, true)
}
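The cursor writes above follow the itab layout in the comment: Inter, Type, Hash, four bytes of padding, then Fun. A small illustration of where those fields land on a 64-bit target, using a mirror struct that only stands in for internal/abi.ITab:

package main

import (
	"fmt"
	"unsafe"
)

// itabMirror mimics the layout described in the writeITab comment: two
// pointers, a uint32 hash, 4 bytes of padding, then the method table.
// It is an illustrative stand-in, not internal/abi.ITab itself.
type itabMirror struct {
	Inter uintptr
	Type  uintptr
	Hash  uint32
	_     [4]byte
	Fun   [1]uintptr // variable sized; Fun[0]==0 means Type does not implement Inter
}

func main() {
	var it itabMirror
	fmt.Println("Inter offset:", unsafe.Offsetof(it.Inter)) // 0
	fmt.Println("Type offset: ", unsafe.Offsetof(it.Type))  // 8 on 64-bit targets
	fmt.Println("Hash offset: ", unsafe.Offsetof(it.Hash))  // 16
	fmt.Println("Fun offset:  ", unsafe.Offsetof(it.Fun))   // 24
	fmt.Println("size:        ", unsafe.Sizeof(it))         // 32 with a one-entry Fun
}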

View File

@@ -278,7 +278,7 @@ func ssaGenValue(s *ssagen.State, v *ssa.Value) {
p.To.Type = obj.TYPE_REG
p.To.Reg = rd
case ssa.OpRISCV64ADD, ssa.OpRISCV64SUB, ssa.OpRISCV64SUBW, ssa.OpRISCV64XOR, ssa.OpRISCV64OR, ssa.OpRISCV64AND,
ssa.OpRISCV64SLL, ssa.OpRISCV64SRA, ssa.OpRISCV64SRAW, ssa.OpRISCV64SRL, ssa.OpRISCV64SRLW,
ssa.OpRISCV64SLL, ssa.OpRISCV64SLLW, ssa.OpRISCV64SRA, ssa.OpRISCV64SRAW, ssa.OpRISCV64SRL, ssa.OpRISCV64SRLW,
ssa.OpRISCV64SLT, ssa.OpRISCV64SLTU, ssa.OpRISCV64MUL, ssa.OpRISCV64MULW, ssa.OpRISCV64MULH,
ssa.OpRISCV64MULHU, ssa.OpRISCV64DIV, ssa.OpRISCV64DIVU, ssa.OpRISCV64DIVW,
ssa.OpRISCV64DIVUW, ssa.OpRISCV64REM, ssa.OpRISCV64REMU, ssa.OpRISCV64REMW,
@@ -422,8 +422,8 @@ func ssaGenValue(s *ssagen.State, v *ssa.Value) {
p.To.Type = obj.TYPE_REG
p.To.Reg = v.Reg()
case ssa.OpRISCV64ADDI, ssa.OpRISCV64ADDIW, ssa.OpRISCV64XORI, ssa.OpRISCV64ORI, ssa.OpRISCV64ANDI,
ssa.OpRISCV64SLLI, ssa.OpRISCV64SRAI, ssa.OpRISCV64SRAIW, ssa.OpRISCV64SRLI, ssa.OpRISCV64SRLIW, ssa.OpRISCV64SLTI,
ssa.OpRISCV64SLTIU:
ssa.OpRISCV64SLLI, ssa.OpRISCV64SLLIW, ssa.OpRISCV64SRAI, ssa.OpRISCV64SRAIW,
ssa.OpRISCV64SRLI, ssa.OpRISCV64SRLIW, ssa.OpRISCV64SLTI, ssa.OpRISCV64SLTIU:
p := s.Prog(v.Op.Asm())
p.From.Type = obj.TYPE_CONST
p.From.Offset = v.AuxInt

View File

@@ -42,6 +42,9 @@ var UncommonType *types.Type
var InterfaceSwitch *types.Type
var TypeAssert *types.Type
// Interface tables (itabs)
var ITab *types.Type
func Init() {
// Note: this has to be called explicitly instead of being
// an init function so it runs after the types package has
@@ -64,6 +67,8 @@ func Init() {
InterfaceSwitch = fromReflect(reflect.TypeOf(abi.InterfaceSwitch{}))
TypeAssert = fromReflect(reflect.TypeOf(abi.TypeAssert{}))
ITab = fromReflect(reflect.TypeOf(abi.ITab{}))
// Make sure abi functions are correct. These functions are used
// by the linker which doesn't have the ability to do type layout,
// so we check the functions it uses here.
@@ -80,6 +85,9 @@ func Init() {
if got, want := int64(abi.TFlagOff(ptrSize)), Type.OffsetOf("TFlag"); got != want {
base.Fatalf("abi.TFlagOff() == %d, want %d", got, want)
}
if got, want := int64(abi.ITabTypeOff(ptrSize)), ITab.OffsetOf("Type"); got != want {
base.Fatalf("abi.ITabTypeOff() == %d, want %d", got, want)
}
}
// fromReflect translates from a host type to the equivalent target type.
@@ -154,6 +162,12 @@ func (c Cursor) WritePtr(target *obj.LSym) {
objw.SymPtr(c.lsym, int(c.offset), target, 0)
}
}
func (c Cursor) WritePtrWeak(target *obj.LSym) {
if c.typ.Kind() != types.TUINTPTR {
base.Fatalf("can't write ptr, it has kind %s", c.typ.Kind())
}
objw.SymPtrWeak(c.lsym, int(c.offset), target, 0)
}
func (c Cursor) WriteUintptr(val uint64) {
if c.typ.Kind() != types.TUINTPTR {
base.Fatalf("can't write uintptr, it has kind %s", c.typ.Kind())
@@ -250,6 +264,17 @@ func (c Cursor) Field(name string) Cursor {
return Cursor{}
}
func (c Cursor) Elem(i int64) Cursor {
if c.typ.Kind() != types.TARRAY {
base.Fatalf("can't call Elem on non-array %v", c.typ)
}
if i < 0 || i >= c.typ.NumElem() {
base.Fatalf("element access out of bounds [%d] in [0:%d]", i, c.typ.NumElem())
}
elem := c.typ.Elem()
return Cursor{lsym: c.lsym, offset: c.offset + i*elem.Size(), typ: elem}
}
type ArrayCursor struct {
c Cursor // cursor pointing at first element
n int // number of elements

View File

@@ -214,10 +214,10 @@
(Rsh64x(64|32|16|8) x y) && shiftIsBounded(v) => (SRA x y)
// Rotates.
(RotateLeft8 <t> x (MOVDconst [c])) => (Or8 (Lsh8x64 <t> x (MOVDconst [c&7])) (Rsh8Ux64 <t> x (MOVDconst [-c&7])))
(RotateLeft16 <t> x (MOVDconst [c])) => (Or16 (Lsh16x64 <t> x (MOVDconst [c&15])) (Rsh16Ux64 <t> x (MOVDconst [-c&15])))
(RotateLeft32 <t> x (MOVDconst [c])) => (Or32 (Lsh32x64 <t> x (MOVDconst [c&31])) (Rsh32Ux64 <t> x (MOVDconst [-c&31])))
(RotateLeft64 <t> x (MOVDconst [c])) => (Or64 (Lsh64x64 <t> x (MOVDconst [c&63])) (Rsh64Ux64 <t> x (MOVDconst [-c&63])))
(RotateLeft8 <t> x y) => (OR (SLL <t> x (ANDI [7] <y.Type> y)) (SRL <t> (ZeroExt8to64 x) (ANDI [7] <y.Type> (NEG <y.Type> y))))
(RotateLeft16 <t> x y) => (OR (SLL <t> x (ANDI [15] <y.Type> y)) (SRL <t> (ZeroExt16to64 x) (ANDI [15] <y.Type> (NEG <y.Type> y))))
(RotateLeft32 <t> x y) => (OR (SLLW <t> x y) (SRLW <t> x (NEG <y.Type> y)))
(RotateLeft64 <t> x y) => (OR (SLL <t> x y) (SRL <t> x (NEG <y.Type> y)))
(Less64 ...) => (SLT ...)
(Less32 x y) => (SLT (SignExt32to64 x) (SignExt32to64 y))
@@ -733,6 +733,7 @@
(XOR (MOVDconst [val]) x) && is32Bit(val) => (XORI [val] x)
(SLL x (MOVDconst [val])) => (SLLI [int64(val&63)] x)
(SRL x (MOVDconst [val])) => (SRLI [int64(val&63)] x)
(SLLW x (MOVDconst [val])) => (SLLIW [int64(val&31)] x)
(SRLW x (MOVDconst [val])) => (SRLIW [int64(val&31)] x)
(SRA x (MOVDconst [val])) => (SRAI [int64(val&63)] x)
(SRAW x (MOVDconst [val])) => (SRAIW [int64(val&31)] x)
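The new rules lower variable rotates to shifts: rotl(x, k) = (x << (k & (w-1))) | (zext(x) >> (-k & (w-1))), where the sub-word widths shift a zero-extended operand so the vacated bits come from x itself. A quick stand-alone check of that identity for the 8-bit case, written in plain Go and independent of the SSA rules:

package main

import (
	"fmt"
	"math/bits"
)

// rotl8 mirrors the lowering used by the new RotateLeft8 rule: left-shift by
// k&7, right-shift the zero-extended value by -k&7, then OR the two halves.
func rotl8(x uint8, k int) uint8 {
	left := x << (uint(k) & 7)
	right := uint8(uint64(x) >> (uint(-k) & 7))
	return left | right
}

func main() {
	for k := -9; k <= 9; k++ {
		for _, x := range []uint8{0x01, 0x80, 0xA5, 0xFF} {
			want := bits.RotateLeft8(x, k)
			if got := rotl8(x, k); got != want {
				fmt.Printf("mismatch: x=%#x k=%d got=%#x want=%#x\n", x, k, got, want)
				return
			}
		}
	}
	fmt.Println("rotl8 matches bits.RotateLeft8 for all tested inputs")
}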

View File

@@ -207,16 +207,18 @@ func init() {
{name: "MOVDnop", argLength: 1, reg: regInfo{inputs: []regMask{gpMask}, outputs: []regMask{gpMask}}, resultInArg0: true}, // nop, return arg0 in same register
// Shift ops
{name: "SLL", argLength: 2, reg: gp21, asm: "SLL"}, // arg0 << (aux1 & 63)
{name: "SRA", argLength: 2, reg: gp21, asm: "SRA"}, // arg0 >> (aux1 & 63), signed
{name: "SRAW", argLength: 2, reg: gp21, asm: "SRAW"}, // arg0 >> (aux1 & 31), signed
{name: "SRL", argLength: 2, reg: gp21, asm: "SRL"}, // arg0 >> (aux1 & 63), unsigned
{name: "SRLW", argLength: 2, reg: gp21, asm: "SRLW"}, // arg0 >> (aux1 & 31), unsigned
{name: "SLLI", argLength: 1, reg: gp11, asm: "SLLI", aux: "Int64"}, // arg0 << auxint, shift amount 0-63
{name: "SRAI", argLength: 1, reg: gp11, asm: "SRAI", aux: "Int64"}, // arg0 >> auxint, signed, shift amount 0-63
{name: "SRAIW", argLength: 1, reg: gp11, asm: "SRAIW", aux: "Int64"}, // arg0 >> auxint, signed, shift amount 0-31
{name: "SRLI", argLength: 1, reg: gp11, asm: "SRLI", aux: "Int64"}, // arg0 >> auxint, unsigned, shift amount 0-63
{name: "SRLIW", argLength: 1, reg: gp11, asm: "SRLIW", aux: "Int64"}, // arg0 >> auxint, unsigned, shift amount 0-31
{name: "SLL", argLength: 2, reg: gp21, asm: "SLL"}, // arg0 << (aux1 & 63), logical left shift
{name: "SLLW", argLength: 2, reg: gp21, asm: "SLLW"}, // arg0 << (aux1 & 31), logical left shift of 32 bit value, sign extended to 64 bits
{name: "SRA", argLength: 2, reg: gp21, asm: "SRA"}, // arg0 >> (aux1 & 63), arithmetic right shift
{name: "SRAW", argLength: 2, reg: gp21, asm: "SRAW"}, // arg0 >> (aux1 & 31), arithmetic right shift of 32 bit value, sign extended to 64 bits
{name: "SRL", argLength: 2, reg: gp21, asm: "SRL"}, // arg0 >> (aux1 & 63), logical right shift
{name: "SRLW", argLength: 2, reg: gp21, asm: "SRLW"}, // arg0 >> (aux1 & 31), logical right shift of 32 bit value, sign extended to 64 bits
{name: "SLLI", argLength: 1, reg: gp11, asm: "SLLI", aux: "Int64"}, // arg0 << auxint, shift amount 0-63, logical left shift
{name: "SLLIW", argLength: 1, reg: gp11, asm: "SLLIW", aux: "Int64"}, // arg0 << auxint, shift amount 0-31, logical left shift of 32 bit value, sign extended to 64 bits
{name: "SRAI", argLength: 1, reg: gp11, asm: "SRAI", aux: "Int64"}, // arg0 >> auxint, shift amount 0-63, arithmetic right shift
{name: "SRAIW", argLength: 1, reg: gp11, asm: "SRAIW", aux: "Int64"}, // arg0 >> auxint, shift amount 0-31, arithmetic right shift of 32 bit value, sign extended to 64 bits
{name: "SRLI", argLength: 1, reg: gp11, asm: "SRLI", aux: "Int64"}, // arg0 >> auxint, shift amount 0-63, logical right shift
{name: "SRLIW", argLength: 1, reg: gp11, asm: "SRLIW", aux: "Int64"}, // arg0 >> auxint, shift amount 0-31, logical right shift of 32 bit value, sign extended to 64 bits
// Bitwise ops
{name: "XOR", argLength: 2, reg: gp21, asm: "XOR", commutative: true}, // arg0 ^ arg1

View File

@@ -2388,11 +2388,13 @@ const (
OpRISCV64MOVWUreg
OpRISCV64MOVDnop
OpRISCV64SLL
OpRISCV64SLLW
OpRISCV64SRA
OpRISCV64SRAW
OpRISCV64SRL
OpRISCV64SRLW
OpRISCV64SLLI
OpRISCV64SLLIW
OpRISCV64SRAI
OpRISCV64SRAIW
OpRISCV64SRLI
@@ -32045,6 +32047,20 @@ var opcodeTable = [...]opInfo{
},
},
},
{
name: "SLLW",
argLen: 2,
asm: riscv.ASLLW,
reg: regInfo{
inputs: []inputInfo{
{0, 1006632944}, // X5 X6 X7 X8 X9 X10 X11 X12 X13 X14 X15 X16 X17 X18 X19 X20 X21 X22 X23 X24 X25 X26 X28 X29 X30
{1, 1006632944}, // X5 X6 X7 X8 X9 X10 X11 X12 X13 X14 X15 X16 X17 X18 X19 X20 X21 X22 X23 X24 X25 X26 X28 X29 X30
},
outputs: []outputInfo{
{0, 1006632944}, // X5 X6 X7 X8 X9 X10 X11 X12 X13 X14 X15 X16 X17 X18 X19 X20 X21 X22 X23 X24 X25 X26 X28 X29 X30
},
},
},
{
name: "SRA",
argLen: 2,
@@ -32115,6 +32131,20 @@ var opcodeTable = [...]opInfo{
},
},
},
{
name: "SLLIW",
auxType: auxInt64,
argLen: 1,
asm: riscv.ASLLIW,
reg: regInfo{
inputs: []inputInfo{
{0, 1006632944}, // X5 X6 X7 X8 X9 X10 X11 X12 X13 X14 X15 X16 X17 X18 X19 X20 X21 X22 X23 X24 X25 X26 X28 X29 X30
},
outputs: []outputInfo{
{0, 1006632944}, // X5 X6 X7 X8 X9 X10 X11 X12 X13 X14 X15 X16 X17 X18 X19 X20 X21 X22 X23 X24 X25 X26 X28 X29 X30
},
},
},
{
name: "SRAI",
auxType: auxInt64,

View File

@@ -2144,7 +2144,7 @@ func canRotate(c *Config, bits int64) bool {
return false
}
switch c.arch {
case "386", "amd64", "arm64":
case "386", "amd64", "arm64", "riscv64":
return true
case "arm", "s390x", "ppc64", "ppc64le", "wasm", "loong64":
return bits >= 32

View File

@@ -536,6 +536,8 @@ func rewriteValueRISCV64(v *Value) bool {
return rewriteValueRISCV64_OpRISCV64SLL(v)
case OpRISCV64SLLI:
return rewriteValueRISCV64_OpRISCV64SLLI(v)
case OpRISCV64SLLW:
return rewriteValueRISCV64_OpRISCV64SLLW(v)
case OpRISCV64SLT:
return rewriteValueRISCV64_OpRISCV64SLT(v)
case OpRISCV64SLTI:
@@ -6070,6 +6072,24 @@ func rewriteValueRISCV64_OpRISCV64SLLI(v *Value) bool {
}
return false
}
func rewriteValueRISCV64_OpRISCV64SLLW(v *Value) bool {
v_1 := v.Args[1]
v_0 := v.Args[0]
// match: (SLLW x (MOVDconst [val]))
// result: (SLLIW [int64(val&31)] x)
for {
x := v_0
if v_1.Op != OpRISCV64MOVDconst {
break
}
val := auxIntToInt64(v_1.AuxInt)
v.reset(OpRISCV64SLLIW)
v.AuxInt = int64ToAuxInt(int64(val & 31))
v.AddArg(x)
return true
}
return false
}
func rewriteValueRISCV64_OpRISCV64SLT(v *Value) bool {
v_1 := v.Args[1]
v_0 := v.Args[0]
@@ -6644,112 +6664,102 @@ func rewriteValueRISCV64_OpRotateLeft16(v *Value) bool {
v_0 := v.Args[0]
b := v.Block
typ := &b.Func.Config.Types
// match: (RotateLeft16 <t> x (MOVDconst [c]))
// result: (Or16 (Lsh16x64 <t> x (MOVDconst [c&15])) (Rsh16Ux64 <t> x (MOVDconst [-c&15])))
// match: (RotateLeft16 <t> x y)
// result: (OR (SLL <t> x (ANDI [15] <y.Type> y)) (SRL <t> (ZeroExt16to64 x) (ANDI [15] <y.Type> (NEG <y.Type> y))))
for {
t := v.Type
x := v_0
if v_1.Op != OpRISCV64MOVDconst {
break
}
c := auxIntToInt64(v_1.AuxInt)
v.reset(OpOr16)
v0 := b.NewValue0(v.Pos, OpLsh16x64, t)
v1 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
v1.AuxInt = int64ToAuxInt(c & 15)
y := v_1
v.reset(OpRISCV64OR)
v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
v1 := b.NewValue0(v.Pos, OpRISCV64ANDI, y.Type)
v1.AuxInt = int64ToAuxInt(15)
v1.AddArg(y)
v0.AddArg2(x, v1)
v2 := b.NewValue0(v.Pos, OpRsh16Ux64, t)
v3 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
v3.AuxInt = int64ToAuxInt(-c & 15)
v2.AddArg2(x, v3)
v2 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
v3.AddArg(x)
v4 := b.NewValue0(v.Pos, OpRISCV64ANDI, y.Type)
v4.AuxInt = int64ToAuxInt(15)
v5 := b.NewValue0(v.Pos, OpRISCV64NEG, y.Type)
v5.AddArg(y)
v4.AddArg(v5)
v2.AddArg2(v3, v4)
v.AddArg2(v0, v2)
return true
}
return false
}
func rewriteValueRISCV64_OpRotateLeft32(v *Value) bool {
v_1 := v.Args[1]
v_0 := v.Args[0]
b := v.Block
typ := &b.Func.Config.Types
// match: (RotateLeft32 <t> x (MOVDconst [c]))
// result: (Or32 (Lsh32x64 <t> x (MOVDconst [c&31])) (Rsh32Ux64 <t> x (MOVDconst [-c&31])))
// match: (RotateLeft32 <t> x y)
// result: (OR (SLLW <t> x y) (SRLW <t> x (NEG <y.Type> y)))
for {
t := v.Type
x := v_0
if v_1.Op != OpRISCV64MOVDconst {
break
}
c := auxIntToInt64(v_1.AuxInt)
v.reset(OpOr32)
v0 := b.NewValue0(v.Pos, OpLsh32x64, t)
v1 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
v1.AuxInt = int64ToAuxInt(c & 31)
v0.AddArg2(x, v1)
v2 := b.NewValue0(v.Pos, OpRsh32Ux64, t)
v3 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
v3.AuxInt = int64ToAuxInt(-c & 31)
v2.AddArg2(x, v3)
v.AddArg2(v0, v2)
y := v_1
v.reset(OpRISCV64OR)
v0 := b.NewValue0(v.Pos, OpRISCV64SLLW, t)
v0.AddArg2(x, y)
v1 := b.NewValue0(v.Pos, OpRISCV64SRLW, t)
v2 := b.NewValue0(v.Pos, OpRISCV64NEG, y.Type)
v2.AddArg(y)
v1.AddArg2(x, v2)
v.AddArg2(v0, v1)
return true
}
return false
}
func rewriteValueRISCV64_OpRotateLeft64(v *Value) bool {
v_1 := v.Args[1]
v_0 := v.Args[0]
b := v.Block
typ := &b.Func.Config.Types
// match: (RotateLeft64 <t> x (MOVDconst [c]))
// result: (Or64 (Lsh64x64 <t> x (MOVDconst [c&63])) (Rsh64Ux64 <t> x (MOVDconst [-c&63])))
// match: (RotateLeft64 <t> x y)
// result: (OR (SLL <t> x y) (SRL <t> x (NEG <y.Type> y)))
for {
t := v.Type
x := v_0
if v_1.Op != OpRISCV64MOVDconst {
break
}
c := auxIntToInt64(v_1.AuxInt)
v.reset(OpOr64)
v0 := b.NewValue0(v.Pos, OpLsh64x64, t)
v1 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
v1.AuxInt = int64ToAuxInt(c & 63)
v0.AddArg2(x, v1)
v2 := b.NewValue0(v.Pos, OpRsh64Ux64, t)
v3 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
v3.AuxInt = int64ToAuxInt(-c & 63)
v2.AddArg2(x, v3)
v.AddArg2(v0, v2)
y := v_1
v.reset(OpRISCV64OR)
v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
v0.AddArg2(x, y)
v1 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
v2 := b.NewValue0(v.Pos, OpRISCV64NEG, y.Type)
v2.AddArg(y)
v1.AddArg2(x, v2)
v.AddArg2(v0, v1)
return true
}
return false
}
func rewriteValueRISCV64_OpRotateLeft8(v *Value) bool {
v_1 := v.Args[1]
v_0 := v.Args[0]
b := v.Block
typ := &b.Func.Config.Types
// match: (RotateLeft8 <t> x (MOVDconst [c]))
// result: (Or8 (Lsh8x64 <t> x (MOVDconst [c&7])) (Rsh8Ux64 <t> x (MOVDconst [-c&7])))
// match: (RotateLeft8 <t> x y)
// result: (OR (SLL <t> x (ANDI [7] <y.Type> y)) (SRL <t> (ZeroExt8to64 x) (ANDI [7] <y.Type> (NEG <y.Type> y))))
for {
t := v.Type
x := v_0
if v_1.Op != OpRISCV64MOVDconst {
break
}
c := auxIntToInt64(v_1.AuxInt)
v.reset(OpOr8)
v0 := b.NewValue0(v.Pos, OpLsh8x64, t)
v1 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
v1.AuxInt = int64ToAuxInt(c & 7)
y := v_1
v.reset(OpRISCV64OR)
v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
v1 := b.NewValue0(v.Pos, OpRISCV64ANDI, y.Type)
v1.AuxInt = int64ToAuxInt(7)
v1.AddArg(y)
v0.AddArg2(x, v1)
v2 := b.NewValue0(v.Pos, OpRsh8Ux64, t)
v3 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
v3.AuxInt = int64ToAuxInt(-c & 7)
v2.AddArg2(x, v3)
v2 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
v3.AddArg(x)
v4 := b.NewValue0(v.Pos, OpRISCV64ANDI, y.Type)
v4.AuxInt = int64ToAuxInt(7)
v5 := b.NewValue0(v.Pos, OpRISCV64NEG, y.Type)
v5.AddArg(y)
v4.AddArg(v5)
v2.AddArg2(v3, v4)
v.AddArg2(v0, v2)
return true
}
return false
}
func rewriteValueRISCV64_OpRsh16Ux16(v *Value) bool {
v_1 := v.Args[1]

View File

@@ -22,6 +22,7 @@ import (
"cmd/compile/internal/liveness"
"cmd/compile/internal/objw"
"cmd/compile/internal/reflectdata"
"cmd/compile/internal/rttype"
"cmd/compile/internal/ssa"
"cmd/compile/internal/staticdata"
"cmd/compile/internal/typecheck"
@@ -4894,22 +4895,22 @@ func InitTables() {
func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
return s.newValue2(ssa.OpRotateLeft8, types.Types[types.TUINT8], args[0], args[1])
},
sys.AMD64)
sys.AMD64, sys.RISCV64)
addF("math/bits", "RotateLeft16",
func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
return s.newValue2(ssa.OpRotateLeft16, types.Types[types.TUINT16], args[0], args[1])
},
sys.AMD64)
sys.AMD64, sys.RISCV64)
addF("math/bits", "RotateLeft32",
func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
return s.newValue2(ssa.OpRotateLeft32, types.Types[types.TUINT32], args[0], args[1])
},
sys.AMD64, sys.ARM, sys.ARM64, sys.S390X, sys.PPC64, sys.Wasm, sys.Loong64)
sys.AMD64, sys.ARM, sys.ARM64, sys.Loong64, sys.PPC64, sys.RISCV64, sys.S390X, sys.Wasm)
addF("math/bits", "RotateLeft64",
func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
return s.newValue2(ssa.OpRotateLeft64, types.Types[types.TUINT64], args[0], args[1])
},
sys.AMD64, sys.ARM64, sys.S390X, sys.PPC64, sys.Wasm, sys.Loong64)
sys.AMD64, sys.ARM64, sys.Loong64, sys.PPC64, sys.RISCV64, sys.S390X, sys.Wasm)
alias("math/bits", "RotateLeft", "math/bits", "RotateLeft64", p8...)
makeOnesCountAMD64 := func(op ssa.Op) func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
@@ -5537,7 +5538,7 @@ func (s *state) getClosureAndRcvr(fn *ir.SelectorExpr) (*ssa.Value, *ssa.Value)
i := s.expr(fn.X)
itab := s.newValue1(ssa.OpITab, types.Types[types.TUINTPTR], i)
s.nilCheck(itab)
itabidx := fn.Offset() + 2*int64(types.PtrSize) + 8 // offset of fun field in runtime.itab
itabidx := fn.Offset() + rttype.ITab.OffsetOf("Fun")
closure := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.UintptrPtr, itabidx, itab)
rcvr := s.newValue1(ssa.OpIData, s.f.Config.Types.BytePtr, i)
return closure, rcvr
@@ -6522,7 +6523,7 @@ func (s *state) dynamicDottype(n *ir.DynamicTypeAssertExpr, commaok bool) (res,
targetItab = s.expr(n.ITab)
// TODO(mdempsky): Investigate whether compiling n.RType could be
// better than loading itab.typ.
target = s.load(byteptr, s.newValue1I(ssa.OpOffPtr, byteptr, int64(types.PtrSize), targetItab)) // itab.typ
target = s.load(byteptr, s.newValue1I(ssa.OpOffPtr, byteptr, rttype.ITab.OffsetOf("Type"), targetItab))
} else {
target = s.expr(n.RType)
}
@@ -6580,7 +6581,7 @@ func (s *state) dottype1(pos src.XPos, src, dst *types.Type, iface, source, targ
return
}
// Load type out of itab, build interface with existing idata.
off := s.newValue1I(ssa.OpOffPtr, byteptr, int64(types.PtrSize), itab)
off := s.newValue1I(ssa.OpOffPtr, byteptr, rttype.ITab.OffsetOf("Type"), itab)
typ := s.load(byteptr, off)
idata := s.newValue1(ssa.OpIData, byteptr, iface)
res = s.newValue2(ssa.OpIMake, dst, typ, idata)
@@ -6590,7 +6591,7 @@ func (s *state) dottype1(pos src.XPos, src, dst *types.Type, iface, source, targ
s.startBlock(bOk)
// nonempty -> empty
// Need to load type from itab
off := s.newValue1I(ssa.OpOffPtr, byteptr, int64(types.PtrSize), itab)
off := s.newValue1I(ssa.OpOffPtr, byteptr, rttype.ITab.OffsetOf("Type"), itab)
s.vars[typVar] = s.load(byteptr, off)
s.endBlock()
@@ -6644,7 +6645,7 @@ func (s *state) dottype1(pos src.XPos, src, dst *types.Type, iface, source, targ
s.startBlock(bNonNil)
typ := itab
if !src.IsEmptyInterface() {
typ = s.load(byteptr, s.newValue1I(ssa.OpOffPtr, byteptr, int64(types.PtrSize), itab))
typ = s.load(byteptr, s.newValue1I(ssa.OpOffPtr, byteptr, rttype.ITab.OffsetOf("Type"), itab))
}
// Check the cache first.
@@ -6685,9 +6686,9 @@ func (s *state) dottype1(pos src.XPos, src, dst *types.Type, iface, source, targ
// Load hash from type or itab.
var hash *ssa.Value
if src.IsEmptyInterface() {
hash = s.newValue2(ssa.OpLoad, typs.UInt32, s.newValue1I(ssa.OpOffPtr, typs.UInt32Ptr, 2*s.config.PtrSize, typ), s.mem())
hash = s.newValue2(ssa.OpLoad, typs.UInt32, s.newValue1I(ssa.OpOffPtr, typs.UInt32Ptr, rttype.Type.OffsetOf("Hash"), typ), s.mem())
} else {
hash = s.newValue2(ssa.OpLoad, typs.UInt32, s.newValue1I(ssa.OpOffPtr, typs.UInt32Ptr, 2*s.config.PtrSize, itab), s.mem())
hash = s.newValue2(ssa.OpLoad, typs.UInt32, s.newValue1I(ssa.OpOffPtr, typs.UInt32Ptr, rttype.ITab.OffsetOf("Hash"), itab), s.mem())
}
hash = s.newValue1(zext, typs.Uintptr, hash)
s.vars[hashVar] = hash
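With riscv64 added to the math/bits rotate intrinsics earlier in this file's diff, ordinary rotate calls on that target are now recognized by the compiler just as on the other listed architectures; caller code does not change. A typical use, shown only for reference:

package main

import (
	"fmt"
	"math/bits"
)

func main() {
	// These calls are treated as compiler intrinsics on the architectures
	// listed in InitTables, which now include riscv64 for all four widths.
	fmt.Printf("%#08x\n", bits.RotateLeft32(0x12345678, 8)) // 0x34567812
	fmt.Printf("%#016x\n", bits.RotateLeft64(0x1, 63))      // 0x8000000000000000
	fmt.Printf("%#02x\n", bits.RotateLeft8(0b10000001, 1))  // 0x03
}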

View File

@@ -14,8 +14,16 @@ import (
"testing"
)
type devirtualization struct {
pos string
callee string
}
const profFileName = "devirt.pprof"
const preProfFileName = "devirt.pprof.node_map"
// testPGODevirtualize tests that specific PGO devirtualize rewrites are performed.
func testPGODevirtualize(t *testing.T, dir string) {
func testPGODevirtualize(t *testing.T, dir string, want []devirtualization, pgoProfileName string) {
testenv.MustHaveGoRun(t)
t.Parallel()
@@ -23,7 +31,7 @@ func testPGODevirtualize(t *testing.T, dir string) {
// Add a go.mod so we have consistent symbol names in this temp dir.
goMod := fmt.Sprintf(`module %s
go 1.19
go 1.21
`, pkg)
if err := os.WriteFile(filepath.Join(dir, "go.mod"), []byte(goMod), 0644); err != nil {
t.Fatalf("error writing go.mod: %v", err)
@@ -40,7 +48,7 @@ go 1.19
}
// Build the test with the profile.
pprof := filepath.Join(dir, "devirt.pprof")
pprof := filepath.Join(dir, pgoProfileName)
gcflag := fmt.Sprintf("-gcflags=-m=2 -pgoprofile=%s -d=pgodebug=3", pprof)
out := filepath.Join(dir, "test.exe")
cmd = testenv.CleanCmdEnv(testenv.Command(t, testenv.GoToolPath(t), "test", "-o", out, gcflag, "."))
@@ -60,51 +68,6 @@ go 1.19
t.Fatalf("error starting go test: %v", err)
}
type devirtualization struct {
pos string
callee string
}
want := []devirtualization{
// ExerciseIface
{
pos: "./devirt.go:101:20",
callee: "mult.Mult.Multiply",
},
{
pos: "./devirt.go:101:39",
callee: "Add.Add",
},
// ExerciseFuncConcrete
{
pos: "./devirt.go:173:36",
callee: "AddFn",
},
{
pos: "./devirt.go:173:15",
callee: "mult.MultFn",
},
// ExerciseFuncField
{
pos: "./devirt.go:207:35",
callee: "AddFn",
},
{
pos: "./devirt.go:207:19",
callee: "mult.MultFn",
},
// ExerciseFuncClosure
// TODO(prattmic): Closure callees not implemented.
//{
// pos: "./devirt.go:249:27",
// callee: "AddClosure.func1",
//},
//{
// pos: "./devirt.go:249:15",
// callee: "mult.MultClosure.func1",
//},
}
got := make(map[devirtualization]struct{})
devirtualizedLine := regexp.MustCompile(`(.*): PGO devirtualizing \w+ call .* to (.*)`)
@@ -166,11 +129,199 @@ func TestPGODevirtualize(t *testing.T) {
if err := os.Mkdir(filepath.Join(dir, "mult.pkg"), 0755); err != nil {
t.Fatalf("error creating dir: %v", err)
}
for _, file := range []string{"devirt.go", "devirt_test.go", "devirt.pprof", filepath.Join("mult.pkg", "mult.go")} {
for _, file := range []string{"devirt.go", "devirt_test.go", profFileName, filepath.Join("mult.pkg", "mult.go")} {
if err := copyFile(filepath.Join(dir, file), filepath.Join(srcDir, file)); err != nil {
t.Fatalf("error copying %s: %v", file, err)
}
}
testPGODevirtualize(t, dir)
want := []devirtualization{
// ExerciseIface
{
pos: "./devirt.go:101:20",
callee: "mult.Mult.Multiply",
},
{
pos: "./devirt.go:101:39",
callee: "Add.Add",
},
// ExerciseFuncConcrete
{
pos: "./devirt.go:173:36",
callee: "AddFn",
},
{
pos: "./devirt.go:173:15",
callee: "mult.MultFn",
},
// ExerciseFuncField
{
pos: "./devirt.go:207:35",
callee: "AddFn",
},
{
pos: "./devirt.go:207:19",
callee: "mult.MultFn",
},
// ExerciseFuncClosure
// TODO(prattmic): Closure callees not implemented.
//{
// pos: "./devirt.go:249:27",
// callee: "AddClosure.func1",
//},
//{
// pos: "./devirt.go:249:15",
// callee: "mult.MultClosure.func1",
//},
}
testPGODevirtualize(t, dir, want, profFileName)
}
// TestPGOPreprocessDevirtualize tests that specific functions are devirtualized when PGO
// is applied to the exact source that was profiled. The input profile is a PGO-preprocessed file.
func TestPGOPreprocessDevirtualize(t *testing.T) {
wd, err := os.Getwd()
if err != nil {
t.Fatalf("error getting wd: %v", err)
}
srcDir := filepath.Join(wd, "testdata", "pgo", "devirtualize")
// Copy the module to a scratch location so we can add a go.mod.
dir := t.TempDir()
if err := os.Mkdir(filepath.Join(dir, "mult.pkg"), 0755); err != nil {
t.Fatalf("error creating dir: %v", err)
}
for _, file := range []string{"devirt.go", "devirt_test.go", preProfFileName, filepath.Join("mult.pkg", "mult.go")} {
if err := copyFile(filepath.Join(dir, file), filepath.Join(srcDir, file)); err != nil {
t.Fatalf("error copying %s: %v", file, err)
}
}
want := []devirtualization{
// ExerciseIface
{
pos: "./devirt.go:101:20",
callee: "mult.Mult.Multiply",
},
{
pos: "./devirt.go:101:39",
callee: "Add.Add",
},
// ExerciseFuncConcrete
{
pos: "./devirt.go:173:36",
callee: "AddFn",
},
{
pos: "./devirt.go:173:15",
callee: "mult.MultFn",
},
// ExerciseFuncField
{
pos: "./devirt.go:207:35",
callee: "AddFn",
},
{
pos: "./devirt.go:207:19",
callee: "mult.MultFn",
},
// ExerciseFuncClosure
// TODO(prattmic): Closure callees not implemented.
//{
// pos: "./devirt.go:249:27",
// callee: "AddClosure.func1",
//},
//{
// pos: "./devirt.go:249:15",
// callee: "mult.MultClosure.func1",
//},
}
testPGODevirtualize(t, dir, want, preProfFileName)
}
// Regression test for https://go.dev/issue/65615. If a target function changes
// from non-generic to generic we can't devirtualize it (don't know the type
// parameters), but the compiler should not crash.
func TestLookupFuncGeneric(t *testing.T) {
wd, err := os.Getwd()
if err != nil {
t.Fatalf("error getting wd: %v", err)
}
srcDir := filepath.Join(wd, "testdata", "pgo", "devirtualize")
// Copy the module to a scratch location so we can add a go.mod.
dir := t.TempDir()
if err := os.Mkdir(filepath.Join(dir, "mult.pkg"), 0755); err != nil {
t.Fatalf("error creating dir: %v", err)
}
for _, file := range []string{"devirt.go", "devirt_test.go", profFileName, filepath.Join("mult.pkg", "mult.go")} {
if err := copyFile(filepath.Join(dir, file), filepath.Join(srcDir, file)); err != nil {
t.Fatalf("error copying %s: %v", file, err)
}
}
// Change MultFn from a concrete function to a parameterized function.
if err := convertMultToGeneric(filepath.Join(dir, "mult.pkg", "mult.go")); err != nil {
t.Fatalf("error editing mult.go: %v", err)
}
// Same as TestPGODevirtualize except for MultFn, which we cannot
// devirtualize to because it has become generic.
//
// Note that the important part of this test is that the build is
// successful, not the specific devirtualizations.
want := []devirtualization{
// ExerciseIface
{
pos: "./devirt.go:101:20",
callee: "mult.Mult.Multiply",
},
{
pos: "./devirt.go:101:39",
callee: "Add.Add",
},
// ExerciseFuncConcrete
{
pos: "./devirt.go:173:36",
callee: "AddFn",
},
// ExerciseFuncField
{
pos: "./devirt.go:207:35",
callee: "AddFn",
},
// ExerciseFuncClosure
// TODO(prattmic): Closure callees not implemented.
//{
// pos: "./devirt.go:249:27",
// callee: "AddClosure.func1",
//},
//{
// pos: "./devirt.go:249:15",
// callee: "mult.MultClosure.func1",
//},
}
testPGODevirtualize(t, dir, want, profFileName)
}
var multFnRe = regexp.MustCompile(`func MultFn\(a, b int64\) int64`)
func convertMultToGeneric(path string) error {
content, err := os.ReadFile(path)
if err != nil {
return fmt.Errorf("error opening: %w", err)
}
if !multFnRe.Match(content) {
return fmt.Errorf("MultFn not found; update regexp?")
}
// Users of MultFn shouldn't need adjustment; type inference should
// work OK.
content = multFnRe.ReplaceAll(content, []byte(`func MultFn[T int32|int64](a, b T) T`))
return os.WriteFile(path, content, 0644)
}

View File

@@ -18,6 +18,9 @@ import (
"testing"
)
const profFile = "inline_hot.pprof"
const preProfFile = "inline_hot.pprof.node_map"
func buildPGOInliningTest(t *testing.T, dir string, gcflag string) []byte {
const pkg = "example.com/pgo/inline"
@@ -43,12 +46,7 @@ go 1.19
}
// testPGOIntendedInlining tests that specific functions are inlined.
func testPGOIntendedInlining(t *testing.T, dir string, preprocessed ...bool) {
defaultPGOPackValue := false
if len(preprocessed) > 0 {
defaultPGOPackValue = preprocessed[0]
}
func testPGOIntendedInlining(t *testing.T, dir string, profFile string) {
testenv.MustHaveGoRun(t)
t.Parallel()
@@ -91,13 +89,7 @@ func testPGOIntendedInlining(t *testing.T, dir string, preprocessed ...bool) {
// Build the test with the profile. Use a smaller threshold to test.
// TODO: maybe adjust the test to work with default threshold.
var pprof string
if defaultPGOPackValue == false {
pprof = filepath.Join(dir, "inline_hot.pprof")
} else {
pprof = filepath.Join(dir, "inline_hot.pprof.node_map")
}
gcflag := fmt.Sprintf("-m -m -pgoprofile=%s -d=pgoinlinebudget=160,pgoinlinecdfthreshold=90", pprof)
gcflag := fmt.Sprintf("-m -m -pgoprofile=%s -d=pgoinlinebudget=160,pgoinlinecdfthreshold=90", profFile)
out := buildPGOInliningTest(t, dir, gcflag)
scanner := bufio.NewScanner(bytes.NewReader(out))
@@ -165,13 +157,13 @@ func TestPGOIntendedInlining(t *testing.T) {
// Copy the module to a scratch location so we can add a go.mod.
dir := t.TempDir()
for _, file := range []string{"inline_hot.go", "inline_hot_test.go", "inline_hot.pprof"} {
for _, file := range []string{"inline_hot.go", "inline_hot_test.go", profFile} {
if err := copyFile(filepath.Join(dir, file), filepath.Join(srcDir, file)); err != nil {
t.Fatalf("error copying %s: %v", file, err)
}
}
testPGOIntendedInlining(t, dir)
testPGOIntendedInlining(t, dir, profFile)
}
// TestPGOIntendedInlining tests that specific functions are inlined when PGO
@@ -186,13 +178,13 @@ func TestPGOPreprocessInlining(t *testing.T) {
// Copy the module to a scratch location so we can add a go.mod.
dir := t.TempDir()
for _, file := range []string{"inline_hot.go", "inline_hot_test.go", "inline_hot.pprof.node_map"} {
for _, file := range []string{"inline_hot.go", "inline_hot_test.go", preProfFile} {
if err := copyFile(filepath.Join(dir, file), filepath.Join(srcDir, file)); err != nil {
t.Fatalf("error copying %s: %v", file, err)
}
}
testPGOIntendedInlining(t, dir, true)
testPGOIntendedInlining(t, dir, preProfFile)
}
// TestPGOIntendedInlining tests that specific functions are inlined when PGO
@@ -208,7 +200,7 @@ func TestPGOIntendedInliningShiftedLines(t *testing.T) {
dir := t.TempDir()
// Copy most of the files unmodified.
for _, file := range []string{"inline_hot_test.go", "inline_hot.pprof"} {
for _, file := range []string{"inline_hot_test.go", profFile} {
if err := copyFile(filepath.Join(dir, file), filepath.Join(srcDir, file)); err != nil {
t.Fatalf("error copying %s : %v", file, err)
}
@@ -240,7 +232,7 @@ func TestPGOIntendedInliningShiftedLines(t *testing.T) {
dst.Close()
testPGOIntendedInlining(t, dir)
testPGOIntendedInlining(t, dir, profFile)
}
// TestPGOSingleIndex tests that the sample index can not be 1 and compilation
@@ -270,15 +262,15 @@ func TestPGOSingleIndex(t *testing.T) {
// Copy the module to a scratch location so we can add a go.mod.
dir := t.TempDir()
originalPprofFile, err := os.Open(filepath.Join(srcDir, "inline_hot.pprof"))
originalPprofFile, err := os.Open(filepath.Join(srcDir, profFile))
if err != nil {
t.Fatalf("error opening inline_hot.pprof: %v", err)
t.Fatalf("error opening %v: %v", profFile, err)
}
defer originalPprofFile.Close()
p, err := profile.Parse(originalPprofFile)
if err != nil {
t.Fatalf("error parsing inline_hot.pprof: %v", err)
t.Fatalf("error parsing %v: %v", profFile, err)
}
// Move the samples count value-type to the 0 index.
@@ -289,14 +281,14 @@ func TestPGOSingleIndex(t *testing.T) {
s.Value = []int64{s.Value[tc.originalIndex]}
}
modifiedPprofFile, err := os.Create(filepath.Join(dir, "inline_hot.pprof"))
modifiedPprofFile, err := os.Create(filepath.Join(dir, profFile))
if err != nil {
t.Fatalf("error creating inline_hot.pprof: %v", err)
t.Fatalf("error creating %v: %v", profFile, err)
}
defer modifiedPprofFile.Close()
if err := p.Write(modifiedPprofFile); err != nil {
t.Fatalf("error writing inline_hot.pprof: %v", err)
t.Fatalf("error writing %v: %v", profFile, err)
}
for _, file := range []string{"inline_hot.go", "inline_hot_test.go"} {
@@ -305,7 +297,7 @@ func TestPGOSingleIndex(t *testing.T) {
}
}
testPGOIntendedInlining(t, dir)
testPGOIntendedInlining(t, dir, profFile)
})
}
}
@@ -343,13 +335,13 @@ func TestPGOHash(t *testing.T) {
// Copy the module to a scratch location so we can add a go.mod.
dir := t.TempDir()
for _, file := range []string{"inline_hot.go", "inline_hot_test.go", "inline_hot.pprof"} {
for _, file := range []string{"inline_hot.go", "inline_hot_test.go", profFile} {
if err := copyFile(filepath.Join(dir, file), filepath.Join(srcDir, file)); err != nil {
t.Fatalf("error copying %s: %v", file, err)
}
}
pprof := filepath.Join(dir, "inline_hot.pprof")
pprof := filepath.Join(dir, profFile)
// build with -trimpath so the source location (thus the hash)
// does not depend on the temporary directory path.
gcflag0 := fmt.Sprintf("-pgoprofile=%s -trimpath %s=>%s -d=pgoinlinebudget=160,pgoinlinecdfthreshold=90,pgodebug=1", pprof, dir, pkg)

View File

@@ -0,0 +1,52 @@
GO PREPROFILE V1
example.com/pgo/devirtualize.ExerciseFuncClosure
example.com/pgo/devirtualize/mult%2epkg.MultClosure.func1
18 93
example.com/pgo/devirtualize.ExerciseIface
example.com/pgo/devirtualize/mult%2epkg.NegMult.Multiply
49 4
example.com/pgo/devirtualize.ExerciseFuncConcrete
example.com/pgo/devirtualize.AddFn
48 103
example.com/pgo/devirtualize.ExerciseFuncField
example.com/pgo/devirtualize/mult%2epkg.NegMultFn
23 8
example.com/pgo/devirtualize.ExerciseFuncField
example.com/pgo/devirtualize/mult%2epkg.MultFn
23 94
example.com/pgo/devirtualize.ExerciseIface
example.com/pgo/devirtualize/mult%2epkg.Mult.Multiply
49 40
example.com/pgo/devirtualize.ExerciseIface
example.com/pgo/devirtualize.Add.Add
49 55
example.com/pgo/devirtualize.ExerciseFuncConcrete
example.com/pgo/devirtualize/mult%2epkg.NegMultFn
48 8
example.com/pgo/devirtualize.ExerciseFuncClosure
example.com/pgo/devirtualize/mult%2epkg.NegMultClosure.func1
18 10
example.com/pgo/devirtualize.ExerciseIface
example.com/pgo/devirtualize.Sub.Add
49 7
example.com/pgo/devirtualize.ExerciseFuncField
example.com/pgo/devirtualize.AddFn
23 101
example.com/pgo/devirtualize.ExerciseFuncField
example.com/pgo/devirtualize.SubFn
23 12
example.com/pgo/devirtualize.BenchmarkDevirtFuncConcrete
example.com/pgo/devirtualize.ExerciseFuncConcrete
1 2
example.com/pgo/devirtualize.ExerciseFuncConcrete
example.com/pgo/devirtualize/mult%2epkg.MultFn
48 91
example.com/pgo/devirtualize.ExerciseFuncConcrete
example.com/pgo/devirtualize.SubFn
48 5
example.com/pgo/devirtualize.ExerciseFuncClosure
example.com/pgo/devirtualize.Add.Add
18 92
example.com/pgo/devirtualize.ExerciseFuncClosure
example.com/pgo/devirtualize.Sub.Add
18 14

View File

@@ -1,13 +1,13 @@
GO PREPROFILE V1
example.com/pgo/inline.benchmarkB
example.com/pgo/inline.A
18 17 0 1 1
18 1
example.com/pgo/inline.(*BS).NS
example.com/pgo/inline.T
13 53 124 129 2
8 3
example.com/pgo/inline.(*BS).NS
example.com/pgo/inline.T
8 53 124 129 3
13 2
example.com/pgo/inline.A
example.com/pgo/inline.(*BS).NS
7 74 1 130 129
7 129

View File

@@ -34,7 +34,7 @@ func AllowsGoVersion(major, minor int) bool {
}
// ParseLangFlag verifies that the -lang flag holds a valid value, and
// exits if not. It initializes data used by langSupported.
// exits if not. It initializes data used by AllowsGoVersion.
func ParseLangFlag() {
if base.Flag.Lang == "" {
return
@@ -59,6 +59,10 @@ func ParseLangFlag() {
// parseLang parses a -lang option into a langVer.
func parseLang(s string) (lang, error) {
if s == "go1" { // cmd/go's new spelling of "go1.0" (#65528)
s = "go1.0"
}
matches := goVersionRE.FindStringSubmatch(s)
if matches == nil {
return lang{}, fmt.Errorf(`should be something like "go1.12"`)

View File

@@ -22,7 +22,7 @@ func (check *Checker) builtin(x *operand, call *syntax.CallExpr, id builtinId) (
// append is the only built-in that permits the use of ... for the last argument
bin := predeclaredFuncs[id]
if call.HasDots && id != _Append {
if hasDots(call) && id != _Append {
//check.errorf(call.Ellipsis, invalidOp + "invalid use of ... with built-in %s", bin.name)
check.errorf(call,
InvalidDotDotDot,
@@ -114,7 +114,7 @@ func (check *Checker) builtin(x *operand, call *syntax.CallExpr, id builtinId) (
// spec: "As a special case, append also accepts a first argument assignable
// to type []byte with a second argument of string type followed by ... .
// This form appends the bytes of the string.
if nargs == 2 && call.HasDots {
if nargs == 2 && hasDots(call) {
if ok, _ := x.assignableTo(check, NewSlice(universeByte), nil); ok {
y := args[1]
if t := coreString(y.typ); t != nil && isString(t) {
@@ -1034,14 +1034,3 @@ func arrayPtrDeref(typ Type) Type {
}
return typ
}
// unparen returns e with any enclosing parentheses stripped.
func unparen(e syntax.Expr) syntax.Expr {
for {
p, ok := e.(*syntax.ParenExpr)
if !ok {
return e
}
e = p.X
}
}

View File

@@ -209,7 +209,7 @@ func (check *Checker) callExpr(x *operand, call *syntax.CallExpr) exprKind {
break
}
}
if call.HasDots {
if hasDots(call) {
check.errorf(call.ArgList[0], BadDotDotDotSyntax, "invalid use of ... in conversion to %s", T)
break
}
@@ -468,7 +468,7 @@ func (check *Checker) arguments(call *syntax.CallExpr, sig *Signature, targs []T
nargs := len(args)
npars := sig.params.Len()
ddd := call.HasDots
ddd := hasDots(call)
// set up parameters
sigParams := sig.params // adjusted for variadic functions (may be nil for empty parameter lists!)

View File

@@ -335,6 +335,12 @@ func (t *Named) NumMethods() int {
// For an ordinary or instantiated type t, the receiver base type of this
// method is the named type t. For an uninstantiated generic type t, each
// method receiver is instantiated with its receiver type parameters.
//
// Methods are numbered deterministically: given the same list of source files
// presented to the type checker, or the same sequence of NewMethod and AddMethod
// calls, the mapping from method index to corresponding method remains the same.
// But the specific ordering is not specified and must not be relied on as it may
// change in the future.
func (t *Named) Method(i int) *Func {
t.resolve()

View File

@@ -112,3 +112,51 @@ type Inst = *Tree[int]
t.Errorf("Duplicate instances in cycle: %s (%p) -> %s (%p) -> %s (%p)", Inst, Inst, Node, Node, Tree, Tree)
}
}
// TestMethodOrdering is a simple test verifying that the indices of methods of
// a named type remain the same as long as the same source and AddMethod calls
// are presented to the type checker in the same order (go.dev/issue/61298).
func TestMethodOrdering(t *testing.T) {
const src = `
package p
type T struct{}
func (T) a() {}
func (T) c() {}
func (T) b() {}
`
// should get the same method order each time
var methods []string
for i := 0; i < 5; i++ {
// collect T methods as provided in src
pkg := mustTypecheck(src, nil, nil)
T := pkg.Scope().Lookup("T").Type().(*Named)
// add a few more methods manually
for _, name := range []string{"foo", "bar", "bal"} {
m := NewFunc(nopos, pkg, name, nil /* don't care about signature */)
T.AddMethod(m)
}
// check method order
if i == 0 {
// first round: collect methods in given order
methods = make([]string, T.NumMethods())
for j := range methods {
methods[j] = T.Method(j).Name()
}
} else {
// successive rounds: methods must appear in the same order
if got := T.NumMethods(); got != len(methods) {
t.Errorf("got %d methods, want %d", got, len(methods))
continue
}
for j, m := range methods {
if got := T.Method(j).Name(); got != m {
t.Errorf("got method %s, want %s", got, m)
}
}
}
}
}

View File

@@ -20,3 +20,6 @@ import "cmd/compile/internal/syntax"
// If p and q are in different files, p is before q if the filename
// of p sorts lexicographically before the filename of q.
func cmpPos(p, q syntax.Pos) int { return p.Cmp(q) }
// hasDots reports whether the last argument in the call is followed by ...
func hasDots(call *syntax.CallExpr) bool { return call.HasDots }

View File

@@ -700,7 +700,7 @@ func typeHashFieldOf(pos src.XPos, itab *ir.UnaryExpr) *ir.SelectorExpr {
} else {
// runtime.itab's hash field
if itabHashField == nil {
itabHashField = runtimeField("hash", int64(2*types.PtrSize), types.Types[types.TUINT32])
itabHashField = runtimeField("hash", rttype.ITab.OffsetOf("Hash"), types.Types[types.TUINT32])
}
hashField = itabHashField
}

View File

@@ -10,6 +10,7 @@ import (
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
"cmd/compile/internal/reflectdata"
"cmd/compile/internal/rttype"
"cmd/compile/internal/ssagen"
"cmd/compile/internal/typecheck"
"cmd/compile/internal/types"
@@ -345,8 +346,8 @@ func mayCall(n ir.Node) bool {
// itabType loads the _type field from a runtime.itab struct.
func itabType(itab ir.Node) ir.Node {
if itabTypeField == nil {
// runtime.itab's _type field
itabTypeField = runtimeField("_type", int64(types.PtrSize), types.NewPtr(types.Types[types.TUINT8]))
// internal/abi.ITab's Type field
itabTypeField = runtimeField("Type", rttype.ITab.OffsetOf("Type"), types.NewPtr(types.Types[types.TUINT8]))
}
return boundedDotPtr(base.Pos, itab, itabTypeField)
}

src/cmd/dist/build.go (vendored)
View File

@@ -903,6 +903,20 @@ func runInstall(pkg string, ch chan struct{}) {
// Define GORISCV64_value from goriscv64
asmArgs = append(asmArgs, "-D", "GORISCV64_"+goriscv64)
}
if goarch == "arm" {
// Define GOARM_value from goarm, which can be either a version
// like "6", or a version and a FP mode, like "7,hardfloat".
switch {
case strings.Contains(goarm, "7"):
asmArgs = append(asmArgs, "-D", "GOARM_7")
fallthrough
case strings.Contains(goarm, "6"):
asmArgs = append(asmArgs, "-D", "GOARM_6")
fallthrough
default:
asmArgs = append(asmArgs, "-D", "GOARM_5")
}
}
goasmh := pathf("%s/go_asm.h", workdir)
// Collect symabis from assembly code.
@@ -1760,8 +1774,8 @@ var cgoEnabled = map[string]bool{
// get filtered out of cgoEnabled for 'dist list'.
// See go.dev/issue/56679.
var broken = map[string]bool{
"linux/sparc64": true, // An incomplete port. See CL 132155.
"openbsd/mips64": true, // Broken: go.dev/issue/58110.
"linux/sparc64": true, // An incomplete port. See CL 132155.
"openbsd/mips64": true, // Broken: go.dev/issue/58110.
}
// List of platforms which are first class ports. See go.dev/issue/38874.
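The arm branch added earlier in this file's hunk defines the GOARM_n assembler macros cumulatively: a goarm containing "7" yields GOARM_7, GOARM_6 and GOARM_5, "6" yields GOARM_6 and GOARM_5, and anything else only GOARM_5, via the fallthrough chain. A stand-alone sketch of that accumulation, using a hypothetical helper that mirrors the switch rather than calling dist code:

package main

import (
	"fmt"
	"strings"
)

// goarmDefines mirrors the fallthrough switch in runInstall: higher GOARM
// versions imply the defines of all lower ones.
func goarmDefines(goarm string) []string {
	var defs []string
	switch {
	case strings.Contains(goarm, "7"):
		defs = append(defs, "-D", "GOARM_7")
		fallthrough
	case strings.Contains(goarm, "6"):
		defs = append(defs, "-D", "GOARM_6")
		fallthrough
	default:
		defs = append(defs, "-D", "GOARM_5")
	}
	return defs
}

func main() {
	for _, v := range []string{"5", "6", "7,hardfloat"} {
		fmt.Println(v, "=>", goarmDefines(v))
	}
}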

View File

@@ -3,13 +3,13 @@ module cmd
go 1.23
require (
github.com/google/pprof v0.0.0-20230811205829-9131a7e9cc17
github.com/google/pprof v0.0.0-20240207164012-fb44976bdcd5
golang.org/x/arch v0.7.0
golang.org/x/build v0.0.0-20240201175143-3ee44a092755
golang.org/x/mod v0.14.0
golang.org/x/mod v0.15.1-0.20240207185259-766dc5df63e3
golang.org/x/sync v0.6.0
golang.org/x/sys v0.16.1-0.20240110015235-f69d32aa924f
golang.org/x/telemetry v0.0.0-20240131160148-1cb064e7d4f2
golang.org/x/sys v0.17.0
golang.org/x/telemetry v0.0.0-20240208185543-e9b074dd3804
golang.org/x/term v0.16.0
golang.org/x/tools v0.17.1-0.20240119231502-e1555a36d006
)

View File

@@ -1,23 +1,39 @@
github.com/chromedp/cdproto v0.0.0-20230802225258-3cf4e6d46a89 h1:aPflPkRFkVwbW6dmcVqfgwp1i+UWGFH6VgR1Jim5Ygc=
github.com/chromedp/cdproto v0.0.0-20230802225258-3cf4e6d46a89/go.mod h1:GKljq0VrfU4D5yc+2qA6OVr8pmO/MBbPEWqWQ/oqGEs=
github.com/chromedp/chromedp v0.9.2 h1:dKtNz4kApb06KuSXoTQIyUC2TrA0fhGDwNZf3bcgfKw=
github.com/chromedp/chromedp v0.9.2/go.mod h1:LkSXJKONWTCHAfQasKFUZI+mxqS4tZqhmtGzzhLsnLs=
github.com/chromedp/sysutil v1.0.0 h1:+ZxhTpfpZlmchB58ih/LBHX52ky7w2VhQVKQMucy3Ic=
github.com/chromedp/sysutil v1.0.0/go.mod h1:kgWmDdq8fTzXYcKIBqIYvRRTnYb9aNS9moAV0xufSww=
github.com/gobwas/httphead v0.1.0 h1:exrUm0f4YX0L7EBwZHuCF4GDp8aJfVeBrlLQrs6NqWU=
github.com/gobwas/httphead v0.1.0/go.mod h1:O/RXo79gxV8G+RqlR/otEwx4Q36zl9rqC5u12GKvMCM=
github.com/gobwas/pool v0.2.1 h1:xfeeEhW7pwmX8nuLVlqbzVc7udMDrwetjEv+TZIz1og=
github.com/gobwas/pool v0.2.1/go.mod h1:q8bcK0KcYlCgd9e7WYLm9LpyS+YeLd8JVDW6WezmKEw=
github.com/gobwas/ws v1.2.1 h1:F2aeBZrm2NDsc7vbovKrWSogd4wvfAxg0FQ89/iqOTk=
github.com/gobwas/ws v1.2.1/go.mod h1:hRKAFb8wOxFROYNsT1bqfWnhX+b5MFeJM9r2ZSwg/KY=
github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38=
github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
github.com/google/pprof v0.0.0-20230811205829-9131a7e9cc17 h1:0h35ESZ02+hN/MFZb7XZOXg+Rl9+Rk8fBIf5YLws9gA=
github.com/google/pprof v0.0.0-20230811205829-9131a7e9cc17/go.mod h1:Jh3hGz2jkYak8qXPD19ryItVnUgpgeqzdkY/D0EaeuA=
github.com/google/pprof v0.0.0-20240207164012-fb44976bdcd5 h1:E/LAvt58di64hlYjx7AsNS6C/ysHWYo+2qPCZKTQhRo=
github.com/google/pprof v0.0.0-20240207164012-fb44976bdcd5/go.mod h1:czg5+yv1E0ZGTi6S6vVK1mke0fV+FaUhNGcd6VRS9Ik=
github.com/ianlancetaylor/demangle v0.0.0-20230524184225-eabc099b10ab h1:BA4a7pe6ZTd9F8kXETBoijjFJ/ntaa//1wiH9BZu4zU=
github.com/ianlancetaylor/demangle v0.0.0-20230524184225-eabc099b10ab/go.mod h1:gx7rwoVhcfuVKG5uya9Hs3Sxj7EIvldVofAWIUtGouw=
github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY=
github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y=
github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0=
github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc=
github.com/yuin/goldmark v1.6.0 h1:boZcn2GTjpsynOsC0iJHnBWa4Bi0qzfJjthwauItG68=
github.com/yuin/goldmark v1.6.0/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
golang.org/x/arch v0.7.0 h1:pskyeJh/3AmoQ8CPE95vxHLqp1G1GfGNXTmcl9NEKTc=
golang.org/x/arch v0.7.0/go.mod h1:FEVrYAQjsQXMVJ1nsMoVVXPZg6p2JE2mx8psSWTDQys=
golang.org/x/build v0.0.0-20240201175143-3ee44a092755 h1:irSM9p93GT4I3+Pu/grZlkwIjrXA3GfyKwlSosVbmtU=
golang.org/x/build v0.0.0-20240201175143-3ee44a092755/go.mod h1:RHSzqFUzT4+buJlGik6WptO5NxLQiR/ewD2uz3fgWuA=
golang.org/x/mod v0.14.0 h1:dGoOF9QVLYng8IHTm7BAyWqCqSheQ5pYWGhzW00YJr0=
golang.org/x/mod v0.14.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
golang.org/x/mod v0.15.1-0.20240207185259-766dc5df63e3 h1:/p/VemLWiTsjHqHwME1Iu+xIu8s9fBtwBk8bU/ejA1A=
golang.org/x/mod v0.15.1-0.20240207185259-766dc5df63e3/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
golang.org/x/sync v0.6.0 h1:5BMeUDZ7vkXGfEr1x9B4bRcTH4lpkTkpdh0T/J+qjbQ=
golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sys v0.16.1-0.20240110015235-f69d32aa924f h1:GvGFYRZ5kIldzXQj3UmUiUTMe5spPODuLKQvP38A+Qc=
golang.org/x/sys v0.16.1-0.20240110015235-f69d32aa924f/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/telemetry v0.0.0-20240131160148-1cb064e7d4f2 h1:FXbfUwJ0hJkKMC/Cj47x49pH41jylMW5eMiIrJgmv2E=
golang.org/x/telemetry v0.0.0-20240131160148-1cb064e7d4f2/go.mod h1:ZthVHHkOi8rlMEsfFr3Ie42Ym1NonbFNNRKW3ci0UrU=
golang.org/x/sys v0.17.0 h1:25cE3gD+tdBA7lp7QfhuV+rJiE9YXTcS3VG1SqssI/Y=
golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/telemetry v0.0.0-20240208185543-e9b074dd3804 h1:mLYQpgq+cJOnmn3pR2U9o5rzEuOVgnmw59GHPgypGeo=
golang.org/x/telemetry v0.0.0-20240208185543-e9b074dd3804/go.mod h1:KG1lNk5ZFNssSZLrpVb4sMXKMpGwGXOxSG3rnu2gZQQ=
golang.org/x/term v0.16.0 h1:m+B6fahuftsE9qjo0VWp2FW0mB3MTJvR0BaMQrq0pmE=
golang.org/x/term v0.16.0/go.mod h1:yn7UURbUtPyrVJPGPq404EukNFxcm/foM+bV/bfcDsY=
golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ=

View File

@ -1004,6 +1004,8 @@
// Retracted []string // retraction information, if any (with -retracted or -u)
// Deprecated string // deprecation message, if any (with -u)
// Error *ModuleError // error loading module
// Sum string // checksum for path, version (as in go.sum)
// GoModSum string // checksum for go.mod (as in go.sum)
// Origin any // provenance of module
// Reuse bool // reuse of old module info is safe
// }

View File

@ -245,6 +245,8 @@ applied to a Go struct, but now a Module struct:
Retracted []string // retraction information, if any (with -retracted or -u)
Deprecated string // deprecation message, if any (with -u)
Error *ModuleError // error loading module
Sum string // checksum for path, version (as in go.sum)
GoModSum string // checksum for go.mod (as in go.sum)
Origin any // provenance of module
Reuse bool // reuse of old module info is safe
}
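
To make the new Sum and GoModSum fields concrete, here is a minimal sketch (not part of this change) of consuming them: it shells out to "go list -m -json all", decodes only the fields it needs, and must be run inside a module.

// listsums.go: a standalone sketch; the struct mirrors only the fields used.
package main

import (
	"encoding/json"
	"fmt"
	"io"
	"log"
	"os/exec"
)

type modInfo struct {
	Path     string
	Version  string
	Sum      string // checksum for path, version (as in go.sum)
	GoModSum string // checksum for go.mod (as in go.sum)
}

func main() {
	cmd := exec.Command("go", "list", "-m", "-json", "all")
	out, err := cmd.StdoutPipe()
	if err != nil {
		log.Fatal(err)
	}
	if err := cmd.Start(); err != nil {
		log.Fatal(err)
	}
	dec := json.NewDecoder(out) // the output is a stream of JSON objects
	for {
		var m modInfo
		if err := dec.Decode(&m); err == io.EOF {
			break
		} else if err != nil {
			log.Fatal(err)
		}
		fmt.Printf("%s@%s sum=%q gomodsum=%q\n", m.Path, m.Version, m.Sum, m.GoModSum)
	}
	if err := cmd.Wait(); err != nil {
		log.Fatal(err)
	}
}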

View File

@ -2306,7 +2306,7 @@ func (p *Package) setBuildInfo(ctx context.Context, autoVCS bool) {
}
if mi.Replace != nil {
dm.Replace = debugModFromModinfo(mi.Replace)
} else if mi.Version != "" {
} else if mi.Version != "" && cfg.BuildMod != "vendor" {
dm.Sum = modfetch.Sum(ctx, module.Version{Path: mi.Path, Version: mi.Version})
}
return dm

View File

@ -569,6 +569,47 @@ func HaveSum(mod module.Version) bool {
return false
}
// RecordedSum returns the sum if the go.sum file contains an entry for mod.
// The boolean ok reports whether a usable entry was found;
// it is false if no entry exists or if two conflicting sums are recorded.
// The entry's hash must be generated with a known hash algorithm.
// mod.Version may have a "/go.mod" suffix to distinguish sums for
// .mod and .zip files.
func RecordedSum(mod module.Version) (sum string, ok bool) {
goSum.mu.Lock()
defer goSum.mu.Unlock()
inited, err := initGoSum()
foundSum := ""
if err != nil || !inited {
return "", false
}
for _, goSums := range goSum.w {
for _, h := range goSums[mod] {
if !strings.HasPrefix(h, "h1:") {
continue
}
if !goSum.status[modSum{mod, h}].dirty {
if foundSum != "" && foundSum != h { // conflicting sums exist
return "", false
}
foundSum = h
}
}
}
for _, h := range goSum.m[mod] {
if !strings.HasPrefix(h, "h1:") {
continue
}
if !goSum.status[modSum{mod, h}].dirty {
if foundSum != "" && foundSum != h { // conflicting sums exist
return "", false
}
foundSum = h
}
}
return foundSum, true
}
// checkMod checks the given module's checksum and Go version.
func checkMod(ctx context.Context, mod module.Version) {
// Do the file I/O before acquiring the go.sum lock.
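
As a side note, the selection rule in RecordedSum can be illustrated with a standalone sketch. The helper name pickSum is invented; it only mirrors the conflict-detection rule from the loops above (the dirty-entry bookkeeping is omitted): ignore hashes that are not "h1:", and give up as soon as two recorded sums disagree.

// picksum_sketch.go: illustrative only, not the cmd/go implementation.
package main

import (
	"fmt"
	"strings"
)

func pickSum(entries []string) (sum string, ok bool) {
	found := ""
	for _, h := range entries {
		if !strings.HasPrefix(h, "h1:") {
			continue // unknown hash algorithm
		}
		if found != "" && found != h {
			return "", false // conflicting sums exist
		}
		found = h
	}
	return found, true
}

func main() {
	fmt.Println(pickSum([]string{"h1:abc=", "h1:abc="})) // h1:abc= true
	fmt.Println(pickSum([]string{"h1:abc=", "h1:xyz="})) // "" false
}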

View File

@ -124,7 +124,7 @@ var (
errNotFromModuleCache = fmt.Errorf("%w: not from module cache", ErrNotIndexed)
)
// GetPackage returns the IndexPackage for the package at the given path.
// GetPackage returns the IndexPackage for the directory at the given path.
// It will return ErrNotIndexed if the directory should be read without
// using the index, for instance because the index is disabled, or the package
// is not in a module.
@ -669,11 +669,9 @@ func IsStandardPackage(goroot_, compiler, path string) bool {
reldir = str.TrimFilePathPrefix(reldir, "cmd")
modroot = filepath.Join(modroot, "cmd")
}
if _, err := GetPackage(modroot, filepath.Join(modroot, reldir)); err == nil {
// Note that goroot.IsStandardPackage doesn't check that the directory
// actually contains any go files-- merely that it exists. GetPackage
// returning a nil error is enough for us to know the directory exists.
return true
if pkg, err := GetPackage(modroot, filepath.Join(modroot, reldir)); err == nil {
hasGo, err := pkg.IsDirWithGoFiles()
return err == nil && hasGo
} else if errors.Is(err, ErrNotIndexed) {
// Fall back because package isn't indexable. (Probably because
// a file was modified recently)
@ -786,8 +784,8 @@ func shouldBuild(sf *sourceFile, tags map[string]bool) bool {
return true
}
// IndexPackage holds the information needed to access information in the
// index needed to load a package in a specific directory.
// IndexPackage holds the information in the index
// needed to load a package in a specific directory.
type IndexPackage struct {
error error
dir string // directory of the package relative to the modroot
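
A rough, self-contained illustration of the behavioral change (go.dev/issue/65406): a directory only counts as a package when it actually holds Go files, not merely because it exists. The helper below is a filesystem approximation; the real check goes through the module index via IsDirWithGoFiles.

// hasgofiles.go: an approximation with an invented helper name.
package main

import (
	"fmt"
	"log"
	"os"
	"strings"
)

func hasGoFiles(dir string) (bool, error) {
	entries, err := os.ReadDir(dir)
	if err != nil {
		return false, err
	}
	for _, e := range entries {
		if e.Type().IsRegular() && strings.HasSuffix(e.Name(), ".go") {
			return true, nil
		}
	}
	return false, nil
}

func main() {
	ok, err := hasGoFiles(".")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("contains Go files:", ok)
}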

View File

@ -14,24 +14,25 @@ import (
// and the fields are documented in the help text in ../list/list.go
type ModulePublic struct {
Path string `json:",omitempty"` // module path
Version string `json:",omitempty"` // module version
Query string `json:",omitempty"` // version query corresponding to this version
Versions []string `json:",omitempty"` // available module versions
Replace *ModulePublic `json:",omitempty"` // replaced by this module
Time *time.Time `json:",omitempty"` // time version was created
Update *ModulePublic `json:",omitempty"` // available update (with -u)
Main bool `json:",omitempty"` // is this the main module?
Indirect bool `json:",omitempty"` // module is only indirectly needed by main module
Dir string `json:",omitempty"` // directory holding local copy of files, if any
GoMod string `json:",omitempty"` // path to go.mod file describing module, if any
GoVersion string `json:",omitempty"` // go version used in module
Retracted []string `json:",omitempty"` // retraction information, if any (with -retracted or -u)
Deprecated string `json:",omitempty"` // deprecation message, if any (with -u)
Error *ModuleError `json:",omitempty"` // error loading module
Origin *codehost.Origin `json:",omitempty"` // provenance of module
Reuse bool `json:",omitempty"` // reuse of old module info is safe
Path string `json:",omitempty"` // module path
Version string `json:",omitempty"` // module version
Query string `json:",omitempty"` // version query corresponding to this version
Versions []string `json:",omitempty"` // available module versions
Replace *ModulePublic `json:",omitempty"` // replaced by this module
Time *time.Time `json:",omitempty"` // time version was created
Update *ModulePublic `json:",omitempty"` // available update (with -u)
Main bool `json:",omitempty"` // is this the main module?
Indirect bool `json:",omitempty"` // module is only indirectly needed by main module
Dir string `json:",omitempty"` // directory holding local copy of files, if any
GoMod string `json:",omitempty"` // path to go.mod file describing module, if any
GoVersion string `json:",omitempty"` // go version used in module
Retracted []string `json:",omitempty"` // retraction information, if any (with -retracted or -u)
Deprecated string `json:",omitempty"` // deprecation message, if any (with -u)
Error *ModuleError `json:",omitempty"` // error loading module
Sum string `json:",omitempty"` // checksum for path, version (as in go.sum)
GoModSum string `json:",omitempty"` // checksum for go.mod (as in go.sum)
Origin *codehost.Origin `json:",omitempty"` // provenance of module
Reuse bool `json:",omitempty"` // reuse of old module info is safe
}
type ModuleError struct {

View File

@ -364,12 +364,18 @@ func moduleInfo(ctx context.Context, rs *Requirements, m module.Version, mode Li
m.GoMod = gomod
}
}
if gomodsum, ok := modfetch.RecordedSum(modkey(mod)); ok {
m.GoModSum = gomodsum
}
}
if checksumOk("") {
dir, err := modfetch.DownloadDir(ctx, mod)
if err == nil {
m.Dir = dir
}
if sum, ok := modfetch.RecordedSum(mod); ok {
m.Sum = sum
}
}
if mode&ListRetracted != 0 {

View File

@ -367,12 +367,13 @@ func asmArgs(a *Action, p *load.Package) []any {
}
if cfg.Goarch == "arm" {
// Define GOARM_value from cfg.GOARM.
switch cfg.GOARM {
case "7":
// Define GOARM_value from cfg.GOARM, which can be either a version
// like "6", or a version and a FP mode, like "7,hardfloat".
switch {
case strings.Contains(cfg.GOARM, "7"):
args = append(args, "-D", "GOARM_7")
fallthrough
case "6":
case strings.Contains(cfg.GOARM, "6"):
args = append(args, "-D", "GOARM_6")
fallthrough
default:
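
The new GOARM handling accepts values such as "6", "7" or "7,hardfloat". A hedged sketch of the resulting define set follows; goarmDefines is an invented helper, and the GOARM_5 baseline in the default case is an assumption, since the hunk above ends before it.

// goarmdefs_sketch.go: mirrors the cumulative fallthrough pattern above.
package main

import (
	"fmt"
	"strings"
)

func goarmDefines(goarm string) []string {
	// Split off an optional FP suffix such as ",hardfloat"; the GOARM_*
	// version defines in this sketch depend only on the version part.
	version, _, _ := strings.Cut(goarm, ",")
	var defs []string
	switch version {
	case "7":
		defs = append(defs, "GOARM_7")
		fallthrough
	case "6":
		defs = append(defs, "GOARM_6")
		fallthrough
	default:
		defs = append(defs, "GOARM_5") // assumed baseline, not shown in the hunk
	}
	return defs
}

func main() {
	for _, v := range []string{"5", "6", "7,hardfloat"} {
		fmt.Println(v, "=>", goarmDefines(v))
	}
}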

View File

@ -97,6 +97,7 @@ func main() {
flag.Usage = base.Usage
flag.Parse()
counter.CountFlags("cmd/go:flag-", *flag.CommandLine)
args := flag.Args()
if len(args) < 1 {
@ -152,6 +153,7 @@ func main() {
cmd, used := lookupCmd(args)
cfg.CmdName = strings.Join(args[:used], " ")
counter.Inc("cmd/go:subcommand-" + strings.ReplaceAll(cfg.CmdName, " ", "-"))
if len(cmd.Commands) > 0 {
if used >= len(args) {
help.PrintUsage(os.Stderr, cmd)
@ -239,6 +241,7 @@ func invoke(cmd *base.Command, args []string) {
} else {
base.SetFromGOFLAGS(&cmd.Flag)
cmd.Flag.Parse(args[1:])
counter.CountFlags("cmd/go/"+cmd.Name()+":flag-", cmd.Flag)
args = cmd.Flag.Args()
}
@ -323,6 +326,7 @@ func handleChdirFlag() {
_, dir, _ = strings.Cut(a, "=")
os.Args = slices.Delete(os.Args, used, used+1)
}
counter.Inc("cmd/go:flag-C")
if err := os.Chdir(dir); err != nil {
base.Fatalf("go: %v", err)

View File

@ -0,0 +1,9 @@
go build
-- go.mod --
module test
go 1.0
-- p.go --
package p

View File

@ -0,0 +1,11 @@
# Issue 65406. The testdata directory in GOROOT/src
# shouldn't be treated as a standard package.
go list -f '{{.ImportPath}} {{.Dir}}' testdata
! stderr 'found package testdata in multiple modules'
stdout 'testdata '$WORK${/}'gopath'${/}'src'
-- go.mod --
module testdata
-- p.go --
package p

View File

@ -0,0 +1,32 @@
# This test verifies that GOMODCACHE does not affect whether checksums are embedded
# with vendored files.
# See issue #46400
[short] skip 'builds and links a binary twice'
go mod tidy
go mod vendor
go build -mod=vendor
go version -m example$GOEXE
cp stdout version-m.txt
env GOMODCACHE=$WORK${/}modcache
go build -mod=vendor
go version -m example$GOEXE
cmp stdout version-m.txt
-- go.mod --
module example
go 1.22
require rsc.io/sampler v1.3.0
-- main.go --
package main
import (
"fmt"
"rsc.io/sampler"
)
func main() {
fmt.Println(sampler.Hello())
}

View File

@ -44,9 +44,9 @@ stderr '^go: module rsc.io/quote/buggy: not a known dependency'
# Module loader does not interfere with list -e (golang.org/issue/24149).
go list -e -f '{{.Error.Err}}' database
stdout 'no Go files in '
stdout 'package database is not in std'
! go list database
stderr 'no Go files in '
stderr 'package database is not in std'
-- go.mod --
module x

View File

@ -0,0 +1,16 @@
go mod tidy
go list -m -json all
stdout '"GoModSum":\s+"h1:.+"'
stdout '"Sum":\s+"h1:.+"'
-- go.mod --
module example
go 1.21
require rsc.io/quote v1.5.1
-- example.go --
package example
import _ "rsc.io/quote"

View File

@ -422,15 +422,18 @@ const (
C_U15CON /* 15 bit unsigned constant */
C_S16CON /* 16 bit signed constant */
C_U16CON /* 16 bit unsigned constant */
C_16CON /* Any constant which fits into 16 bits. Can be signed or unsigned */
C_U31CON /* 31 bit unsigned constant */
C_S32CON /* 32 bit signed constant */
C_U32CON /* 32 bit unsigned constant */
C_32CON /* Any constant which fits into 32 bits. Can be signed or unsigned */
C_S34CON /* 34 bit signed constant */
C_64CON /* Any constant which fits into 64 bits. Can be signed or unsigned */
C_SACON /* $n(REG) where n <= int16 */
C_LACON /* $n(REG) where n <= int32 */
C_DACON /* $n(REG) where n <= int64 */
C_SBRA /* A short offset argument to a branching instruction */
C_LBRA /* A long offset argument to a branching instruction */
C_LBRAPIC /* Like C_LBRA, but requires an extra NOP for potential TOC restore by the linker. */
C_BRA /* A short offset argument to a branching instruction */
C_BRAPIC /* Like C_BRA, but requires an extra NOP for potential TOC restore by the linker. */
C_ZOREG /* An $0+reg memory op */
C_SOREG /* An $n+reg memory arg where n is a 16 bit signed offset */
C_LOREG /* An $n+reg memory arg where n is a 32 bit signed offset */
@ -446,16 +449,6 @@ const (
C_TEXTSIZE /* An argument with Type obj.TYPE_TEXTSIZE */
C_NCLASS /* must be the last */
/* Aliased names which should be cleaned up, or integrated. */
C_SCON = C_U15CON
C_ADDCON = C_S16CON
C_ANDCON = C_U16CON
C_LCON = C_32CON
/* Aliased names which may be generated by ppc64map for the optab. */
C_S32CON = C_32CON
C_U32CON = C_32CON
)
const (

View File

@ -27,15 +27,18 @@ var cnames9 = []string{
"U15CON",
"S16CON",
"U16CON",
"16CON",
"U31CON",
"S32CON",
"U32CON",
"32CON",
"S34CON",
"64CON",
"SACON",
"LACON",
"DACON",
"SBRA",
"LBRA",
"LBRAPIC",
"BRA",
"BRAPIC",
"ZOREG",
"SOREG",
"LOREG",

View File

@ -110,60 +110,56 @@ var optab []Optab
var optabBase = []Optab{
{as: obj.ATEXT, a1: C_LOREG, a6: C_TEXTSIZE, type_: 0, size: 0},
{as: obj.ATEXT, a1: C_LOREG, a3: C_LCON, a6: C_TEXTSIZE, type_: 0, size: 0},
{as: obj.ATEXT, a1: C_LOREG, a3: C_32CON, a6: C_TEXTSIZE, type_: 0, size: 0},
{as: obj.ATEXT, a1: C_ADDR, a6: C_TEXTSIZE, type_: 0, size: 0},
{as: obj.ATEXT, a1: C_ADDR, a3: C_LCON, a6: C_TEXTSIZE, type_: 0, size: 0},
{as: obj.ATEXT, a1: C_ADDR, a3: C_32CON, a6: C_TEXTSIZE, type_: 0, size: 0},
/* move register */
{as: AADD, a1: C_REG, a2: C_REG, a6: C_REG, type_: 2, size: 4},
{as: AADD, a1: C_REG, a6: C_REG, type_: 2, size: 4},
{as: AADD, a1: C_SCON, a2: C_REG, a6: C_REG, type_: 4, size: 4},
{as: AADD, a1: C_SCON, a6: C_REG, type_: 4, size: 4},
{as: AADD, a1: C_ADDCON, a2: C_REG, a6: C_REG, type_: 4, size: 4},
{as: AADD, a1: C_ADDCON, a6: C_REG, type_: 4, size: 4},
{as: AADD, a1: C_ANDCON, a2: C_REG, a6: C_REG, type_: 22, size: 8},
{as: AADD, a1: C_ANDCON, a6: C_REG, type_: 22, size: 8},
{as: AADDIS, a1: C_ADDCON, a2: C_REG, a6: C_REG, type_: 20, size: 4},
{as: AADDIS, a1: C_ADDCON, a6: C_REG, type_: 20, size: 4},
{as: AADD, a1: C_S16CON, a2: C_REG, a6: C_REG, type_: 4, size: 4},
{as: AADD, a1: C_S16CON, a6: C_REG, type_: 4, size: 4},
{as: AADD, a1: C_U16CON, a2: C_REG, a6: C_REG, type_: 22, size: 8},
{as: AADD, a1: C_U16CON, a6: C_REG, type_: 22, size: 8},
{as: AADDIS, a1: C_S16CON, a2: C_REG, a6: C_REG, type_: 20, size: 4},
{as: AADDIS, a1: C_S16CON, a6: C_REG, type_: 20, size: 4},
{as: AADDC, a1: C_REG, a2: C_REG, a6: C_REG, type_: 2, size: 4},
{as: AADDC, a1: C_REG, a6: C_REG, type_: 2, size: 4},
{as: AADDC, a1: C_ADDCON, a2: C_REG, a6: C_REG, type_: 4, size: 4},
{as: AADDC, a1: C_ADDCON, a6: C_REG, type_: 4, size: 4},
{as: AADDC, a1: C_LCON, a2: C_REG, a6: C_REG, type_: 22, size: 12},
{as: AADDC, a1: C_LCON, a6: C_REG, type_: 22, size: 12},
{as: AADDC, a1: C_S16CON, a2: C_REG, a6: C_REG, type_: 4, size: 4},
{as: AADDC, a1: C_S16CON, a6: C_REG, type_: 4, size: 4},
{as: AADDC, a1: C_32CON, a2: C_REG, a6: C_REG, type_: 22, size: 12},
{as: AADDC, a1: C_32CON, a6: C_REG, type_: 22, size: 12},
{as: AAND, a1: C_REG, a2: C_REG, a6: C_REG, type_: 6, size: 4}, /* logical, no literal */
{as: AAND, a1: C_REG, a6: C_REG, type_: 6, size: 4},
{as: AANDCC, a1: C_REG, a2: C_REG, a6: C_REG, type_: 6, size: 4},
{as: AANDCC, a1: C_REG, a6: C_REG, type_: 6, size: 4},
{as: AANDCC, a1: C_ANDCON, a6: C_REG, type_: 58, size: 4},
{as: AANDCC, a1: C_ANDCON, a2: C_REG, a6: C_REG, type_: 58, size: 4},
{as: AANDCC, a1: C_ADDCON, a6: C_REG, type_: 23, size: 8},
{as: AANDCC, a1: C_ADDCON, a2: C_REG, a6: C_REG, type_: 23, size: 8},
{as: AANDCC, a1: C_LCON, a6: C_REG, type_: 23, size: 12},
{as: AANDCC, a1: C_LCON, a2: C_REG, a6: C_REG, type_: 23, size: 12},
{as: AANDISCC, a1: C_ANDCON, a6: C_REG, type_: 58, size: 4},
{as: AANDISCC, a1: C_ANDCON, a2: C_REG, a6: C_REG, type_: 58, size: 4},
{as: AANDCC, a1: C_U16CON, a6: C_REG, type_: 58, size: 4},
{as: AANDCC, a1: C_U16CON, a2: C_REG, a6: C_REG, type_: 58, size: 4},
{as: AANDCC, a1: C_S16CON, a6: C_REG, type_: 23, size: 8},
{as: AANDCC, a1: C_S16CON, a2: C_REG, a6: C_REG, type_: 23, size: 8},
{as: AANDCC, a1: C_32CON, a6: C_REG, type_: 23, size: 12},
{as: AANDCC, a1: C_32CON, a2: C_REG, a6: C_REG, type_: 23, size: 12},
{as: AANDISCC, a1: C_U16CON, a6: C_REG, type_: 58, size: 4},
{as: AANDISCC, a1: C_U16CON, a2: C_REG, a6: C_REG, type_: 58, size: 4},
{as: AMULLW, a1: C_REG, a2: C_REG, a6: C_REG, type_: 2, size: 4},
{as: AMULLW, a1: C_REG, a6: C_REG, type_: 2, size: 4},
{as: AMULLW, a1: C_ADDCON, a2: C_REG, a6: C_REG, type_: 4, size: 4},
{as: AMULLW, a1: C_ADDCON, a6: C_REG, type_: 4, size: 4},
{as: AMULLW, a1: C_ANDCON, a2: C_REG, a6: C_REG, type_: 4, size: 4},
{as: AMULLW, a1: C_ANDCON, a6: C_REG, type_: 4, size: 4},
{as: AMULLW, a1: C_LCON, a2: C_REG, a6: C_REG, type_: 22, size: 12},
{as: AMULLW, a1: C_LCON, a6: C_REG, type_: 22, size: 12},
{as: AMULLW, a1: C_S16CON, a2: C_REG, a6: C_REG, type_: 4, size: 4},
{as: AMULLW, a1: C_S16CON, a6: C_REG, type_: 4, size: 4},
{as: AMULLW, a1: C_32CON, a2: C_REG, a6: C_REG, type_: 22, size: 12},
{as: AMULLW, a1: C_32CON, a6: C_REG, type_: 22, size: 12},
{as: ASUBC, a1: C_REG, a2: C_REG, a6: C_REG, type_: 10, size: 4},
{as: ASUBC, a1: C_REG, a6: C_REG, type_: 10, size: 4},
{as: ASUBC, a1: C_REG, a3: C_ADDCON, a6: C_REG, type_: 27, size: 4},
{as: ASUBC, a1: C_REG, a3: C_LCON, a6: C_REG, type_: 28, size: 12},
{as: ASUBC, a1: C_REG, a3: C_S16CON, a6: C_REG, type_: 27, size: 4},
{as: ASUBC, a1: C_REG, a3: C_32CON, a6: C_REG, type_: 28, size: 12},
{as: AOR, a1: C_REG, a2: C_REG, a6: C_REG, type_: 6, size: 4}, /* logical, literal not cc (or/xor) */
{as: AOR, a1: C_REG, a6: C_REG, type_: 6, size: 4},
{as: AOR, a1: C_ANDCON, a6: C_REG, type_: 58, size: 4},
{as: AOR, a1: C_ANDCON, a2: C_REG, a6: C_REG, type_: 58, size: 4},
{as: AOR, a1: C_ADDCON, a6: C_REG, type_: 23, size: 8},
{as: AOR, a1: C_ADDCON, a2: C_REG, a6: C_REG, type_: 23, size: 8},
{as: AOR, a1: C_LCON, a6: C_REG, type_: 23, size: 12},
{as: AOR, a1: C_LCON, a2: C_REG, a6: C_REG, type_: 23, size: 12},
{as: AORIS, a1: C_ANDCON, a6: C_REG, type_: 58, size: 4},
{as: AORIS, a1: C_ANDCON, a2: C_REG, a6: C_REG, type_: 58, size: 4},
{as: AOR, a1: C_U16CON, a6: C_REG, type_: 58, size: 4},
{as: AOR, a1: C_U16CON, a2: C_REG, a6: C_REG, type_: 58, size: 4},
{as: AOR, a1: C_S16CON, a6: C_REG, type_: 23, size: 8},
{as: AOR, a1: C_S16CON, a2: C_REG, a6: C_REG, type_: 23, size: 8},
{as: AOR, a1: C_32CON, a6: C_REG, type_: 23, size: 12},
{as: AOR, a1: C_32CON, a2: C_REG, a6: C_REG, type_: 23, size: 12},
{as: AORIS, a1: C_U16CON, a6: C_REG, type_: 58, size: 4},
{as: AORIS, a1: C_U16CON, a2: C_REG, a6: C_REG, type_: 58, size: 4},
{as: ADIVW, a1: C_REG, a2: C_REG, a6: C_REG, type_: 2, size: 4}, /* op r1[,r2],r3 */
{as: ADIVW, a1: C_REG, a6: C_REG, type_: 2, size: 4},
{as: ASUB, a1: C_REG, a2: C_REG, a6: C_REG, type_: 10, size: 4}, /* op r2[,r1],r3 */
@ -172,33 +168,33 @@ var optabBase = []Optab{
{as: ASLW, a1: C_REG, a2: C_REG, a6: C_REG, type_: 6, size: 4},
{as: ASLD, a1: C_REG, a6: C_REG, type_: 6, size: 4},
{as: ASLD, a1: C_REG, a2: C_REG, a6: C_REG, type_: 6, size: 4},
{as: ASLD, a1: C_SCON, a2: C_REG, a6: C_REG, type_: 25, size: 4},
{as: ASLD, a1: C_SCON, a6: C_REG, type_: 25, size: 4},
{as: AEXTSWSLI, a1: C_SCON, a6: C_REG, type_: 25, size: 4},
{as: AEXTSWSLI, a1: C_SCON, a2: C_REG, a6: C_REG, type_: 25, size: 4},
{as: ASLW, a1: C_SCON, a2: C_REG, a6: C_REG, type_: 57, size: 4},
{as: ASLW, a1: C_SCON, a6: C_REG, type_: 57, size: 4},
{as: ASLD, a1: C_U15CON, a2: C_REG, a6: C_REG, type_: 25, size: 4},
{as: ASLD, a1: C_U15CON, a6: C_REG, type_: 25, size: 4},
{as: AEXTSWSLI, a1: C_U15CON, a6: C_REG, type_: 25, size: 4},
{as: AEXTSWSLI, a1: C_U15CON, a2: C_REG, a6: C_REG, type_: 25, size: 4},
{as: ASLW, a1: C_U15CON, a2: C_REG, a6: C_REG, type_: 57, size: 4},
{as: ASLW, a1: C_U15CON, a6: C_REG, type_: 57, size: 4},
{as: ASRAW, a1: C_REG, a6: C_REG, type_: 6, size: 4},
{as: ASRAW, a1: C_REG, a2: C_REG, a6: C_REG, type_: 6, size: 4},
{as: ASRAW, a1: C_SCON, a2: C_REG, a6: C_REG, type_: 56, size: 4},
{as: ASRAW, a1: C_SCON, a6: C_REG, type_: 56, size: 4},
{as: ASRAW, a1: C_U15CON, a2: C_REG, a6: C_REG, type_: 56, size: 4},
{as: ASRAW, a1: C_U15CON, a6: C_REG, type_: 56, size: 4},
{as: ASRAD, a1: C_REG, a6: C_REG, type_: 6, size: 4},
{as: ASRAD, a1: C_REG, a2: C_REG, a6: C_REG, type_: 6, size: 4},
{as: ASRAD, a1: C_SCON, a2: C_REG, a6: C_REG, type_: 56, size: 4},
{as: ASRAD, a1: C_SCON, a6: C_REG, type_: 56, size: 4},
{as: ARLWNM, a1: C_SCON, a2: C_REG, a3: C_LCON, a6: C_REG, type_: 63, size: 4},
{as: ARLWNM, a1: C_SCON, a2: C_REG, a3: C_SCON, a4: C_SCON, a6: C_REG, type_: 63, size: 4},
{as: ARLWNM, a1: C_REG, a2: C_REG, a3: C_LCON, a6: C_REG, type_: 63, size: 4},
{as: ARLWNM, a1: C_REG, a2: C_REG, a3: C_SCON, a4: C_SCON, a6: C_REG, type_: 63, size: 4},
{as: ACLRLSLWI, a1: C_SCON, a2: C_REG, a3: C_LCON, a6: C_REG, type_: 62, size: 4},
{as: ARLDMI, a1: C_SCON, a2: C_REG, a3: C_LCON, a6: C_REG, type_: 30, size: 4},
{as: ARLDC, a1: C_SCON, a2: C_REG, a3: C_LCON, a6: C_REG, type_: 29, size: 4},
{as: ASRAD, a1: C_U15CON, a2: C_REG, a6: C_REG, type_: 56, size: 4},
{as: ASRAD, a1: C_U15CON, a6: C_REG, type_: 56, size: 4},
{as: ARLWNM, a1: C_U15CON, a2: C_REG, a3: C_32CON, a6: C_REG, type_: 63, size: 4},
{as: ARLWNM, a1: C_U15CON, a2: C_REG, a3: C_U15CON, a4: C_U15CON, a6: C_REG, type_: 63, size: 4},
{as: ARLWNM, a1: C_REG, a2: C_REG, a3: C_32CON, a6: C_REG, type_: 63, size: 4},
{as: ARLWNM, a1: C_REG, a2: C_REG, a3: C_U15CON, a4: C_U15CON, a6: C_REG, type_: 63, size: 4},
{as: ACLRLSLWI, a1: C_U15CON, a2: C_REG, a3: C_32CON, a6: C_REG, type_: 62, size: 4},
{as: ARLDMI, a1: C_U15CON, a2: C_REG, a3: C_32CON, a6: C_REG, type_: 30, size: 4},
{as: ARLDC, a1: C_U15CON, a2: C_REG, a3: C_32CON, a6: C_REG, type_: 29, size: 4},
{as: ARLDC, a1: C_REG, a3: C_U8CON, a4: C_U8CON, a6: C_REG, type_: 9, size: 4},
{as: ARLDCL, a1: C_SCON, a2: C_REG, a3: C_LCON, a6: C_REG, type_: 29, size: 4},
{as: ARLDCL, a1: C_REG, a2: C_REG, a3: C_LCON, a6: C_REG, type_: 14, size: 4},
{as: ARLDICL, a1: C_REG, a2: C_REG, a3: C_LCON, a6: C_REG, type_: 14, size: 4},
{as: ARLDICL, a1: C_SCON, a2: C_REG, a3: C_LCON, a6: C_REG, type_: 14, size: 4},
{as: ARLDCL, a1: C_REG, a3: C_LCON, a6: C_REG, type_: 14, size: 4},
{as: ARLDCL, a1: C_U15CON, a2: C_REG, a3: C_32CON, a6: C_REG, type_: 29, size: 4},
{as: ARLDCL, a1: C_REG, a2: C_REG, a3: C_32CON, a6: C_REG, type_: 14, size: 4},
{as: ARLDICL, a1: C_REG, a2: C_REG, a3: C_32CON, a6: C_REG, type_: 14, size: 4},
{as: ARLDICL, a1: C_U15CON, a2: C_REG, a3: C_32CON, a6: C_REG, type_: 14, size: 4},
{as: ARLDCL, a1: C_REG, a3: C_32CON, a6: C_REG, type_: 14, size: 4},
{as: AFADD, a1: C_FREG, a6: C_FREG, type_: 2, size: 4},
{as: AFADD, a1: C_FREG, a2: C_FREG, a6: C_FREG, type_: 2, size: 4},
{as: AFABS, a1: C_FREG, a6: C_FREG, type_: 33, size: 4},
@ -232,8 +228,7 @@ var optabBase = []Optab{
{as: AMOVBZ, a1: C_REG, a6: C_XOREG, type_: 108, size: 4},
{as: AMOVBZ, a1: C_REG, a6: C_REG, type_: 13, size: 4},
{as: AMOVD, a1: C_ADDCON, a6: C_REG, type_: 3, size: 4},
{as: AMOVD, a1: C_ANDCON, a6: C_REG, type_: 3, size: 4},
{as: AMOVD, a1: C_16CON, a6: C_REG, type_: 3, size: 4},
{as: AMOVD, a1: C_SACON, a6: C_REG, type_: 3, size: 4},
{as: AMOVD, a1: C_SOREG, a6: C_REG, type_: 8, size: 4},
{as: AMOVD, a1: C_XOREG, a6: C_REG, type_: 109, size: 4},
@ -245,8 +240,7 @@ var optabBase = []Optab{
{as: AMOVD, a1: C_REG, a6: C_SPR, type_: 66, size: 4},
{as: AMOVD, a1: C_REG, a6: C_REG, type_: 13, size: 4},
{as: AMOVW, a1: C_ADDCON, a6: C_REG, type_: 3, size: 4},
{as: AMOVW, a1: C_ANDCON, a6: C_REG, type_: 3, size: 4},
{as: AMOVW, a1: C_16CON, a6: C_REG, type_: 3, size: 4},
{as: AMOVW, a1: C_SACON, a6: C_REG, type_: 3, size: 4},
{as: AMOVW, a1: C_CREG, a6: C_REG, type_: 68, size: 4},
{as: AMOVW, a1: C_SOREG, a6: C_REG, type_: 8, size: 4},
@ -258,7 +252,7 @@ var optabBase = []Optab{
{as: AMOVW, a1: C_REG, a6: C_SPR, type_: 66, size: 4},
{as: AMOVW, a1: C_REG, a6: C_REG, type_: 13, size: 4},
{as: AFMOVD, a1: C_ADDCON, a6: C_FREG, type_: 24, size: 8},
{as: AFMOVD, a1: C_S16CON, a6: C_FREG, type_: 24, size: 8},
{as: AFMOVD, a1: C_SOREG, a6: C_FREG, type_: 8, size: 4},
{as: AFMOVD, a1: C_XOREG, a6: C_FREG, type_: 109, size: 4},
{as: AFMOVD, a1: C_ZCON, a6: C_FREG, type_: 24, size: 4},
@ -275,29 +269,28 @@ var optabBase = []Optab{
{as: AMOVFL, a1: C_CREG, a6: C_CREG, type_: 67, size: 4},
{as: AMOVFL, a1: C_FPSCR, a6: C_CREG, type_: 73, size: 4},
{as: AMOVFL, a1: C_FPSCR, a6: C_FREG, type_: 53, size: 4},
{as: AMOVFL, a1: C_FREG, a3: C_LCON, a6: C_FPSCR, type_: 64, size: 4},
{as: AMOVFL, a1: C_FREG, a3: C_32CON, a6: C_FPSCR, type_: 64, size: 4},
{as: AMOVFL, a1: C_FREG, a6: C_FPSCR, type_: 64, size: 4},
{as: AMOVFL, a1: C_LCON, a6: C_FPSCR, type_: 65, size: 4},
{as: AMOVFL, a1: C_32CON, a6: C_FPSCR, type_: 65, size: 4},
{as: AMOVFL, a1: C_REG, a6: C_CREG, type_: 69, size: 4},
{as: AMOVFL, a1: C_REG, a6: C_LCON, type_: 69, size: 4},
{as: AMOVFL, a1: C_REG, a6: C_32CON, type_: 69, size: 4},
{as: ASYSCALL, type_: 5, size: 4},
{as: ASYSCALL, a1: C_REG, type_: 77, size: 12},
{as: ASYSCALL, a1: C_SCON, type_: 77, size: 12},
{as: ABEQ, a6: C_SBRA, type_: 16, size: 4},
{as: ABEQ, a1: C_CREG, a6: C_SBRA, type_: 16, size: 4},
{as: ABR, a6: C_LBRA, type_: 11, size: 4}, // b label
{as: ABR, a6: C_LBRAPIC, type_: 11, size: 8}, // b label; nop
{as: ABR, a6: C_LR, type_: 18, size: 4}, // blr
{as: ABR, a6: C_CTR, type_: 18, size: 4}, // bctr
{as: ABC, a1: C_SCON, a2: C_CRBIT, a6: C_SBRA, type_: 16, size: 4}, // bc bo, bi, label
{as: ABC, a1: C_SCON, a2: C_CRBIT, a6: C_LBRA, type_: 17, size: 4}, // bc bo, bi, label
{as: ABC, a1: C_SCON, a2: C_CRBIT, a6: C_LR, type_: 18, size: 4}, // bclr bo, bi
{as: ABC, a1: C_SCON, a2: C_CRBIT, a3: C_SCON, a6: C_LR, type_: 18, size: 4}, // bclr bo, bi, bh
{as: ABC, a1: C_SCON, a2: C_CRBIT, a6: C_CTR, type_: 18, size: 4}, // bcctr bo, bi
{as: ABDNZ, a6: C_SBRA, type_: 16, size: 4},
{as: ASYSCALL, a1: C_U15CON, type_: 77, size: 12},
{as: ABEQ, a6: C_BRA, type_: 16, size: 4},
{as: ABEQ, a1: C_CREG, a6: C_BRA, type_: 16, size: 4},
{as: ABR, a6: C_BRA, type_: 11, size: 4}, // b label
{as: ABR, a6: C_BRAPIC, type_: 11, size: 8}, // b label; nop
{as: ABR, a6: C_LR, type_: 18, size: 4}, // blr
{as: ABR, a6: C_CTR, type_: 18, size: 4}, // bctr
{as: ABC, a1: C_U15CON, a2: C_CRBIT, a6: C_BRA, type_: 16, size: 4}, // bc bo, bi, label
{as: ABC, a1: C_U15CON, a2: C_CRBIT, a6: C_LR, type_: 18, size: 4}, // bclr bo, bi
{as: ABC, a1: C_U15CON, a2: C_CRBIT, a3: C_U15CON, a6: C_LR, type_: 18, size: 4}, // bclr bo, bi, bh
{as: ABC, a1: C_U15CON, a2: C_CRBIT, a6: C_CTR, type_: 18, size: 4}, // bcctr bo, bi
{as: ABDNZ, a6: C_BRA, type_: 16, size: 4},
{as: ASYNC, type_: 46, size: 4},
{as: AWORD, a1: C_LCON, type_: 40, size: 4},
{as: AWORD, a1: C_32CON, type_: 40, size: 4},
{as: ADWORD, a1: C_64CON, type_: 31, size: 8},
{as: ADWORD, a1: C_LACON, type_: 31, size: 8},
{as: AADDME, a1: C_REG, a6: C_REG, type_: 47, size: 4},
@ -313,19 +306,19 @@ var optabBase = []Optab{
{as: AREMU, a1: C_REG, a2: C_REG, a6: C_REG, type_: 50, size: 16},
{as: AREMD, a1: C_REG, a6: C_REG, type_: 51, size: 12},
{as: AREMD, a1: C_REG, a2: C_REG, a6: C_REG, type_: 51, size: 12},
{as: AMTFSB0, a1: C_SCON, type_: 52, size: 4},
{as: AMTFSB0, a1: C_U15CON, type_: 52, size: 4},
/* Other ISA 2.05+ instructions */
{as: APOPCNTD, a1: C_REG, a6: C_REG, type_: 93, size: 4}, /* population count, x-form */
{as: ACMPB, a1: C_REG, a2: C_REG, a6: C_REG, type_: 92, size: 4}, /* compare byte, x-form */
{as: ACMPEQB, a1: C_REG, a2: C_REG, a6: C_CREG, type_: 92, size: 4}, /* compare equal byte, x-form, ISA 3.0 */
{as: ACMPEQB, a1: C_REG, a6: C_REG, type_: 70, size: 4},
{as: AFTDIV, a1: C_FREG, a2: C_FREG, a6: C_SCON, type_: 92, size: 4}, /* floating test for sw divide, x-form */
{as: AFTSQRT, a1: C_FREG, a6: C_SCON, type_: 93, size: 4}, /* floating test for sw square root, x-form */
{as: ACOPY, a1: C_REG, a6: C_REG, type_: 92, size: 4}, /* copy/paste facility, x-form */
{as: ADARN, a1: C_SCON, a6: C_REG, type_: 92, size: 4}, /* deliver random number, x-form */
{as: AMADDHD, a1: C_REG, a2: C_REG, a3: C_REG, a6: C_REG, type_: 83, size: 4}, /* multiply-add high/low doubleword, va-form */
{as: AADDEX, a1: C_REG, a2: C_REG, a3: C_SCON, a6: C_REG, type_: 94, size: 4}, /* add extended using alternate carry, z23-form */
{as: ACRAND, a1: C_CRBIT, a2: C_CRBIT, a6: C_CRBIT, type_: 2, size: 4}, /* logical ops for condition register bits xl-form */
{as: AFTDIV, a1: C_FREG, a2: C_FREG, a6: C_U15CON, type_: 92, size: 4}, /* floating test for sw divide, x-form */
{as: AFTSQRT, a1: C_FREG, a6: C_U15CON, type_: 93, size: 4}, /* floating test for sw square root, x-form */
{as: ACOPY, a1: C_REG, a6: C_REG, type_: 92, size: 4}, /* copy/paste facility, x-form */
{as: ADARN, a1: C_U15CON, a6: C_REG, type_: 92, size: 4}, /* deliver random number, x-form */
{as: AMADDHD, a1: C_REG, a2: C_REG, a3: C_REG, a6: C_REG, type_: 83, size: 4}, /* multiply-add high/low doubleword, va-form */
{as: AADDEX, a1: C_REG, a2: C_REG, a3: C_U15CON, a6: C_REG, type_: 94, size: 4}, /* add extended using alternate carry, z23-form */
{as: ACRAND, a1: C_CRBIT, a2: C_CRBIT, a6: C_CRBIT, type_: 2, size: 4}, /* logical ops for condition register bits xl-form */
/* Misc ISA 3.0 instructions */
{as: ASETB, a1: C_CREG, a6: C_REG, type_: 110, size: 4},
@ -368,7 +361,7 @@ var optabBase = []Optab{
/* Vector shift */
{as: AVS, a1: C_VREG, a2: C_VREG, a6: C_VREG, type_: 82, size: 4}, /* vector shift, vx-form */
{as: AVSA, a1: C_VREG, a2: C_VREG, a6: C_VREG, type_: 82, size: 4}, /* vector shift algebraic, vx-form */
{as: AVSOI, a1: C_ANDCON, a2: C_VREG, a3: C_VREG, a6: C_VREG, type_: 83, size: 4}, /* vector shift by octet immediate, va-form */
{as: AVSOI, a1: C_U16CON, a2: C_VREG, a3: C_VREG, a6: C_VREG, type_: 83, size: 4}, /* vector shift by octet immediate, va-form */
/* Vector count */
{as: AVCLZ, a1: C_VREG, a6: C_VREG, type_: 85, size: 4}, /* vector count leading zeros, vx-form */
@ -392,10 +385,8 @@ var optabBase = []Optab{
{as: AVSEL, a1: C_VREG, a2: C_VREG, a3: C_VREG, a6: C_VREG, type_: 83, size: 4}, /* vector select, va-form */
/* Vector splat */
{as: AVSPLTB, a1: C_SCON, a2: C_VREG, a6: C_VREG, type_: 82, size: 4}, /* vector splat, vx-form */
{as: AVSPLTB, a1: C_ADDCON, a2: C_VREG, a6: C_VREG, type_: 82, size: 4},
{as: AVSPLTISB, a1: C_SCON, a6: C_VREG, type_: 82, size: 4}, /* vector splat immediate, vx-form */
{as: AVSPLTISB, a1: C_ADDCON, a6: C_VREG, type_: 82, size: 4},
{as: AVSPLTB, a1: C_S16CON, a2: C_VREG, a6: C_VREG, type_: 82, size: 4},
{as: AVSPLTISB, a1: C_S16CON, a6: C_VREG, type_: 82, size: 4},
/* Vector AES */
{as: AVCIPH, a1: C_VREG, a2: C_VREG, a6: C_VREG, type_: 82, size: 4}, /* vector AES cipher, vx-form */
@ -403,7 +394,7 @@ var optabBase = []Optab{
{as: AVSBOX, a1: C_VREG, a6: C_VREG, type_: 82, size: 4}, /* vector AES subbytes, vx-form */
/* Vector SHA */
{as: AVSHASIGMA, a1: C_ANDCON, a2: C_VREG, a3: C_ANDCON, a6: C_VREG, type_: 82, size: 4}, /* vector SHA sigma, vx-form */
{as: AVSHASIGMA, a1: C_U16CON, a2: C_VREG, a3: C_U16CON, a6: C_VREG, type_: 82, size: 4}, /* vector SHA sigma, vx-form */
/* VSX vector load */
{as: ALXVD2X, a1: C_XOREG, a6: C_VSREG, type_: 87, size: 4}, /* vsx vector load, xx1-form */
@ -447,14 +438,14 @@ var optabBase = []Optab{
{as: AXXMRGHW, a1: C_VSREG, a2: C_VSREG, a6: C_VSREG, type_: 90, size: 4}, /* vsx merge, xx3-form */
/* VSX splat */
{as: AXXSPLTW, a1: C_VSREG, a3: C_SCON, a6: C_VSREG, type_: 89, size: 4}, /* vsx splat, xx2-form */
{as: AXXSPLTIB, a1: C_SCON, a6: C_VSREG, type_: 100, size: 4}, /* vsx splat, xx2-form */
{as: AXXSPLTW, a1: C_VSREG, a3: C_U15CON, a6: C_VSREG, type_: 89, size: 4}, /* vsx splat, xx2-form */
{as: AXXSPLTIB, a1: C_U15CON, a6: C_VSREG, type_: 100, size: 4}, /* vsx splat, xx2-form */
/* VSX permute */
{as: AXXPERM, a1: C_VSREG, a2: C_VSREG, a6: C_VSREG, type_: 90, size: 4}, /* vsx permute, xx3-form */
/* VSX shift */
{as: AXXSLDWI, a1: C_VSREG, a2: C_VSREG, a3: C_SCON, a6: C_VSREG, type_: 90, size: 4}, /* vsx shift immediate, xx3-form */
{as: AXXSLDWI, a1: C_VSREG, a2: C_VSREG, a3: C_U15CON, a6: C_VSREG, type_: 90, size: 4}, /* vsx shift immediate, xx3-form */
/* VSX reverse bytes */
{as: AXXBRQ, a1: C_VSREG, a6: C_VSREG, type_: 101, size: 4}, /* vsx reverse bytes */
@ -479,45 +470,45 @@ var optabBase = []Optab{
{as: ACMP, a1: C_REG, a6: C_REG, type_: 70, size: 4},
{as: ACMP, a1: C_REG, a2: C_CREG, a6: C_REG, type_: 70, size: 4},
{as: ACMP, a1: C_REG, a6: C_ADDCON, type_: 71, size: 4},
{as: ACMP, a1: C_REG, a2: C_CREG, a6: C_ADDCON, type_: 71, size: 4},
{as: ACMP, a1: C_REG, a6: C_S16CON, type_: 71, size: 4},
{as: ACMP, a1: C_REG, a2: C_CREG, a6: C_S16CON, type_: 71, size: 4},
{as: ACMPU, a1: C_REG, a6: C_REG, type_: 70, size: 4},
{as: ACMPU, a1: C_REG, a2: C_CREG, a6: C_REG, type_: 70, size: 4},
{as: ACMPU, a1: C_REG, a6: C_ANDCON, type_: 71, size: 4},
{as: ACMPU, a1: C_REG, a2: C_CREG, a6: C_ANDCON, type_: 71, size: 4},
{as: ACMPU, a1: C_REG, a6: C_U16CON, type_: 71, size: 4},
{as: ACMPU, a1: C_REG, a2: C_CREG, a6: C_U16CON, type_: 71, size: 4},
{as: AFCMPO, a1: C_FREG, a6: C_FREG, type_: 70, size: 4},
{as: AFCMPO, a1: C_FREG, a2: C_CREG, a6: C_FREG, type_: 70, size: 4},
{as: ATW, a1: C_LCON, a2: C_REG, a6: C_REG, type_: 60, size: 4},
{as: ATW, a1: C_LCON, a2: C_REG, a6: C_ADDCON, type_: 61, size: 4},
{as: ATW, a1: C_32CON, a2: C_REG, a6: C_REG, type_: 60, size: 4},
{as: ATW, a1: C_32CON, a2: C_REG, a6: C_S16CON, type_: 61, size: 4},
{as: ADCBF, a1: C_SOREG, type_: 43, size: 4},
{as: ADCBF, a1: C_XOREG, type_: 43, size: 4},
{as: ADCBF, a1: C_XOREG, a2: C_REG, a6: C_SCON, type_: 43, size: 4},
{as: ADCBF, a1: C_SOREG, a6: C_SCON, type_: 43, size: 4},
{as: ADCBF, a1: C_XOREG, a6: C_SCON, type_: 43, size: 4},
{as: ADCBF, a1: C_XOREG, a2: C_REG, a6: C_U15CON, type_: 43, size: 4},
{as: ADCBF, a1: C_SOREG, a6: C_U15CON, type_: 43, size: 4},
{as: ADCBF, a1: C_XOREG, a6: C_U15CON, type_: 43, size: 4},
{as: ASTDCCC, a1: C_REG, a2: C_REG, a6: C_XOREG, type_: 44, size: 4},
{as: ASTDCCC, a1: C_REG, a6: C_XOREG, type_: 44, size: 4},
{as: ALDAR, a1: C_XOREG, a6: C_REG, type_: 45, size: 4},
{as: ALDAR, a1: C_XOREG, a3: C_ANDCON, a6: C_REG, type_: 45, size: 4},
{as: ALDAR, a1: C_XOREG, a3: C_U16CON, a6: C_REG, type_: 45, size: 4},
{as: AEIEIO, type_: 46, size: 4},
{as: ATLBIE, a1: C_REG, type_: 49, size: 4},
{as: ATLBIE, a1: C_SCON, a6: C_REG, type_: 49, size: 4},
{as: ATLBIE, a1: C_U15CON, a6: C_REG, type_: 49, size: 4},
{as: ASLBMFEE, a1: C_REG, a6: C_REG, type_: 55, size: 4},
{as: ASLBMTE, a1: C_REG, a6: C_REG, type_: 55, size: 4},
{as: ASTSW, a1: C_REG, a6: C_XOREG, type_: 44, size: 4},
{as: ASTSW, a1: C_REG, a3: C_LCON, a6: C_ZOREG, type_: 41, size: 4},
{as: ASTSW, a1: C_REG, a3: C_32CON, a6: C_ZOREG, type_: 41, size: 4},
{as: ALSW, a1: C_XOREG, a6: C_REG, type_: 45, size: 4},
{as: ALSW, a1: C_ZOREG, a3: C_LCON, a6: C_REG, type_: 42, size: 4},
{as: ALSW, a1: C_ZOREG, a3: C_32CON, a6: C_REG, type_: 42, size: 4},
{as: obj.AUNDEF, type_: 78, size: 4},
{as: obj.APCDATA, a1: C_LCON, a6: C_LCON, type_: 0, size: 0},
{as: obj.AFUNCDATA, a1: C_SCON, a6: C_ADDR, type_: 0, size: 0},
{as: obj.APCDATA, a1: C_32CON, a6: C_32CON, type_: 0, size: 0},
{as: obj.AFUNCDATA, a1: C_U15CON, a6: C_ADDR, type_: 0, size: 0},
{as: obj.ANOP, type_: 0, size: 0},
{as: obj.ANOP, a1: C_LCON, type_: 0, size: 0}, // NOP operand variations added for #40689
{as: obj.ANOP, a1: C_REG, type_: 0, size: 0}, // to preserve previous behavior
{as: obj.ANOP, a1: C_32CON, type_: 0, size: 0}, // NOP operand variations added for #40689
{as: obj.ANOP, a1: C_REG, type_: 0, size: 0}, // to preserve previous behavior
{as: obj.ANOP, a1: C_FREG, type_: 0, size: 0},
{as: obj.ADUFFZERO, a6: C_LBRA, type_: 11, size: 4}, // same as ABR/ABL
{as: obj.ADUFFCOPY, a6: C_LBRA, type_: 11, size: 4}, // same as ABR/ABL
{as: obj.APCALIGN, a1: C_LCON, type_: 0, size: 0}, // align code
{as: obj.ADUFFZERO, a6: C_BRA, type_: 11, size: 4}, // same as ABR/ABL
{as: obj.ADUFFCOPY, a6: C_BRA, type_: 11, size: 4}, // same as ABR/ABL
{as: obj.APCALIGN, a1: C_32CON, type_: 0, size: 0}, // align code
}
// These are opcodes above which may generate different sequences depending on whether prefix opcode support
@ -552,7 +543,7 @@ var prefixableOptab = []PrefixableOptab{
{Optab: Optab{as: AMOVD, a1: C_REG, a6: C_LOREG, type_: 35, size: 8}, minGOPPC64: 10, pfxsize: 8},
{Optab: Optab{as: AMOVD, a1: C_REG, a6: C_ADDR, type_: 74, size: 8}, minGOPPC64: 10, pfxsize: 8},
{Optab: Optab{as: AMOVW, a1: C_LCON, a6: C_REG, type_: 19, size: 8}, minGOPPC64: 10, pfxsize: 8},
{Optab: Optab{as: AMOVW, a1: C_32CON, a6: C_REG, type_: 19, size: 8}, minGOPPC64: 10, pfxsize: 8},
{Optab: Optab{as: AMOVW, a1: C_LACON, a6: C_REG, type_: 26, size: 8}, minGOPPC64: 10, pfxsize: 8},
{Optab: Optab{as: AMOVW, a1: C_LOREG, a6: C_REG, type_: 36, size: 8}, minGOPPC64: 10, pfxsize: 8},
{Optab: Optab{as: AMOVW, a1: C_ADDR, a6: C_REG, type_: 75, size: 8}, minGOPPC64: 10, pfxsize: 8},
@ -574,8 +565,8 @@ var prefixableOptab = []PrefixableOptab{
{Optab: Optab{as: AFMOVD, a1: C_FREG, a6: C_LOREG, type_: 35, size: 8}, minGOPPC64: 10, pfxsize: 8},
{Optab: Optab{as: AFMOVD, a1: C_FREG, a6: C_ADDR, type_: 74, size: 8}, minGOPPC64: 10, pfxsize: 8},
{Optab: Optab{as: AADD, a1: C_LCON, a2: C_REG, a6: C_REG, type_: 22, size: 12}, minGOPPC64: 10, pfxsize: 8},
{Optab: Optab{as: AADD, a1: C_LCON, a6: C_REG, type_: 22, size: 12}, minGOPPC64: 10, pfxsize: 8},
{Optab: Optab{as: AADD, a1: C_32CON, a2: C_REG, a6: C_REG, type_: 22, size: 12}, minGOPPC64: 10, pfxsize: 8},
{Optab: Optab{as: AADD, a1: C_32CON, a6: C_REG, type_: 22, size: 12}, minGOPPC64: 10, pfxsize: 8},
{Optab: Optab{as: AADD, a1: C_S34CON, a2: C_REG, a6: C_REG, type_: 22, size: 20}, minGOPPC64: 10, pfxsize: 8},
{Optab: Optab{as: AADD, a1: C_S34CON, a6: C_REG, type_: 22, size: 20}, minGOPPC64: 10, pfxsize: 8},
}
@ -955,7 +946,7 @@ func (c *ctxt9) aclass(a *obj.Addr) int {
f64 := a.Val.(float64)
if f64 == 0 {
if math.Signbit(f64) {
return C_ADDCON
return C_S16CON
}
return C_ZCON
}
@ -1017,7 +1008,7 @@ func (c *ctxt9) aclass(a *obj.Addr) int {
case sbits <= 16:
return C_U16CON
case sbits <= 31:
return C_U32CON
return C_U31CON
case sbits <= 32:
return C_U32CON
case sbits <= 33:
@ -1041,9 +1032,9 @@ func (c *ctxt9) aclass(a *obj.Addr) int {
case obj.TYPE_BRANCH:
if a.Sym != nil && c.ctxt.Flag_dynlink && !pfxEnabled {
return C_LBRAPIC
return C_BRAPIC
}
return C_SBRA
return C_BRA
}
return C_GOK
@ -1114,7 +1105,7 @@ func (c *ctxt9) oplook(p *obj.Prog) *Optab {
return &ops[0]
}
// Compare two operand types (ex C_REG, or C_SCON)
// Compare two operand types (ex C_REG, or C_U15CON)
// and return true if b is compatible with a.
//
// Argument comparison isn't reflexive, so care must be taken.
@ -1145,13 +1136,20 @@ func cmp(a int, b int) bool {
return cmp(C_U5CON, b)
case C_U15CON:
return cmp(C_U8CON, b)
case C_U16CON:
return cmp(C_U15CON, b)
case C_S16CON:
return cmp(C_U15CON, b)
case C_32CON:
case C_U16CON:
return cmp(C_U15CON, b)
case C_16CON:
return cmp(C_S16CON, b) || cmp(C_U16CON, b)
case C_U31CON:
return cmp(C_U16CON, b)
case C_U32CON:
return cmp(C_U31CON, b)
case C_S32CON:
return cmp(C_U31CON, b) || cmp(C_S16CON, b)
case C_32CON:
return cmp(C_S32CON, b) || cmp(C_U32CON, b)
case C_S34CON:
return cmp(C_32CON, b)
case C_64CON:
@ -1160,9 +1158,6 @@ func cmp(a int, b int) bool {
case C_LACON:
return cmp(C_SACON, b)
case C_LBRA:
return cmp(C_SBRA, b)
case C_SOREG:
return cmp(C_ZOREG, b)
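
The reworked constant classes form a simple widening hierarchy. The sketch below is not the assembler's aclass/cmp code; classify and accepts are invented helpers that restate the idea for non-negative constants only (the real code also handles signed ranges, registers, and memory operands): classify by required bit width, and let a wider operand class accept any narrower one.

// constclass_sketch.go: illustrative only.
package main

import (
	"fmt"
	"math/bits"
)

// classify buckets a constant by the number of bits needed to represent it.
func classify(v uint64) string {
	switch n := bits.Len64(v); {
	case n <= 15:
		return "C_U15CON"
	case n <= 16:
		return "C_U16CON"
	case n <= 31:
		return "C_U31CON"
	case n <= 32:
		return "C_U32CON"
	default:
		return "C_64CON"
	}
}

// narrower maps each class to the next narrower class it also accepts,
// mirroring a slice of the cmp hierarchy above.
var narrower = map[string]string{
	"C_64CON":  "C_U32CON",
	"C_U32CON": "C_U31CON",
	"C_U31CON": "C_U16CON",
	"C_U16CON": "C_U15CON",
}

// accepts reports whether an operand slot of class a can take an argument of class b.
func accepts(a, b string) bool {
	for c := a; c != ""; c = narrower[c] {
		if c == b {
			return true
		}
	}
	return false
}

func main() {
	fmt.Println(classify(0x7fff), classify(0x10001))   // C_U15CON C_U31CON
	fmt.Println(accepts("C_U32CON", classify(0x7fff))) // true: a 32-bit slot takes a 15-bit constant
	fmt.Println(accepts("C_U15CON", classify(1<<31)))  // false: a 15-bit slot cannot take a 32-bit constant
}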
@ -2541,34 +2536,26 @@ func asmout(c *ctxt9, p *obj.Prog, o *Optab, out *[5]uint32) {
}
o1 = AOP_RRR(c.oprrr(p.As), uint32(p.To.Reg), uint32(r), uint32(p.From.Reg))
case 3: /* mov $soreg/addcon/andcon/ucon, r ==> addis/oris/addi/ori $i,reg',r */
case 3: /* mov $soreg/16con, r ==> addi/ori $i,reg',r */
d := c.vregoff(&p.From)
v := int32(d)
r := int(p.From.Reg)
// p.From may be a constant value or an offset(reg) type argument.
isZeroOrR0 := r&0x1f == 0
if r0iszero != 0 /*TypeKind(100016)*/ && p.To.Reg == 0 && (r != 0 || v != 0) {
c.ctxt.Diag("literal operation on R0\n%v", p)
}
a := OP_ADDI
if int64(int16(d)) != d {
// Operand is 16 bit value with sign bit set
if o.a1 == C_ANDCON {
// Needs unsigned 16 bit so use ORI
if isZeroOrR0 {
o1 = LOP_IRR(uint32(OP_ORI), uint32(p.To.Reg), uint32(0), uint32(v))
break
}
// With ADDCON, needs signed 16 bit value, fall through to use ADDI
} else if o.a1 != C_ADDCON {
log.Fatalf("invalid handling of %v", p)
if int64(int16(d)) == d {
// MOVD $int16, Ry or MOVD $offset(Rx), Ry
o1 = AOP_IRR(uint32(OP_ADDI), uint32(p.To.Reg), uint32(r), uint32(v))
} else {
// MOVD $uint16, Ry
if int64(uint16(d)) != d || (r != 0 && r != REGZERO) {
c.ctxt.Diag("Rule expects a uint16 constant load. got:\n%v", p)
}
o1 = LOP_IRR(uint32(OP_ORI), uint32(p.To.Reg), uint32(0), uint32(v))
}
o1 = AOP_IRR(uint32(a), uint32(p.To.Reg), uint32(r), uint32(v))
case 4: /* add/mul $scon,[r1],r2 */
v := c.regoff(&p.From)
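
Case 3 now only handles 16-bit constant loads: signed 16-bit values go through ADDI (which sign-extends its immediate) and the remaining unsigned 16-bit values through ORI (which zero-extends). A standalone sketch of that selection, with invented names, follows; wider constants are matched by other optab entries and are simply rejected here.

// movdconst_sketch.go: illustrative only, not asm9.go.
package main

import (
	"errors"
	"fmt"
)

type insn struct {
	Op  string
	Imm int64
}

func loadConst16(d int64) (insn, error) {
	switch {
	case int64(int16(d)) == d:
		// ADDI sign-extends its immediate, so any int16 value works.
		return insn{Op: "ADDI", Imm: d}, nil
	case int64(uint16(d)) == d:
		// ORI zero-extends, covering the remaining 0x8000..0xFFFF range.
		return insn{Op: "ORI", Imm: d}, nil
	default:
		return insn{}, errors.New("constant does not fit in 16 bits")
	}
}

func main() {
	for _, d := range []int64{-1, 0x7fff, 0x8000, 0xffff, 0x10000} {
		ins, err := loadConst16(d)
		fmt.Printf("%#x => %+v %v\n", d, ins, err)
	}
}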
@ -2654,7 +2641,7 @@ func asmout(c *ctxt9, p *obj.Prog, o *Optab, out *[5]uint32) {
}
o1 = AOP_RRR(c.oprrr(p.As), uint32(p.To.Reg), uint32(p.From.Reg), uint32(r))
case 11: /* br/bl lbra */
case 11: /* br/bl bra */
v := int32(0)
if p.To.Target() != nil {
@ -2688,7 +2675,7 @@ func asmout(c *ctxt9, p *obj.Prog, o *Optab, out *[5]uint32) {
case 13: /* mov[bhwd]{z,} r,r */
// This needs to handle "MOV* $0, Rx". This shows up because $0 also
// matches C_REG if r0iszero. This happens because C_REG sorts before C_ANDCON
// matches C_REG if r0iszero. This happens because C_REG sorts before C_U16CON
// TODO: fix the above behavior and cleanup this exception.
if p.From.Type == obj.TYPE_CONST {
o1 = LOP_IRR(OP_ADDI, REGZERO, uint32(p.To.Reg), 0)
@ -2776,8 +2763,7 @@ func asmout(c *ctxt9, p *obj.Prog, o *Optab, out *[5]uint32) {
c.ctxt.Diag("unexpected op in rldc case\n%v", p)
}
case 17, /* bc bo,bi,lbra (same for now) */
16: /* bc bo,bi,sbra */
case 16: /* bc bo,bi,bra */
a := 0
r := int(p.Reg)
@ -2921,8 +2907,8 @@ func asmout(c *ctxt9, p *obj.Prog, o *Optab, out *[5]uint32) {
r = int(p.To.Reg)
}
// With ADDCON operand, generate 2 instructions using ADDI for signed value,
// with LCON operand generate 3 instructions.
// With S16CON operand, generate 2 instructions using ADDI for signed value,
// with 32CON operand generate 3 instructions.
if o.size == 8 {
o1 = LOP_IRR(OP_ADDI, REGZERO, REGTMP, uint32(int32(d)))
o2 = LOP_RRR(c.oprrr(p.As), uint32(p.To.Reg), REGTMP, uint32(r))

View File

@ -516,17 +516,19 @@ func TestAddrClassifier(t *testing.T) {
{obj.Addr{Type: obj.TYPE_CONST, Name: obj.NAME_NONE, Offset: 32}, C_U8CON},
{obj.Addr{Type: obj.TYPE_CONST, Name: obj.NAME_NONE, Offset: 1 << 14}, C_U15CON},
{obj.Addr{Type: obj.TYPE_CONST, Name: obj.NAME_NONE, Offset: 1 << 15}, C_U16CON},
{obj.Addr{Type: obj.TYPE_CONST, Name: obj.NAME_NONE, Offset: 1 + 1<<16}, C_U32CON},
{obj.Addr{Type: obj.TYPE_CONST, Name: obj.NAME_NONE, Offset: 1 + 1<<16}, C_U31CON},
{obj.Addr{Type: obj.TYPE_CONST, Name: obj.NAME_NONE, Offset: 1 << 31}, C_U32CON},
{obj.Addr{Type: obj.TYPE_CONST, Name: obj.NAME_NONE, Offset: 1 << 32}, C_S34CON},
{obj.Addr{Type: obj.TYPE_CONST, Name: obj.NAME_NONE, Offset: 1 << 33}, C_64CON},
{obj.Addr{Type: obj.TYPE_CONST, Name: obj.NAME_NONE, Offset: -1}, C_S16CON},
{obj.Addr{Type: obj.TYPE_CONST, Name: obj.NAME_NONE, Offset: -0x10001}, C_S32CON},
{obj.Addr{Type: obj.TYPE_CONST, Name: obj.NAME_NONE, Offset: 0x10001}, C_U31CON},
{obj.Addr{Type: obj.TYPE_CONST, Name: obj.NAME_NONE, Offset: -(1 << 33)}, C_S34CON},
{obj.Addr{Type: obj.TYPE_CONST, Name: obj.NAME_NONE, Offset: -(1 << 34)}, C_64CON},
// Branch like arguments
{obj.Addr{Type: obj.TYPE_BRANCH, Sym: &obj.LSym{Type: objabi.SDATA}}, cmplx{C_SBRA, C_LBRAPIC, C_LBRAPIC, C_SBRA}},
{obj.Addr{Type: obj.TYPE_BRANCH}, C_SBRA},
{obj.Addr{Type: obj.TYPE_BRANCH, Sym: &obj.LSym{Type: objabi.SDATA}}, cmplx{C_BRA, C_BRAPIC, C_BRAPIC, C_BRA}},
{obj.Addr{Type: obj.TYPE_BRANCH}, C_BRA},
}
pic_ctxt9 := ctxt9{ctxt: &obj.Link{Flag_shared: true, Arch: &Linkppc64}, autosize: 0}

View File

@ -45,6 +45,7 @@ import (
"fmt"
"internal/abi"
"log"
"math/rand"
"os"
"sort"
"strconv"
@ -122,10 +123,11 @@ func trampoline(ctxt *Link, s loader.Sym) {
}
if ldr.SymValue(rs) == 0 && ldr.SymType(rs) != sym.SDYNIMPORT && ldr.SymType(rs) != sym.SUNDEFEXT {
// Symbols in the same package are laid out together.
// Symbols in the same package are laid out together (if we
// don't randomize the function order).
// Except that if SymPkg(s) == "", it is a host object symbol
// which may call an external symbol via PLT.
if ldr.SymPkg(s) != "" && ldr.SymPkg(rs) == ldr.SymPkg(s) {
if ldr.SymPkg(s) != "" && ldr.SymPkg(rs) == ldr.SymPkg(s) && *flagRandLayout == 0 {
// RISC-V is only able to reach +/-1MiB via a JAL instruction.
// We need to generate a trampoline when an address is
// currently unknown.
@ -134,7 +136,7 @@ func trampoline(ctxt *Link, s loader.Sym) {
}
}
// Runtime packages are laid out together.
if isRuntimeDepPkg(ldr.SymPkg(s)) && isRuntimeDepPkg(ldr.SymPkg(rs)) {
if isRuntimeDepPkg(ldr.SymPkg(s)) && isRuntimeDepPkg(ldr.SymPkg(rs)) && *flagRandLayout == 0 {
continue
}
}
@ -2397,6 +2399,26 @@ func (ctxt *Link) textaddress() {
ldr := ctxt.loader
if *flagRandLayout != 0 {
r := rand.New(rand.NewSource(*flagRandLayout))
textp := ctxt.Textp
i := 0
// don't move the buildid symbol
if len(textp) > 0 && ldr.SymName(textp[0]) == "go:buildid" {
i++
}
// Skip over C symbols, as functions in a (C object) section must stay together.
// TODO: maybe we can move a section as a whole.
// Note: we load C symbols before Go symbols, so we can scan from the start.
for i < len(textp) && (ldr.SubSym(textp[i]) != 0 || ldr.AttrSubSymbol(textp[i])) {
i++
}
textp = textp[i:]
r.Shuffle(len(textp), func(i, j int) {
textp[i], textp[j] = textp[j], textp[i]
})
}
text := ctxt.xdefine("runtime.text", sym.STEXT, 0)
etext := ctxt.xdefine("runtime.etext", sym.STEXT, 0)
ldr.SetSymSect(text, sect)
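
The -randlayout shuffle is deterministic for a given seed and leaves a small prefix of symbols in place (go:buildid and host-object symbols above). A minimal sketch with placeholder symbol names:

// randlayout_sketch.go: illustrative only.
package main

import (
	"fmt"
	"math/rand"
)

func shuffleLayout(seed int64, syms []string, keepPrefix int) {
	r := rand.New(rand.NewSource(seed))
	tail := syms[keepPrefix:]
	r.Shuffle(len(tail), func(i, j int) {
		tail[i], tail[j] = tail[j], tail[i]
	})
}

func main() {
	syms := []string{"go:buildid", "main.main", "main.helper", "fmt.Println", "runtime.mallocgc"}
	shuffleLayout(123, syms, 1) // keep go:buildid first
	fmt.Println(syms)
	// The same seed always yields the same order, so a given -randlayout
	// value produces a reproducible layout.
}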

View File

@ -201,7 +201,7 @@ func (d *deadcodePass) flood() {
rs := r.Sym()
if d.ldr.IsItab(rs) {
// This relocation can also point at an itab, in which case it
// means "the _type field of that itab".
// means "the Type field of that itab".
rs = decodeItabType(d.ldr, d.ctxt.Arch, rs)
}
if !d.ldr.IsGoType(rs) && !d.ctxt.linkShared {

View File

@ -301,8 +301,8 @@ func decodetypeGcprogShlib(ctxt *Link, data []byte) uint64 {
return decodeInuxi(ctxt.Arch, data[2*int32(ctxt.Arch.PtrSize)+8+1*int32(ctxt.Arch.PtrSize):], ctxt.Arch.PtrSize)
}
// decodeItabType returns the itab._type field from an itab.
// decodeItabType returns the itab.Type field from an itab.
func decodeItabType(ldr *loader.Loader, arch *sys.Arch, symIdx loader.Sym) loader.Sym {
relocs := ldr.Relocs(symIdx)
return decodeRelocSym(ldr, symIdx, &relocs, int32(arch.PtrSize))
return decodeRelocSym(ldr, symIdx, &relocs, int32(abi.ITabTypeOff(arch.PtrSize)))
}

View File

@ -1786,7 +1786,7 @@ func dwarfGenerateDebugInfo(ctxt *Link) {
"type:internal/abi.SliceType",
"type:internal/abi.StructType",
"type:internal/abi.InterfaceType",
"type:runtime.itab",
"type:internal/abi.ITab",
"type:internal/abi.Imethod"} {
d.defgotype(d.lookupOrDiag(typ))
}

View File

@ -65,7 +65,7 @@ func TestRuntimeTypesPresent(t *testing.T) {
"internal/abi.SliceType": true,
"internal/abi.StructType": true,
"internal/abi.InterfaceType": true,
"runtime.itab": true,
"internal/abi.ITab": true,
}
found := findTypes(t, dwarf, want)

View File

@ -478,13 +478,11 @@ func (ctxt *Link) domacho() {
if ctxt.LinkMode == LinkInternal && machoPlatform == PLATFORM_MACOS {
var version uint32
switch ctxt.Arch.Family {
case sys.AMD64:
case sys.ARM64, sys.AMD64:
// This must be fairly recent for Apple signing (go.dev/issue/30488).
// Having too old a version here was also implicated in some problems
// calling into macOS libraries (go.dev/issue/56784).
// In general this can be the most recent supported macOS version.
version = 10<<16 | 13<<8 | 0<<0 // 10.13.0
case sys.ARM64:
version = 11<<16 | 0<<8 | 0<<0 // 11.0.0
}
ml := newMachoLoad(ctxt.Arch, LC_BUILD_VERSION, 4)
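
The version constants above pack a minimum macOS version as major<<16 | minor<<8 | patch, the same encoding the updated TestMachOBuildVersion check decodes. A tiny sketch:

// machover_sketch.go: illustrative only.
package main

import "fmt"

func encodeVersion(major, minor, patch uint32) uint32 {
	return major<<16 | minor<<8 | patch
}

func decodeVersion(v uint32) (major, minor, patch uint32) {
	return (v >> 16) & 0xff, (v >> 8) & 0xff, v & 0xff
}

func main() {
	v := encodeVersion(11, 0, 0)
	fmt.Printf("%#x\n", v)       // 0xb0000
	fmt.Println(decodeVersion(v)) // 11 0 0
}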

View File

@ -102,6 +102,7 @@ var (
FlagTextAddr = flag.Int64("T", -1, "set the start address of text symbols")
flagEntrySymbol = flag.String("E", "", "set `entry` symbol name")
flagPruneWeakMap = flag.Bool("pruneweakmap", true, "prune weak mapinit refs")
flagRandLayout = flag.Int64("randlayout", 0, "randomize function layout")
cpuprofile = flag.String("cpuprofile", "", "write cpu profile to `file`")
memprofile = flag.String("memprofile", "", "write memory profile to `file`")
memprofilerate = flag.Int64("memprofilerate", 0, "set runtime.MemProfileRate to `rate`")

View File

@ -827,9 +827,8 @@ func expandGoroot(s string) string {
}
const (
BUCKETSIZE = 256 * abi.MINFUNC
SUBBUCKETS = 16
SUBBUCKETSIZE = BUCKETSIZE / SUBBUCKETS
SUBBUCKETSIZE = abi.FuncTabBucketSize / SUBBUCKETS
NOIDX = 0x7fffffff
)
@ -847,7 +846,7 @@ func (ctxt *Link) findfunctab(state *pclntab, container loader.Bitmap) {
// that map to that subbucket.
n := int32((max - min + SUBBUCKETSIZE - 1) / SUBBUCKETSIZE)
nbuckets := int32((max - min + BUCKETSIZE - 1) / BUCKETSIZE)
nbuckets := int32((max - min + abi.FuncTabBucketSize - 1) / abi.FuncTabBucketSize)
size := 4*int64(nbuckets) + int64(n)
@ -878,7 +877,7 @@ func (ctxt *Link) findfunctab(state *pclntab, container loader.Bitmap) {
q = ldr.SymValue(e)
}
//print("%d: [%lld %lld] %s\n", idx, p, q, s->name);
//fmt.Printf("%d: [%x %x] %s\n", idx, p, q, ldr.SymName(s))
for ; p < q; p += SUBBUCKETSIZE {
i = int((p - min) / SUBBUCKETSIZE)
if indexes[i] > idx {
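
For orientation, the bucket arithmetic can be sketched on its own. The 4096-byte bucket (256 minimum-size functions of 16 bytes) and the 16 subbuckets are taken from the constants shown here and should be read as assumptions about abi.FuncTabBucketSize rather than a spec.

// findfunctab_sketch.go: illustrative only.
package main

import "fmt"

const (
	bucketSize    = 256 * 16 // assumed value of abi.FuncTabBucketSize
	subBuckets    = 16
	subBucketSize = bucketSize / subBuckets
)

// indices returns which bucket and subbucket a pc falls into, relative to the
// start of the text segment.
func indices(pc, textStart uint64) (bucket, sub uint64) {
	off := pc - textStart
	return off / bucketSize, (off % bucketSize) / subBucketSize
}

func main() {
	const textStart = 0x401000
	for _, pc := range []uint64{0x401000, 0x401100, 0x402345} {
		b, s := indices(pc, textStart)
		fmt.Printf("pc %#x -> bucket %d, subbucket %d\n", pc, b, s)
	}
}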

View File

@ -242,10 +242,6 @@ func parseArmAttributes(e binary.ByteOrder, data []byte) (found bool, ehdrFlags
// object, and the returned ehdrFlags contains what this Load function computes.
// TODO: find a better place for this logic.
func Load(l *loader.Loader, arch *sys.Arch, localSymVersion int, f *bio.Reader, pkg string, length int64, pn string, initEhdrFlags uint32) (textp []loader.Sym, ehdrFlags uint32, err error) {
newSym := func(name string, version int) loader.Sym {
return l.CreateStaticSym(name)
}
lookup := l.LookupOrCreateCgoExport
errorf := func(str string, args ...interface{}) ([]loader.Sym, uint32, error) {
return nil, 0, fmt.Errorf("loadelf: %s: %v", pn, fmt.Sprintf(str, args...))
}
@ -515,7 +511,7 @@ func Load(l *loader.Loader, arch *sys.Arch, localSymVersion int, f *bio.Reader,
}
sectsymNames[name] = true
sb := l.MakeSymbolUpdater(lookup(name, localSymVersion))
sb := l.MakeSymbolUpdater(l.LookupOrCreateCgoExport(name, localSymVersion))
switch sect.flags & (elf.SHF_ALLOC | elf.SHF_WRITE | elf.SHF_EXECINSTR) {
default:
@ -556,7 +552,7 @@ func Load(l *loader.Loader, arch *sys.Arch, localSymVersion int, f *bio.Reader,
for i := 1; i < elfobj.nsymtab; i++ {
var elfsym ElfSym
if err := readelfsym(newSym, lookup, l, arch, elfobj, i, &elfsym, 1, localSymVersion); err != nil {
if err := readelfsym(l, arch, elfobj, i, &elfsym, 1, localSymVersion); err != nil {
return errorf("%s: malformed elf file: %v", pn, err)
}
symbols[i] = elfsym.sym
@ -770,7 +766,7 @@ func Load(l *loader.Loader, arch *sys.Arch, localSymVersion int, f *bio.Reader,
rSym = 0
} else {
var elfsym ElfSym
if err := readelfsym(newSym, lookup, l, arch, elfobj, int(symIdx), &elfsym, 0, 0); err != nil {
if err := readelfsym(l, arch, elfobj, int(symIdx), &elfsym, 0, 0); err != nil {
return errorf("malformed elf file: %v", err)
}
elfsym.sym = symbols[symIdx]
@ -847,7 +843,7 @@ func elfmap(elfobj *ElfObj, sect *ElfSect) (err error) {
return nil
}
func readelfsym(newSym, lookup func(string, int) loader.Sym, l *loader.Loader, arch *sys.Arch, elfobj *ElfObj, i int, elfsym *ElfSym, needSym int, localSymVersion int) (err error) {
func readelfsym(l *loader.Loader, arch *sys.Arch, elfobj *ElfObj, i int, elfsym *ElfSym, needSym int, localSymVersion int) (err error) {
if i >= elfobj.nsymtab || i < 0 {
err = fmt.Errorf("invalid elf symbol index")
return err
@ -898,7 +894,7 @@ func readelfsym(newSym, lookup func(string, int) loader.Sym, l *loader.Loader, a
switch elfsym.bind {
case elf.STB_GLOBAL:
if needSym != 0 {
s = lookup(elfsym.name, 0)
s = l.LookupOrCreateCgoExport(elfsym.name, 0)
// for global scoped hidden symbols we should insert it into
// symbol hash table, but mark them as hidden.
@ -927,7 +923,7 @@ func readelfsym(newSym, lookup func(string, int) loader.Sym, l *loader.Loader, a
// We need to be able to look this up,
// so put it in the hash table.
if needSym != 0 {
s = lookup(elfsym.name, localSymVersion)
s = l.LookupOrCreateCgoExport(elfsym.name, localSymVersion)
l.SetAttrVisibilityHidden(s, true)
}
break
@ -940,13 +936,13 @@ func readelfsym(newSym, lookup func(string, int) loader.Sym, l *loader.Loader, a
// FIXME: pass empty string here for name? This would
// reduce mem use, but also (possibly) make it harder
// to debug problems.
s = newSym(elfsym.name, localSymVersion)
s = l.CreateStaticSym(elfsym.name)
l.SetAttrVisibilityHidden(s, true)
}
case elf.STB_WEAK:
if needSym != 0 {
s = lookup(elfsym.name, 0)
s = l.LookupOrCreateCgoExport(elfsym.name, 0)
if elfsym.other == 2 {
l.SetAttrVisibilityHidden(s, true)
}

View File

@ -348,7 +348,7 @@ func TestXFlag(t *testing.T) {
}
}
var testMachOBuildVersionSrc = `
var trivialSrc = `
package main
func main() { }
`
@ -361,7 +361,7 @@ func TestMachOBuildVersion(t *testing.T) {
tmpdir := t.TempDir()
src := filepath.Join(tmpdir, "main.go")
err := os.WriteFile(src, []byte(testMachOBuildVersionSrc), 0666)
err := os.WriteFile(src, []byte(trivialSrc), 0666)
if err != nil {
t.Fatal(err)
}
@ -388,9 +388,9 @@ func TestMachOBuildVersion(t *testing.T) {
found := false
const LC_BUILD_VERSION = 0x32
checkMin := func(ver uint32) {
major, minor := (ver>>16)&0xff, (ver>>8)&0xff
if major != 10 || minor < 9 {
t.Errorf("LC_BUILD_VERSION version %d.%d < 10.9", major, minor)
major, minor, patch := (ver>>16)&0xff, (ver>>8)&0xff, (ver>>0)&0xff
if major < 11 {
t.Errorf("LC_BUILD_VERSION version %d.%d.%d < 11.0.0", major, minor, patch)
}
}
for _, cmd := range exem.Loads {
@ -1375,3 +1375,43 @@ func TestFlagS(t *testing.T) {
}
}
}
func TestRandLayout(t *testing.T) {
// Test that the -randlayout flag randomizes function order and
// generates a working binary.
testenv.MustHaveGoBuild(t)
t.Parallel()
tmpdir := t.TempDir()
src := filepath.Join(tmpdir, "hello.go")
err := os.WriteFile(src, []byte(trivialSrc), 0666)
if err != nil {
t.Fatal(err)
}
var syms [2]string
for i, seed := range []string{"123", "456"} {
exe := filepath.Join(tmpdir, "hello"+seed+".exe")
cmd := testenv.Command(t, testenv.GoToolPath(t), "build", "-ldflags=-randlayout="+seed, "-o", exe, src)
out, err := cmd.CombinedOutput()
if err != nil {
t.Fatalf("build failed: %v\n%s", err, out)
}
cmd = testenv.Command(t, exe)
err = cmd.Run()
if err != nil {
t.Fatalf("executable failed to run: %v\n%s", err, out)
}
cmd = testenv.Command(t, testenv.GoToolPath(t), "tool", "nm", exe)
out, err = cmd.CombinedOutput()
if err != nil {
t.Fatalf("fail to run \"go tool nm\": %v\n%s", err, out)
}
syms[i] = string(out)
}
if syms[0] == syms[1] {
t.Errorf("randlayout with different seeds produced same layout:\n%s\n===\n\n%s", syms[0], syms[1])
}
}

View File

@ -35,11 +35,11 @@ import (
// Header
// caller_name
// callee_name
// "call site offset" "caller's start line number" "flat" "cum" "call edge weight"
// "call site offset" "call edge weight"
// ...
// caller_name
// callee_name
// "call site offset" "caller's start line number" "flat" "cum" "call edge weight"
// "call site offset" "call edge weight"
func usage() {
fmt.Fprintf(os.Stderr, "MUST have (pprof) input file \n")
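
With the Weights struct gone, each edge record is just the caller, the callee, and a single "offset weight" line. A sketch of a writer for that record follows; the function names in the example are hypothetical and the real tool's file header is omitted.

// preprofile_format_sketch.go: illustrative only.
package main

import (
	"bufio"
	"fmt"
	"os"
	"strconv"
)

type edge struct {
	Caller, Callee string
	CallSiteOffset int   // line offset from the caller's start line
	Weight         int64 // accumulated call edge weight
}

func writeEdge(w *bufio.Writer, e edge) {
	const separator = " "
	fmt.Fprintln(w, e.Caller)
	fmt.Fprintln(w, e.Callee)
	w.WriteString(strconv.Itoa(e.CallSiteOffset) + separator + strconv.FormatInt(e.Weight, 10) + "\n")
}

func main() {
	w := bufio.NewWriter(os.Stdout)
	defer w.Flush()
	writeEdge(w, edge{Caller: "example.com/app.main", Callee: "example.com/app.handler", CallSiteOffset: 12, Weight: 400})
}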
@ -52,13 +52,6 @@ type NodeMapKey struct {
CallerName string
CalleeName string
CallSiteOffset int // Line offset from function start line.
CallStartLine int // Start line of the function. Can be 0 which means missing.
}
type Weights struct {
NFlat int64
NCum int64
EWeight int64
}
func readPprofFile(profileFile string, outputFile string, verbose bool) bool {
@ -101,33 +94,19 @@ func readPprofFile(profileFile string, outputFile string, verbose bool) bool {
SampleValue: func(v []int64) int64 { return v[valueIndex] },
})
nFlat := make(map[string]int64)
nCum := make(map[string]int64)
// Accummulate weights for the same node.
for _, n := range g.Nodes {
canonicalName := n.Info.Name
nFlat[canonicalName] += n.FlatValue()
nCum[canonicalName] += n.CumValue()
}
TotalNodeWeight := int64(0)
TotalEdgeWeight := int64(0)
NodeMap := make(map[NodeMapKey]*Weights)
NodeWeightMap := make(map[string]int64)
NodeMap := make(map[NodeMapKey]int64)
for _, n := range g.Nodes {
TotalNodeWeight += n.FlatValue()
canonicalName := n.Info.Name
// Create the key to the nodeMapKey.
nodeinfo := NodeMapKey{
CallerName: canonicalName,
CallSiteOffset: n.Info.Lineno - n.Info.StartLine,
CallStartLine: n.Info.StartLine,
}
if nodeinfo.CallStartLine == 0 {
if n.Info.StartLine == 0 {
if verbose {
log.Println("[PGO] warning: " + canonicalName + " relative line number is missing from the profile")
}
@ -137,27 +116,14 @@ func readPprofFile(profileFile string, outputFile string, verbose bool) bool {
TotalEdgeWeight += e.WeightValue()
nodeinfo.CalleeName = e.Dest.Info.Name
if w, ok := NodeMap[nodeinfo]; ok {
w.EWeight += e.WeightValue()
w += e.WeightValue()
} else {
weights := new(Weights)
weights.NFlat = nFlat[canonicalName]
weights.NCum = nCum[canonicalName]
weights.EWeight = e.WeightValue()
NodeMap[nodeinfo] = weights
w = e.WeightValue()
NodeMap[nodeinfo] = w
}
}
}
for _, n := range g.Nodes {
lineno := fmt.Sprintf("%v", n.Info.Lineno)
canonicalName := n.Info.Name + "-" + lineno
if _, ok := (NodeWeightMap)[canonicalName]; ok {
(NodeWeightMap)[canonicalName] += n.CumValue()
} else {
(NodeWeightMap)[canonicalName] = n.CumValue()
}
}
var fNodeMap *os.File
if outputFile == "" {
fNodeMap = os.Stdout
@ -190,16 +156,13 @@ func readPprofFile(profileFile string, outputFile string, verbose bool) bool {
line = key.CalleeName + "\n"
w.WriteString(line)
line = strconv.Itoa(key.CallSiteOffset)
line = line + separator + strconv.Itoa(key.CallStartLine)
line = line + separator + strconv.FormatInt(element.NFlat, 10)
line = line + separator + strconv.FormatInt(element.NCum, 10)
line = line + separator + strconv.FormatInt(element.EWeight, 10) + "\n"
line = line + separator + strconv.FormatInt(element, 10) + "\n"
w.WriteString(line)
w.Flush()
count += 1
}
if TotalNodeWeight == 0 || TotalEdgeWeight == 0 {
if TotalEdgeWeight == 0 {
return false
}

View File

@ -17,7 +17,6 @@ import (
"net/http"
"slices"
"sort"
"strconv"
"strings"
"time"
)
@ -25,31 +24,23 @@ import (
// GoroutinesHandlerFunc returns a HandlerFunc that serves list of goroutine groups.
func GoroutinesHandlerFunc(summaries map[tracev2.GoID]*trace.GoroutineSummary) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
// goroutineGroup describes a group of goroutines grouped by start PC.
// goroutineGroup describes a group of goroutines grouped by name.
type goroutineGroup struct {
ID uint64 // Unique identifier (PC).
Name string // Start function.
N int // Total number of goroutines in this group.
ExecTime time.Duration // Total execution time of all goroutines in this group.
}
// Accumulate groups by PC.
groupsByPC := make(map[uint64]goroutineGroup)
// Accumulate groups by Name.
groupsByName := make(map[string]goroutineGroup)
for _, summary := range summaries {
group := groupsByPC[summary.PC]
group.ID = summary.PC
group := groupsByName[summary.Name]
group.Name = summary.Name
group.N++
group.ExecTime += summary.ExecTime
groupsByPC[summary.PC] = group
groupsByName[summary.Name] = group
}
var groups []goroutineGroup
for pc, group := range groupsByPC {
group.ID = pc
// If goroutine didn't run during the trace (no sampled PC),
// the v.ID and v.Name will be zero value.
if group.ID == 0 && group.Name == "" {
group.Name = "(Inactive, no stack trace sampled)"
}
for _, group := range groupsByName {
groups = append(groups, group)
}
slices.SortFunc(groups, func(a, b goroutineGroup) int {
@ -92,7 +83,7 @@ Click a start location to view more details about that group.<br>
</tr>
{{range $}}
<tr>
<td><code><a href="/goroutine?id={{.ID}}">{{.Name}}</a></code></td>
<td><code><a href="/goroutine?name={{.Name}}">{{or .Name "(Inactive, no stack trace sampled)"}}</a></code></td>
<td>{{.N}}</td>
<td>{{.ExecTime}}</td>
</tr>
@ -106,11 +97,7 @@ Click a start location to view more details about that group.<br>
// goroutines in a particular group.
func GoroutineHandler(summaries map[tracev2.GoID]*trace.GoroutineSummary) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
pc, err := strconv.ParseUint(r.FormValue("id"), 10, 64)
if err != nil {
http.Error(w, fmt.Sprintf("failed to parse id parameter '%v': %v", r.FormValue("id"), err), http.StatusInternalServerError)
return
}
goroutineName := r.FormValue("name")
type goroutine struct {
*trace.GoroutineSummary
@ -130,7 +117,7 @@ func GoroutineHandler(summaries map[tracev2.GoID]*trace.GoroutineSummary) http.H
for _, summary := range summaries {
totalExecTime += summary.ExecTime
if summary.PC != pc {
if summary.Name != goroutineName {
continue
}
nonOverlappingStats := summary.NonOverlappingStats()
@ -198,9 +185,8 @@ func GoroutineHandler(summaries map[tracev2.GoID]*trace.GoroutineSummary) http.H
}
sort.Strings(allRangeStats)
err = templGoroutine.Execute(w, struct {
err := templGoroutine.Execute(w, struct {
Name string
PC uint64
N int
ExecTimePercent string
MaxTotal time.Duration
@ -209,7 +195,6 @@ func GoroutineHandler(summaries map[tracev2.GoID]*trace.GoroutineSummary) http.H
RangeStats []string
}{
Name: name,
PC: pc,
N: len(goroutines),
ExecTimePercent: execTimePercent,
MaxTotal: maxTotalTime,
@ -339,19 +324,19 @@ Table of contents
</tr>
<tr>
<td>Network wait profile:</td>
<td> <a href="/io?id={{.PC}}">graph</a> <a href="/io?id={{.PC}}&raw=1" download="io.profile">(download)</a></td>
<td> <a href="/io?name={{.Name}}">graph</a> <a href="/io?name={{.Name}}&raw=1" download="io.profile">(download)</a></td>
</tr>
<tr>
<td>Sync block profile:</td>
<td> <a href="/block?id={{.PC}}">graph</a> <a href="/block?id={{.PC}}&raw=1" download="block.profile">(download)</a></td>
<td> <a href="/block?name={{.Name}}">graph</a> <a href="/block?name={{.Name}}&raw=1" download="block.profile">(download)</a></td>
</tr>
<tr>
<td>Syscall profile:</td>
<td> <a href="/syscall?id={{.PC}}">graph</a> <a href="/syscall?id={{.PC}}&raw=1" download="syscall.profile">(download)</a></td>
<td> <a href="/syscall?name={{.Name}}">graph</a> <a href="/syscall?name={{.Name}}&raw=1" download="syscall.profile">(download)</a></td>
</tr>
<tr>
<td>Scheduler wait profile:</td>
<td> <a href="/sched?id={{.PC}}">graph</a> <a href="/sched?id={{.PC}}&raw=1" download="sched.profile">(download)</a></td>
<td> <a href="/sched?name={{.Name}}">graph</a> <a href="/sched?name={{.Name}}&raw=1" download="sched.profile">(download)</a></td>
</tr>
</table>

View File

@ -14,15 +14,14 @@ import (
tracev2 "internal/trace/v2"
"net/http"
"slices"
"strconv"
"strings"
"time"
)
func pprofByGoroutine(compute computePprofFunc, t *parsedTrace) traceviewer.ProfileFunc {
return func(r *http.Request) ([]traceviewer.ProfileRecord, error) {
id := r.FormValue("id")
gToIntervals, err := pprofMatchingGoroutines(id, t)
name := r.FormValue("name")
gToIntervals, err := pprofMatchingGoroutines(name, t)
if err != nil {
return nil, err
}
@ -44,20 +43,12 @@ func pprofByRegion(compute computePprofFunc, t *parsedTrace) traceviewer.Profile
}
}
// pprofMatchingGoroutines parses the goroutine type id string (i.e. pc)
// and returns the ids of goroutines of the matching type and its interval.
// pprofMatchingGoroutines returns the ids of goroutines of the matching name and its interval.
// An empty name matches goroutines that did not run and so have no name.
func pprofMatchingGoroutines(id string, t *parsedTrace) (map[tracev2.GoID][]interval, error) {
if id == "" {
return nil, nil
}
pc, err := strconv.ParseUint(id, 10, 64) // id is string
if err != nil {
return nil, fmt.Errorf("invalid goroutine type: %v", id)
}
func pprofMatchingGoroutines(name string, t *parsedTrace) (map[tracev2.GoID][]interval, error) {
res := make(map[tracev2.GoID][]interval)
for _, g := range t.summary.Goroutines {
if g.PC != pc {
if g.Name != name {
continue
}
endTime := g.EndTime
@ -66,8 +57,8 @@ func pprofMatchingGoroutines(id string, t *parsedTrace) (map[tracev2.GoID][]inte
}
res[g.ID] = []interval{{start: g.StartTime, end: endTime}}
}
if len(res) == 0 && id != "" {
return nil, fmt.Errorf("failed to find matching goroutines for ID: %s", id)
if len(res) == 0 {
return nil, fmt.Errorf("failed to find matching goroutines for name: %s", name)
}
return res, nil
}

View File

@ -186,9 +186,10 @@ type ObjFile interface {
// A Frame describes a single line in a source file.
type Frame struct {
Func string // name of function
File string // source file name
Line int // line in file
Func string // name of function
File string // source file name
Line int // line in file
Column int // column in file
}
// A Sym describes a single symbol in an object file.

View File

@ -129,6 +129,7 @@ func (d *llvmSymbolizer) readFrame() (plugin.Frame, bool) {
}
linenumber := 0
columnnumber := 0
// The llvm-symbolizer outputs the <file_name>:<line_number>:<column_number>.
// When it cannot identify the source code location, it outputs "??:0:0".
// Older versions output just the filename and line number, so we check for
@ -137,22 +138,27 @@ func (d *llvmSymbolizer) readFrame() (plugin.Frame, bool) {
fileline = ""
} else {
switch split := strings.Split(fileline, ":"); len(split) {
case 1:
// filename
fileline = split[0]
case 2, 3:
// filename:line , or
// filename:line:disc , or
fileline = split[0]
case 3:
// filename:line:column
if col, err := strconv.Atoi(split[2]); err == nil {
columnnumber = col
}
fallthrough
case 2:
// filename:line
if line, err := strconv.Atoi(split[1]); err == nil {
linenumber = line
}
fallthrough
case 1:
// filename
fileline = split[0]
default:
// Unrecognized, ignore
}
}
return plugin.Frame{Func: funcname, File: fileline, Line: linenumber}, false
return plugin.Frame{Func: funcname, File: fileline, Line: linenumber, Column: columnnumber}, false
}
// addrInfo returns the stack frame information for a specific program
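For clarity, a standalone sketch of the fallthrough-based parsing introduced above; the input strings are made up and parseLoc is a hypothetical helper, not part of the CL. A three-part location fills the column and falls through to the line and filename cases, while a bare filename is passed through unchanged.

```go
package main

import (
	"fmt"
	"strconv"
	"strings"
)

// parseLoc mirrors the fallthrough structure in readFrame: it accepts
// "file", "file:line", or "file:line:column" and returns whatever it found.
func parseLoc(fileline string) (file string, line, column int) {
	switch split := strings.Split(fileline, ":"); len(split) {
	case 3:
		// filename:line:column
		if col, err := strconv.Atoi(split[2]); err == nil {
			column = col
		}
		fallthrough
	case 2:
		// filename:line
		if l, err := strconv.Atoi(split[1]); err == nil {
			line = l
		}
		fallthrough
	case 1:
		// filename
		file = split[0]
	}
	return file, line, column
}

func main() {
	fmt.Println(parseLoc("main.go:42:7")) // main.go 42 7
	fmt.Println(parseLoc("??:0:0"))       // ?? 0 0
	fmt.Println(parseLoc("main.go"))      // main.go 0 0
}
```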

View File

@ -247,6 +247,8 @@ var configHelp = map[string]string{
"noinlines": helpText(
"Ignore inlines.",
"Attributes inlined functions to their first out-of-line caller."),
"showcolumns": helpText(
"Show column numbers at the source code line level."),
}
func helpText(s ...string) string {

View File

@ -51,6 +51,7 @@ type config struct {
TagShow string `json:"tagshow,omitempty"`
TagHide string `json:"taghide,omitempty"`
NoInlines bool `json:"noinlines,omitempty"`
ShowColumns bool `json:"showcolumns,omitempty"`
// Output granularity
Granularity string `json:"granularity,omitempty"`
@ -157,6 +158,7 @@ func init() {
"sort": "sort",
"granularity": "g",
"noinlines": "noinlines",
"showcolumns": "showcolumns",
}
def := defaultConfig()

View File

@ -256,7 +256,7 @@ func aggregate(prof *profile.Profile, cfg config) error {
default:
return fmt.Errorf("unexpected granularity")
}
return prof.Aggregate(inlines, function, filename, linenumber, address)
return prof.Aggregate(inlines, function, filename, linenumber, cfg.ShowColumns, address)
}
func reportOptions(p *profile.Profile, numLabelUnits map[string]string, cfg config) (*report.Options, error) {

View File

@ -492,17 +492,23 @@ mapping:
func fetch(source string, duration, timeout time.Duration, ui plugin.UI, tr http.RoundTripper) (p *profile.Profile, src string, err error) {
var f io.ReadCloser
if sourceURL, timeout := adjustURL(source, duration, timeout); sourceURL != "" {
ui.Print("Fetching profile over HTTP from " + sourceURL)
if duration > 0 {
ui.Print(fmt.Sprintf("Please wait... (%v)", duration))
// First determine whether the source is a file; if not, it is treated as a URL.
if _, openErr := os.Stat(source); openErr == nil {
if isPerfFile(source) {
f, err = convertPerfData(source, ui)
} else {
f, err = os.Open(source)
}
f, err = fetchURL(sourceURL, timeout, tr)
src = sourceURL
} else if isPerfFile(source) {
f, err = convertPerfData(source, ui)
} else {
f, err = os.Open(source)
sourceURL, timeout := adjustURL(source, duration, timeout)
if sourceURL != "" {
ui.Print("Fetching profile over HTTP from " + sourceURL)
if duration > 0 {
ui.Print(fmt.Sprintf("Please wait... (%v)", duration))
}
f, err = fetchURL(sourceURL, timeout, tr)
src = sourceURL
}
}
if err == nil {
defer f.Close()
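A trimmed sketch of the reordered source detection above; classifySource and the example inputs are hypothetical. An existing local path is used directly, and only otherwise is the argument treated as a URL to fetch.

```go
package main

import (
	"fmt"
	"os"
)

// classifySource mirrors the ordering in fetch: stat the path first, and only
// fall back to URL handling when no local file exists.
func classifySource(source string) string {
	if _, err := os.Stat(source); err == nil {
		return "local file"
	}
	return "remote URL"
}

func main() {
	fmt.Println(classifySource("pprof.samples.cpu.001.pb.gz"))            // "local file" if present
	fmt.Println(classifySource("http://localhost:8080/debug/pprof/heap")) // "remote URL"
}
```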

View File

@ -1,106 +0,0 @@
// Copyright 2017 Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package driver
import (
"encoding/json"
"html/template"
"net/http"
"strings"
"github.com/google/pprof/internal/graph"
"github.com/google/pprof/internal/measurement"
"github.com/google/pprof/internal/report"
)
type treeNode struct {
Name string `json:"n"`
FullName string `json:"f"`
Cum int64 `json:"v"`
CumFormat string `json:"l"`
Percent string `json:"p"`
Children []*treeNode `json:"c"`
}
// flamegraph generates a web page containing a flamegraph.
func (ui *webInterface) flamegraph(w http.ResponseWriter, req *http.Request) {
// Force the call tree so that the graph is a tree.
// Also do not trim the tree so that the flame graph contains all functions.
rpt, errList := ui.makeReport(w, req, []string{"svg"}, func(cfg *config) {
cfg.CallTree = true
cfg.Trim = false
})
if rpt == nil {
return // error already reported
}
// Generate dot graph.
g, config := report.GetDOT(rpt)
var nodes []*treeNode
nroots := 0
rootValue := int64(0)
nodeArr := []string{}
nodeMap := map[*graph.Node]*treeNode{}
// Make all nodes and the map, collect the roots.
for _, n := range g.Nodes {
v := n.CumValue()
fullName := n.Info.PrintableName()
node := &treeNode{
Name: graph.ShortenFunctionName(fullName),
FullName: fullName,
Cum: v,
CumFormat: config.FormatValue(v),
Percent: strings.TrimSpace(measurement.Percentage(v, config.Total)),
}
nodes = append(nodes, node)
if len(n.In) == 0 {
nodes[nroots], nodes[len(nodes)-1] = nodes[len(nodes)-1], nodes[nroots]
nroots++
rootValue += v
}
nodeMap[n] = node
// Get all node names into an array.
nodeArr = append(nodeArr, n.Info.Name)
}
// Populate the child links.
for _, n := range g.Nodes {
node := nodeMap[n]
for child := range n.Out {
node.Children = append(node.Children, nodeMap[child])
}
}
rootNode := &treeNode{
Name: "root",
FullName: "root",
Cum: rootValue,
CumFormat: config.FormatValue(rootValue),
Percent: strings.TrimSpace(measurement.Percentage(rootValue, config.Total)),
Children: nodes[0:nroots],
}
// JSON marshalling flame graph
b, err := json.Marshal(rootNode)
if err != nil {
http.Error(w, "error serializing flame graph", http.StatusInternalServerError)
ui.options.UI.PrintErr(err)
return
}
ui.render(w, req, "flamegraph", rpt, errList, config.Labels, webArgs{
FlameGraph: template.JS(b),
Nodes: nodeArr,
})
}

View File

@ -558,11 +558,6 @@ function viewer(baseUrl, nodes, options) {
return null;
}
// convert a string to a regexp that matches that string.
function quotemeta(str) {
return str.replace(/([\\\.?+*\[\](){}|^$])/g, '\\$1');
}
function setSampleIndexLink(si) {
const elem = document.getElementById('sampletype-' + si);
if (elem != null) {
@ -595,7 +590,7 @@ function viewer(baseUrl, nodes, options) {
// list-based. Construct regular expression depending on mode.
let re = regexpActive
? search.value
: Array.from(getSelection().keys()).map(key => quotemeta(nodes[key])).join('|');
: Array.from(getSelection().keys()).map(key => pprofQuoteMeta(nodes[key])).join('|');
setHrefParams(elem, function (params) {
if (re != '') {
@ -683,7 +678,7 @@ function viewer(baseUrl, nodes, options) {
}
const ids = ['topbtn', 'graphbtn',
'flamegraph', 'flamegraph2', 'flamegraphold',
'flamegraph',
'peek', 'list',
'disasm', 'focus', 'ignore', 'hide', 'show', 'show-from'];
ids.forEach(makeSearchLinkDynamic);
@ -712,3 +707,8 @@ function viewer(baseUrl, nodes, options) {
main.focus();
}
}
// convert a string to a regexp that matches exactly that string.
function pprofQuoteMeta(str) {
return '^' + str.replace(/([\\\.?+*\[\](){}|^$])/g, '\\$1') + '$';
}
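For reference, a small Go sketch of the same anchored-regexp idea; the example symbol name is invented. This is what the server side used to precompute as StackSource.RE before that field was dropped in favour of building the pattern client-side with pprofQuoteMeta.

```go
package main

import (
	"fmt"
	"regexp"
)

func main() {
	// Hypothetical fully qualified source name.
	name := "runtime.mallocgc"
	// Escape and anchor so the regexp matches exactly this name and nothing else.
	re := regexp.MustCompile("^" + regexp.QuoteMeta(name) + "$")
	fmt.Println(re.MatchString("runtime.mallocgc"))     // true
	fmt.Println(re.MatchString("runtime.mallocgcSlow")) // false: anchored match only
}
```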

View File

@ -1,103 +0,0 @@
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<title>{{.Title}}</title>
{{template "css" .}}
<style type="text/css">{{template "d3flamegraphcss" .}}</style>
<style type="text/css">
.flamegraph-content {
width: 90%;
min-width: 80%;
margin-left: 5%;
}
.flamegraph-details {
height: 1.2em;
width: 90%;
min-width: 90%;
margin-left: 5%;
padding: 15px 0 35px;
}
</style>
</head>
<body>
{{template "header" .}}
<div id="bodycontainer">
<div id="flamegraphdetails" class="flamegraph-details"></div>
<div class="flamegraph-content">
<div id="chart"></div>
</div>
</div>
{{template "script" .}}
<script>viewer(new URL(window.location.href), {{.Nodes}});</script>
<script>{{template "d3flamegraphscript" .}}</script>
<script>
{{- /* Deserialize as JSON instead of a JS object literal because the browser's
JSON parser can handle larger payloads than its JS parser. */ -}}
var data = JSON.parse("{{.FlameGraph}}");
var width = document.getElementById('chart').clientWidth;
var flameGraph = flamegraph()
.width(width)
.cellHeight(18)
.minFrameSize(1)
.transitionDuration(750)
.inverted(true)
.sort(true)
.title('')
.tooltip(false)
.setDetailsElement(document.getElementById('flamegraphdetails'));
// <full name> (percentage, value)
flameGraph.label((d) => d.data.f + ' (' + d.data.p + ', ' + d.data.l + ')');
flameGraph.setColorHue('warm');
select('#chart')
.datum(data)
.call(flameGraph);
function clear() {
flameGraph.clear();
}
function resetZoom() {
flameGraph.resetZoom();
}
window.addEventListener('resize', function() {
var width = document.getElementById('chart').clientWidth;
var graphs = document.getElementsByClassName('d3-flame-graph');
if (graphs.length > 0) {
graphs[0].setAttribute('width', width);
}
flameGraph.width(width);
flameGraph.resetZoom();
}, true);
var search = document.getElementById('search');
var searchAlarm = null;
function selectMatching() {
searchAlarm = null;
if (search.value != '') {
flameGraph.search(search.value);
} else {
flameGraph.clear();
}
}
function handleSearch() {
// Delay expensive processing so a flurry of key strokes is handled once.
if (searchAlarm != null) {
clearTimeout(searchAlarm);
}
searchAlarm = setTimeout(selectMatching, 300);
}
search.addEventListener('input', handleSearch);
</script>
</body>
</html>

View File

@ -12,7 +12,6 @@
<a title="{{.Help.top}}" href="./top" id="topbtn">Top</a>
<a title="{{.Help.graph}}" href="./" id="graphbtn">Graph</a>
<a title="{{.Help.flamegraph}}" href="./flamegraph" id="flamegraph">Flame Graph</a>
<a title="{{.Help.flamegraphold}}" href="./flamegraphold" id="flamegraphold">Flame Graph (old)</a>
<a title="{{.Help.peek}}" href="./peek" id="peek">Peek</a>
<a title="{{.Help.list}}" href="./source" id="list">Source</a>
<a title="{{.Help.disasm}}" href="./disasm" id="disasm">Disassemble</a>

View File

@ -75,8 +75,12 @@ function stackViewer(stacks, nodes) {
hiliter: (n, on) => { return hilite(n, on); },
current: () => {
let r = new Map();
for (let p of pivots) {
r.set(p, true);
if (pivots.length == 1 && pivots[0] == 0) {
// Not pivoting
} else {
for (let p of pivots) {
r.set(p, true);
}
}
return r;
}});
@ -145,7 +149,7 @@ function stackViewer(stacks, nodes) {
}
// Update params to include src.
let v = stacks.Sources[src].RE;
let v = pprofQuoteMeta(stacks.Sources[src].FullName);
if (param != 'f' && param != 'sf') { // old f,sf values are overwritten
// Add new source to current parameter value.
const old = params.get(param);
@ -174,7 +178,11 @@ function stackViewer(stacks, nodes) {
function switchPivots(regexp) {
// Switch URL without hitting the server.
const url = new URL(document.URL);
url.searchParams.set('p', regexp);
if (regexp === '' || regexp === '^$') {
url.searchParams.delete('p'); // Not pivoting
} else {
url.searchParams.set('p', regexp);
}
history.pushState('', '', url.toString()); // Makes back-button work
matches = new Set();
search.value = '';
@ -445,7 +453,7 @@ function stackViewer(stacks, nodes) {
r.appendChild(t);
}
r.addEventListener('click', () => { switchPivots(src.RE); });
r.addEventListener('click', () => { switchPivots(pprofQuoteMeta(src.UniqueName)); });
r.addEventListener('mouseenter', () => { handleEnter(box, r); });
r.addEventListener('mouseleave', () => { handleLeave(box); });
r.addEventListener('contextmenu', (e) => { showActionMenu(e, box); });

View File

@ -22,7 +22,7 @@ import (
"github.com/google/pprof/internal/report"
)
// stackView generates the new flamegraph view.
// stackView generates the flamegraph view.
func (ui *webInterface) stackView(w http.ResponseWriter, req *http.Request) {
// Get all data in a report.
rpt, errList := ui.makeReport(w, req, []string{"svg"}, func(cfg *config) {

View File

@ -65,7 +65,7 @@ func massageSVG(svg string) string {
if loc := graphID.FindStringIndex(svg); loc != nil {
svg = svg[:loc[0]] +
`<script type="text/ecmascript"><![CDATA[` + string(svgpan.JSSource) + `]]></script>` +
`<script type="text/ecmascript"><![CDATA[` + svgpan.JSSource + `]]></script>` +
`<g id="viewport" transform="scale(0.5,0.5) translate(0,0)">` +
svg[loc[0]:]
}

View File

@ -19,8 +19,6 @@ import (
"fmt"
"html/template"
"os"
"github.com/google/pprof/third_party/d3flamegraph"
)
//go:embed html
@ -52,11 +50,7 @@ func addTemplates(templates *template.Template) {
template.Must(templates.AddParseTree(name, sub.Tree))
}
// Pre-packaged third-party files.
def("d3flamegraphscript", d3flamegraph.JSSource)
def("d3flamegraphcss", d3flamegraph.CSSSource)
// Embeded files.
// Embedded files.
def("css", loadCSS("html/common.css"))
def("header", loadFile("html/header.html"))
def("graph", loadFile("html/graph.html"))
@ -64,7 +58,7 @@ func addTemplates(templates *template.Template) {
def("top", loadFile("html/top.html"))
def("sourcelisting", loadFile("html/source.html"))
def("plaintext", loadFile("html/plaintext.html"))
def("flamegraph", loadFile("html/flamegraph.html"))
// TODO: Rename "stacks" to "flamegraph" to seal moving off d3 flamegraph.
def("stacks", loadFile("html/stacks.html"))
def("stacks_css", loadCSS("html/stacks.css"))
def("stacks_js", loadJS("html/stacks.js"))

View File

@ -112,7 +112,6 @@ func serveWebInterface(hostport string, p *profile.Profile, o *plugin.Options, d
ui.help["details"] = "Show information about the profile and this view"
ui.help["graph"] = "Display profile as a directed graph"
ui.help["flamegraph"] = "Display profile as a flame graph"
ui.help["flamegraphold"] = "Display profile as a flame graph (old version; slated for removal)"
ui.help["reset"] = "Show the entire profile"
ui.help["save_config"] = "Save current settings"
@ -130,9 +129,9 @@ func serveWebInterface(hostport string, p *profile.Profile, o *plugin.Options, d
"/disasm": http.HandlerFunc(ui.disasm),
"/source": http.HandlerFunc(ui.source),
"/peek": http.HandlerFunc(ui.peek),
"/flamegraphold": http.HandlerFunc(ui.flamegraph),
"/flamegraph": http.HandlerFunc(ui.stackView),
"/flamegraph2": http.HandlerFunc(ui.stackView), // Support older URL
"/flamegraph2": redirectWithQuery("flamegraph", http.StatusMovedPermanently), // Keep legacy URL working.
"/flamegraphold": redirectWithQuery("flamegraph", http.StatusMovedPermanently), // Keep legacy URL working.
"/saveconfig": http.HandlerFunc(ui.saveConfig),
"/deleteconfig": http.HandlerFunc(ui.deleteConfig),
"/download": http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) {
@ -209,15 +208,20 @@ func defaultWebServer(args *plugin.HTTPServerArgs) error {
// https://github.com/google/pprof/pull/348
mux := http.NewServeMux()
mux.Handle("/ui/", http.StripPrefix("/ui", handler))
mux.Handle("/", redirectWithQuery("/ui"))
mux.Handle("/", redirectWithQuery("/ui", http.StatusTemporaryRedirect))
s := &http.Server{Handler: mux}
return s.Serve(ln)
}
func redirectWithQuery(path string) http.HandlerFunc {
// redirectWithQuery responds with a given redirect code, preserving query
// parameters in the redirect URL. It does not convert relative paths to
// absolute paths like http.Redirect does, so that HTTPServerArgs.Handlers can
// generate relative redirects that work with the external prefixing.
func redirectWithQuery(path string, code int) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
pathWithQuery := &gourl.URL{Path: path, RawQuery: r.URL.RawQuery}
http.Redirect(w, r, pathWithQuery.String(), http.StatusTemporaryRedirect)
w.Header().Set("Location", pathWithQuery.String())
w.WriteHeader(code)
}
}
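To make the behaviour concrete, a self-contained sketch that copies the handler above (so it compiles outside the driver package) and exercises it with net/http/httptest; the request URL is made up. The query string survives the redirect and the Location header stays relative.

```go
package main

import (
	"fmt"
	"net/http"
	"net/http/httptest"
	gourl "net/url"
)

func redirectWithQuery(path string, code int) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		pathWithQuery := &gourl.URL{Path: path, RawQuery: r.URL.RawQuery}
		// Set Location by hand: http.Redirect would resolve a relative path
		// against the request URL, which breaks external URL prefixing.
		w.Header().Set("Location", pathWithQuery.String())
		w.WriteHeader(code)
	}
}

func main() {
	h := redirectWithQuery("flamegraph", http.StatusMovedPermanently)
	req := httptest.NewRequest("GET", "/flamegraph2?f=main", nil)
	rec := httptest.NewRecorder()
	h(req, rec)
	fmt.Println(rec.Code, rec.Header().Get("Location")) // 301 flamegraph?f=main
}
```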

View File

@ -154,6 +154,7 @@ type NodeInfo struct {
Address uint64
File string
StartLine, Lineno int
Columnno int
Objfile string
}
@ -174,8 +175,12 @@ func (i *NodeInfo) NameComponents() []string {
switch {
case i.Lineno != 0:
s := fmt.Sprintf("%s:%d", i.File, i.Lineno)
if i.Columnno != 0 {
s += fmt.Sprintf(":%d", i.Columnno)
}
// User requested line numbers, provide what we have.
name = append(name, fmt.Sprintf("%s:%d", i.File, i.Lineno))
name = append(name, s)
case i.File != "":
// User requested file name, provide it.
name = append(name, i.File)
@ -239,6 +244,7 @@ func (nm NodeMap) FindOrInsertNode(info NodeInfo, kept NodeSet) *Node {
// Find a node that represents the whole function.
info.Address = 0
info.Lineno = 0
info.Columnno = 0
n.Function = nm.FindOrInsertNode(info, nil)
return n
}
@ -592,9 +598,10 @@ func nodeInfo(l *profile.Location, line profile.Line, objfile string, o *Options
return &NodeInfo{Address: l.Address, Objfile: objfile}
}
ni := &NodeInfo{
Address: l.Address,
Lineno: int(line.Line),
Name: line.Function.Name,
Address: l.Address,
Lineno: int(line.Line),
Columnno: int(line.Column),
Name: line.Function.Name,
}
if fname := line.Function.Filename; fname != "" {
ni.File = filepath.Clean(fname)

View File

@ -157,11 +157,12 @@ type ObjFile interface {
Close() error
}
// A Frame describes a single line in a source file.
// A Frame describes a location in a single line in a source file.
type Frame struct {
Func string // name of function
File string // source file name
Line int // line in file
Func string // name of function
File string // source file name
Line int // line in file
Column int // column in line (if available)
}
// A Sym describes a single symbol in an object file.

View File

@ -293,7 +293,7 @@ func (rpt *Report) newGraph(nodes graph.NodeSet) *graph.Graph {
return graph.New(rpt.prof, gopt)
}
// printProto writes the incoming proto via thw writer w.
// printProto writes the incoming proto via the writer w.
// If the divide_by option has been specified, samples are scaled appropriately.
func printProto(w io.Writer, rpt *Report) error {
p, o := rpt.prof, rpt.options
@ -339,6 +339,7 @@ func printTopProto(w io.Writer, rpt *Report) error {
Line: []profile.Line{
{
Line: int64(n.Info.Lineno),
Column: int64(n.Info.Columnno),
Function: f,
},
},

View File

@ -18,7 +18,6 @@ import (
"crypto/sha256"
"encoding/binary"
"fmt"
"regexp"
"github.com/google/pprof/internal/measurement"
"github.com/google/pprof/profile"
@ -54,9 +53,6 @@ type StackSource struct {
// Guaranteed to be non-empty.
Display []string
// Regular expression (anchored) that matches exactly FullName.
RE string
// Places holds the list of stack slots where this source occurs.
// In particular, if [a,b] is an element in Places,
// StackSet.Stacks[a].Sources[b] points to this source.
@ -135,7 +131,6 @@ func (s *StackSet) makeInitialStacks(rpt *Report) {
unknownIndex++
}
x.Inlined = inlined
x.RE = "^" + regexp.QuoteMeta(x.UniqueName) + "$"
x.Display = shortNameList(x.FullName)
s.Sources = append(s.Sources, x)
srcs[k] = len(s.Sources) - 1

View File

@ -181,6 +181,7 @@ func doLocalSymbolize(prof *profile.Profile, fast, force bool, obj plugin.ObjToo
l.Line[i] = profile.Line{
Function: f,
Line: int64(frame.Line),
Column: int64(frame.Column),
}
}

View File

@ -530,6 +530,7 @@ func (p *Line) decoder() []decoder {
func (p *Line) encode(b *buffer) {
encodeUint64Opt(b, 1, p.functionIDX)
encodeInt64Opt(b, 2, p.Line)
encodeInt64Opt(b, 3, p.Column)
}
var lineDecoder = []decoder{
@ -538,6 +539,8 @@ var lineDecoder = []decoder{
func(b *buffer, m message) error { return decodeUint64(b, &m.(*Line).functionIDX) },
// optional int64 line = 2
func(b *buffer, m message) error { return decodeInt64(b, &m.(*Line).Line) },
// optional int64 column = 3
func(b *buffer, m message) error { return decodeInt64(b, &m.(*Line).Column) },
}
func (p *Function) decoder() []decoder {

View File

@ -56,7 +56,7 @@ func javaCPUProfile(b []byte, period int64, parse func(b []byte) (uint64, []byte
}
// Strip out addresses for better merge.
if err = p.Aggregate(true, true, true, true, false); err != nil {
if err = p.Aggregate(true, true, true, true, false, false); err != nil {
return nil, err
}
@ -99,7 +99,7 @@ func parseJavaProfile(b []byte) (*Profile, error) {
}
// Strip out addresses for better merge.
if err = p.Aggregate(true, true, true, true, false); err != nil {
if err = p.Aggregate(true, true, true, true, false, false); err != nil {
return nil, err
}

View File

@ -326,12 +326,13 @@ func (l *Location) key() locationKey {
key.addr -= l.Mapping.Start
key.mappingID = l.Mapping.ID
}
lines := make([]string, len(l.Line)*2)
lines := make([]string, len(l.Line)*3)
for i, line := range l.Line {
if line.Function != nil {
lines[i*2] = strconv.FormatUint(line.Function.ID, 16)
}
lines[i*2+1] = strconv.FormatInt(line.Line, 16)
lines[i*2+2] = strconv.FormatInt(line.Column, 16)
}
key.lines = strings.Join(lines, "|")
return key
@ -418,6 +419,7 @@ func (pm *profileMerger) mapLine(src Line) Line {
ln := Line{
Function: pm.mapFunction(src.Function),
Line: src.Line,
Column: src.Column,
}
return ln
}

View File

@ -145,6 +145,7 @@ type Location struct {
type Line struct {
Function *Function
Line int64
Column int64
functionIDX uint64
}
@ -436,7 +437,7 @@ func (p *Profile) CheckValid() error {
// Aggregate merges the locations in the profile into equivalence
// classes preserving the request attributes. It also updates the
// samples to point to the merged locations.
func (p *Profile) Aggregate(inlineFrame, function, filename, linenumber, address bool) error {
func (p *Profile) Aggregate(inlineFrame, function, filename, linenumber, columnnumber, address bool) error {
for _, m := range p.Mapping {
m.HasInlineFrames = m.HasInlineFrames && inlineFrame
m.HasFunctions = m.HasFunctions && function
@ -458,7 +459,7 @@ func (p *Profile) Aggregate(inlineFrame, function, filename, linenumber, address
}
// Aggregate locations
if !inlineFrame || !address || !linenumber {
if !inlineFrame || !address || !linenumber || !columnnumber {
for _, l := range p.Location {
if !inlineFrame && len(l.Line) > 1 {
l.Line = l.Line[len(l.Line)-1:]
@ -466,6 +467,12 @@ func (p *Profile) Aggregate(inlineFrame, function, filename, linenumber, address
if !linenumber {
for i := range l.Line {
l.Line[i].Line = 0
l.Line[i].Column = 0
}
}
if !columnnumber {
for i := range l.Line {
l.Line[i].Column = 0
}
}
if !address {
@ -627,10 +634,11 @@ func (l *Location) string() string {
for li := range l.Line {
lnStr := "??"
if fn := l.Line[li].Function; fn != nil {
lnStr = fmt.Sprintf("%s %s:%d s=%d",
lnStr = fmt.Sprintf("%s %s:%d:%d s=%d",
fn.Name,
fn.Filename,
l.Line[li].Line,
l.Line[li].Column,
fn.StartLine)
if fn.Name != fn.SystemName {
lnStr = lnStr + "(" + fn.SystemName + ")"
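A short usage sketch of the widened Aggregate signature, assuming a pprof build that includes this change and a hypothetical profile file name; the fifth boolean now controls column numbers independently of line numbers and addresses, matching the call sites updated elsewhere in this CL.

```go
package main

import (
	"log"
	"os"

	"github.com/google/pprof/profile"
)

func main() {
	f, err := os.Open("cpu.pb.gz") // hypothetical profile file
	if err != nil {
		log.Fatal(err)
	}
	defer f.Close()

	p, err := profile.Parse(f)
	if err != nil {
		log.Fatal(err)
	}
	// inlineFrame, function, filename, linenumber, columnnumber, address:
	// keep line numbers but drop column numbers and addresses before merging.
	if err := p.Aggregate(true, true, true, true, false, false); err != nil {
		log.Fatal(err)
	}
	log.Printf("aggregated %d samples", len(p.Sample))
}
```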

View File

@ -1,201 +0,0 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "{}"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright {yyyy} {name of copyright owner}
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

View File

@ -1,13 +0,0 @@
Copyright 2010-2021 Mike Bostock
Permission to use, copy, modify, and/or distribute this software for any purpose
with or without fee is hereby granted, provided that the above copyright notice
and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF
THIS SOFTWARE.

View File

@ -1,33 +0,0 @@
# Building a customized D3.js bundle
The D3.js version distributed with pprof is customized to only include the
modules required by pprof.
## Dependencies
- Install [npm](https://www.npmjs.com).
## Building
- Run `update.sh` to:
- Download npm package dependencies (declared in `package.json` and `package-lock.json`)
- Create a d3.js bundle containing the JavScript of d3 and d3-flame-graph (by running `webpack`)
This will `d3_flame_graph.go`, the minified custom D3.js bundle as Go source code.
# References / Appendix
## D3 Custom Bundle
A demonstration of building a custom D3 4.0 bundle using ES2015 modules and Rollup.
[bl.ocks.org/mbostock/bb09af4c39c79cffcde4](https://bl.ocks.org/mbostock/bb09af4c39c79cffcde4)
## Old version of d3-pprof
A previous version of d3-flame-graph bundled for pprof used Rollup instead of
Webpack. This has now been migrated directly into this directory.
The repository configuring Rollup was here:
[github.com/spiermar/d3-pprof](https://github.com/spiermar/d3-pprof)

File diff suppressed because one or more lines are too long

View File

@ -1,13 +0,0 @@
// This file exports a stripped-down API surface of d3 and d3-flame-graph,
// using only the functions used by pprof.
export {
select,
} from "d3-selection";
export {
default as flamegraph
// If we export from "d3-flame-graph" that exports the "dist" version which
// includes another copy of d3-selection. To avoid including d3-selection
// twice in the output, instead import the "src" version.
} from "d3-flame-graph/src/flamegraph";

File diff suppressed because it is too large

View File

@ -1,17 +0,0 @@
{
"name": "d3-pprof",
"version": "2.0.0",
"description": "A d3.js bundle for pprof.",
"scripts": {
"prepare": "webpack --mode production"
},
"license": "Apache-2.0",
"dependencies": {
"d3-flame-graph": "^4.1.3",
"d3-selection": "^3.0.0"
},
"devDependencies": {
"webpack": "^5.64.4",
"webpack-cli": "^4.9.1"
}
}

View File

@ -1,62 +0,0 @@
# Copyright 2021 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#!/usr/bin/env bash
set -eu
set -o pipefail
D3FLAMEGRAPH_CSS="d3-flamegraph.css"
cd $(dirname $0)
generate_d3_flame_graph_go() {
npm install
# https://stackoverflow.com/a/21199041/171898
local d3_js=$(cat d3.js | sed 's/`/`+"`"+`/g')
local d3_css=$(cat "node_modules/d3-flame-graph/dist/${D3FLAMEGRAPH_CSS}")
cat <<-EOF > d3_flame_graph.go
// D3.js is a JavaScript library for manipulating documents based on data.
// https://github.com/d3/d3
// See D3_LICENSE file for license details
// d3-flame-graph is a D3.js plugin that produces flame graphs from hierarchical data.
// https://github.com/spiermar/d3-flame-graph
// See D3_FLAME_GRAPH_LICENSE file for license details
package d3flamegraph
// JSSource returns the d3 and d3-flame-graph JavaScript bundle
const JSSource = \`
$d3_js
\`
// CSSSource returns the $D3FLAMEGRAPH_CSS file
const CSSSource = \`
$d3_css
\`
EOF
gofmt -w d3_flame_graph.go
}
get_licenses() {
cp node_modules/d3-selection/LICENSE D3_LICENSE
cp node_modules/d3-flame-graph/LICENSE D3_FLAME_GRAPH_LICENSE
}
get_licenses
generate_d3_flame_graph_go

View File

@ -1,13 +0,0 @@
// Minimal webpack config to package a minified JS bundle (including
// dependencies) for execution in a <script> tag in the browser.
module.exports = {
entry: './index.js',
output: {
path: __dirname, // Directory containing this webpack.config.js file.
filename: 'd3.js',
// Arbitrary; many module formats could be used, just keeping Universal
// Module Definition as it's the same as what we used in a previous
// version.
libraryTarget: 'umd',
},
};

View File

@ -3,295 +3,9 @@
package svgpan
import _ "embed"
// https://github.com/aleofreddi/svgpan
// JSSource returns the svgpan.js file
const JSSource = `
/**
* SVGPan library 1.2.2
* ======================
*
* Given an unique existing element with id "viewport" (or when missing, the
* first g-element), including the library into any SVG adds the following
* capabilities:
*
* - Mouse panning
* - Mouse zooming (using the wheel)
* - Object dragging
*
* You can configure the behaviour of the pan/zoom/drag with the variables
* listed in the CONFIGURATION section of this file.
*
* Known issues:
*
* - Zooming (while panning) on Safari has still some issues
*
* Releases:
*
* 1.2.2, Tue Aug 30 17:21:56 CEST 2011, Andrea Leofreddi
* - Fixed viewBox on root tag (#7)
* - Improved zoom speed (#2)
*
* 1.2.1, Mon Jul 4 00:33:18 CEST 2011, Andrea Leofreddi
* - Fixed a regression with mouse wheel (now working on Firefox 5)
* - Working with viewBox attribute (#4)
* - Added "use strict;" and fixed resulting warnings (#5)
* - Added configuration variables, dragging is disabled by default (#3)
*
* 1.2, Sat Mar 20 08:42:50 GMT 2010, Zeng Xiaohui
* Fixed a bug with browser mouse handler interaction
*
* 1.1, Wed Feb 3 17:39:33 GMT 2010, Zeng Xiaohui
* Updated the zoom code to support the mouse wheel on Safari/Chrome
*
* 1.0, Andrea Leofreddi
* First release
*
* This code is licensed under the following BSD license:
*
* Copyright 2009-2017 Andrea Leofreddi <a.leofreddi@vleo.net>. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification, are
* permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* 3. Neither the name of the copyright holder nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY COPYRIGHT HOLDERS AND CONTRIBUTORS ''AS IS'' AND ANY EXPRESS
* OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY
* AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL COPYRIGHT HOLDERS OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* The views and conclusions contained in the software and documentation are those of the
* authors and should not be interpreted as representing official policies, either expressed
* or implied, of Andrea Leofreddi.
*/
"use strict";
/// CONFIGURATION
/// ====>
var enablePan = 1; // 1 or 0: enable or disable panning (default enabled)
var enableZoom = 1; // 1 or 0: enable or disable zooming (default enabled)
var enableDrag = 0; // 1 or 0: enable or disable dragging (default disabled)
var zoomScale = 0.2; // Zoom sensitivity
/// <====
/// END OF CONFIGURATION
var root = document.documentElement;
var state = 'none', svgRoot = null, stateTarget, stateOrigin, stateTf;
setupHandlers(root);
/**
* Register handlers
*/
function setupHandlers(root){
setAttributes(root, {
"onmouseup" : "handleMouseUp(evt)",
"onmousedown" : "handleMouseDown(evt)",
"onmousemove" : "handleMouseMove(evt)",
//"onmouseout" : "handleMouseUp(evt)", // Decomment this to stop the pan functionality when dragging out of the SVG element
});
if(navigator.userAgent.toLowerCase().indexOf('webkit') >= 0)
window.addEventListener('mousewheel', handleMouseWheel, false); // Chrome/Safari
else
window.addEventListener('DOMMouseScroll', handleMouseWheel, false); // Others
}
/**
* Retrieves the root element for SVG manipulation. The element is then cached into the svgRoot global variable.
*/
function getRoot(root) {
if(svgRoot == null) {
var r = root.getElementById("viewport") ? root.getElementById("viewport") : root.documentElement, t = r;
while(t != root) {
if(t.getAttribute("viewBox")) {
setCTM(r, t.getCTM());
t.removeAttribute("viewBox");
}
t = t.parentNode;
}
svgRoot = r;
}
return svgRoot;
}
/**
* Instance an SVGPoint object with given event coordinates.
*/
function getEventPoint(evt) {
var p = root.createSVGPoint();
p.x = evt.clientX;
p.y = evt.clientY;
return p;
}
/**
* Sets the current transform matrix of an element.
*/
function setCTM(element, matrix) {
var s = "matrix(" + matrix.a + "," + matrix.b + "," + matrix.c + "," + matrix.d + "," + matrix.e + "," + matrix.f + ")";
element.setAttribute("transform", s);
}
/**
* Dumps a matrix to a string (useful for debug).
*/
function dumpMatrix(matrix) {
var s = "[ " + matrix.a + ", " + matrix.c + ", " + matrix.e + "\n " + matrix.b + ", " + matrix.d + ", " + matrix.f + "\n 0, 0, 1 ]";
return s;
}
/**
* Sets attributes of an element.
*/
function setAttributes(element, attributes){
for (var i in attributes)
element.setAttributeNS(null, i, attributes[i]);
}
/**
* Handle mouse wheel event.
*/
function handleMouseWheel(evt) {
if(!enableZoom)
return;
if(evt.preventDefault)
evt.preventDefault();
evt.returnValue = false;
var svgDoc = evt.target.ownerDocument;
var delta;
if(evt.wheelDelta)
delta = evt.wheelDelta / 360; // Chrome/Safari
else
delta = evt.detail / -9; // Mozilla
var z = Math.pow(1 + zoomScale, delta);
var g = getRoot(svgDoc);
var p = getEventPoint(evt);
p = p.matrixTransform(g.getCTM().inverse());
// Compute new scale matrix in current mouse position
var k = root.createSVGMatrix().translate(p.x, p.y).scale(z).translate(-p.x, -p.y);
setCTM(g, g.getCTM().multiply(k));
if(typeof(stateTf) == "undefined")
stateTf = g.getCTM().inverse();
stateTf = stateTf.multiply(k.inverse());
}
/**
* Handle mouse move event.
*/
function handleMouseMove(evt) {
if(evt.preventDefault)
evt.preventDefault();
evt.returnValue = false;
var svgDoc = evt.target.ownerDocument;
var g = getRoot(svgDoc);
if(state == 'pan' && enablePan) {
// Pan mode
var p = getEventPoint(evt).matrixTransform(stateTf);
setCTM(g, stateTf.inverse().translate(p.x - stateOrigin.x, p.y - stateOrigin.y));
} else if(state == 'drag' && enableDrag) {
// Drag mode
var p = getEventPoint(evt).matrixTransform(g.getCTM().inverse());
setCTM(stateTarget, root.createSVGMatrix().translate(p.x - stateOrigin.x, p.y - stateOrigin.y).multiply(g.getCTM().inverse()).multiply(stateTarget.getCTM()));
stateOrigin = p;
}
}
/**
* Handle click event.
*/
function handleMouseDown(evt) {
if(evt.preventDefault)
evt.preventDefault();
evt.returnValue = false;
var svgDoc = evt.target.ownerDocument;
var g = getRoot(svgDoc);
if(
evt.target.tagName == "svg"
|| !enableDrag // Pan anyway when drag is disabled and the user clicked on an element
) {
// Pan mode
state = 'pan';
stateTf = g.getCTM().inverse();
stateOrigin = getEventPoint(evt).matrixTransform(stateTf);
} else {
// Drag mode
state = 'drag';
stateTarget = evt.target;
stateTf = g.getCTM().inverse();
stateOrigin = getEventPoint(evt).matrixTransform(stateTf);
}
}
/**
* Handle mouse button release event.
*/
function handleMouseUp(evt) {
if(evt.preventDefault)
evt.preventDefault();
evt.returnValue = false;
var svgDoc = evt.target.ownerDocument;
if(state == 'pan' || state == 'drag') {
// Quit pan mode
state = '';
}
}
`
//go:embed svgpan.js
var JSSource string

View File

@ -0,0 +1,261 @@
/**
* SVGPan library 1.2.2
* ======================
*
* Given an unique existing element with id "viewport" (or when missing, the
* first g-element), including the library into any SVG adds the following
* capabilities:
*
* - Mouse panning
* - Mouse zooming (using the wheel)
* - Object dragging
*
* You can configure the behaviour of the pan/zoom/drag with the variables
* listed in the CONFIGURATION section of this file.
*
* This code is licensed under the following BSD license:
*
* Copyright 2009-2019 Andrea Leofreddi <a.leofreddi@vleo.net>. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification, are
* permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* 3. Neither the name of the copyright holder nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY COPYRIGHT HOLDERS AND CONTRIBUTORS ``AS IS'' AND ANY EXPRESS
* OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY
* AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL COPYRIGHT HOLDERS OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* The views and conclusions contained in the software and documentation are those of the
* authors and should not be interpreted as representing official policies, either expressed
* or implied, of Andrea Leofreddi.
*/
"use strict";
/// CONFIGURATION
/// ====>
var enablePan = 1; // 1 or 0: enable or disable panning (default enabled)
var enableZoom = 1; // 1 or 0: enable or disable zooming (default enabled)
var enableDrag = 0; // 1 or 0: enable or disable dragging (default disabled)
var zoomScale = 0.2; // Zoom sensitivity
/// <====
/// END OF CONFIGURATION
var root = document.documentElement;
var state = 'none', svgRoot = null, stateTarget, stateOrigin, stateTf;
setupHandlers(root);
/**
* Register handlers
*/
function setupHandlers(root){
setAttributes(root, {
"onmouseup" : "handleMouseUp(evt)",
"onmousedown" : "handleMouseDown(evt)",
"onmousemove" : "handleMouseMove(evt)",
//"onmouseout" : "handleMouseUp(evt)", // Uncomment this to stop the pan functionality when dragging out of the SVG element
});
if(navigator.userAgent.toLowerCase().indexOf('webkit') >= 0)
window.addEventListener('mousewheel', handleMouseWheel, false); // Chrome/Safari
else
window.addEventListener('DOMMouseScroll', handleMouseWheel, false); // Others
}
/**
* Retrieves the root element for SVG manipulation. The element is then cached into the svgRoot global variable.
*/
function getRoot(root) {
if(svgRoot == null) {
var r = root.getElementById("viewport") ? root.getElementById("viewport") : root.documentElement, t = r;
while(t != root) {
if(t.getAttribute("viewBox")) {
setCTM(r, t.getCTM());
t.removeAttribute("viewBox");
}
t = t.parentNode;
}
svgRoot = r;
}
return svgRoot;
}
/**
 * Instantiate an SVGPoint object with the given event coordinates.
*/
function getEventPoint(evt) {
var p = root.createSVGPoint();
p.x = evt.clientX;
p.y = evt.clientY;
return p;
}
/**
* Sets the current transform matrix of an element.
*/
function setCTM(element, matrix) {
var s = "matrix(" + matrix.a + "," + matrix.b + "," + matrix.c + "," + matrix.d + "," + matrix.e + "," + matrix.f + ")";
element.setAttribute("transform", s);
}
/**
* Dumps a matrix to a string (useful for debug).
*/
function dumpMatrix(matrix) {
var s = "[ " + matrix.a + ", " + matrix.c + ", " + matrix.e + "\n " + matrix.b + ", " + matrix.d + ", " + matrix.f + "\n 0, 0, 1 ]";
return s;
}
/**
* Sets attributes of an element.
*/
function setAttributes(element, attributes){
for (var i in attributes)
element.setAttributeNS(null, i, attributes[i]);
}
/**
* Handle mouse wheel event.
*/
function handleMouseWheel(evt) {
if(!enableZoom)
return;
if(evt.preventDefault)
evt.preventDefault();
evt.returnValue = false;
var svgDoc = evt.target.ownerDocument;
var delta;
if(evt.wheelDelta)
delta = evt.wheelDelta / 360; // Chrome/Safari
else
delta = evt.detail / -9; // Mozilla
var z = Math.pow(1 + zoomScale, delta);
var g = getRoot(svgDoc);
var p = getEventPoint(evt);
p = p.matrixTransform(g.getCTM().inverse());
// Compute new scale matrix in current mouse position
var k = root.createSVGMatrix().translate(p.x, p.y).scale(z).translate(-p.x, -p.y);
setCTM(g, g.getCTM().multiply(k));
if(typeof(stateTf) == "undefined")
stateTf = g.getCTM().inverse();
stateTf = stateTf.multiply(k.inverse());
}
/**
* Handle mouse move event.
*/
function handleMouseMove(evt) {
if(evt.preventDefault)
evt.preventDefault();
evt.returnValue = false;
var svgDoc = evt.target.ownerDocument;
var g = getRoot(svgDoc);
if(state == 'pan' && enablePan) {
// Pan mode
var p = getEventPoint(evt).matrixTransform(stateTf);
setCTM(g, stateTf.inverse().translate(p.x - stateOrigin.x, p.y - stateOrigin.y));
} else if(state == 'drag' && enableDrag) {
// Drag mode
var p = getEventPoint(evt).matrixTransform(g.getCTM().inverse());
setCTM(stateTarget, root.createSVGMatrix().translate(p.x - stateOrigin.x, p.y - stateOrigin.y).multiply(g.getCTM().inverse()).multiply(stateTarget.getCTM()));
stateOrigin = p;
}
}
/**
 * Handle mouse button press event.
*/
function handleMouseDown(evt) {
if(evt.preventDefault)
evt.preventDefault();
evt.returnValue = false;
var svgDoc = evt.target.ownerDocument;
var g = getRoot(svgDoc);
if(
evt.target.tagName == "svg"
|| !enableDrag // Pan anyway when drag is disabled and the user clicked on an element
) {
// Pan mode
state = 'pan';
stateTf = g.getCTM().inverse();
stateOrigin = getEventPoint(evt).matrixTransform(stateTf);
} else {
// Drag mode
state = 'drag';
stateTarget = evt.target;
stateTf = g.getCTM().inverse();
stateOrigin = getEventPoint(evt).matrixTransform(stateTf);
}
}
/**
* Handle mouse button release event.
*/
function handleMouseUp(evt) {
if(evt.preventDefault)
evt.preventDefault();
evt.returnValue = false;
var svgDoc = evt.target.ownerDocument;
if(state == 'pan' || state == 'drag') {
// Quit pan mode
state = '';
}
}
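The new svgpan.js file carries the same script that was previously inlined; SVG-producing tools splice it into their output so the rendered graph pans and zooms in a browser (the script looks for an element with id "viewport", or falls back to the first g element). A rough sketch of that injection step, assuming the embedded source is available as a string (addPanScript and jsSource are hypothetical names, not this tool's real API):

package main

import (
	"fmt"
	"strings"
)

// jsSource stands in for the embedded panning script (svgpan.js above).
const jsSource = "/* contents of svgpan.js */"

// addPanScript splices the panning script just before the closing </svg>
// tag so the generated document becomes interactive in a browser.
func addPanScript(svg string) string {
	i := strings.LastIndex(svg, "</svg>")
	if i < 0 {
		return svg // not a complete SVG document; leave it untouched
	}
	script := `<script type="text/ecmascript"><![CDATA[` + "\n" + jsSource + "\n]]></script>\n"
	return svg[:i] + script + svg[i:]
}

func main() {
	fmt.Println(addPanScript(`<svg xmlns="http://www.w3.org/2000/svg"><g id="viewport"/></svg>`))
}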

View File

@ -308,6 +308,7 @@ var laxGoVersionRE = lazyregexp.New(`^v?(([1-9][0-9]*)\.(0|[1-9][0-9]*))([^0-9].
// Toolchains must be named beginning with `go1`,
// like "go1.20.3" or "go1.20.3-gccgo". As a special case, "default" is also permitted.
// TODO(samthanawalla): Replace regex with https://pkg.go.dev/go/version#IsValid in 1.23+
var ToolchainRE = lazyregexp.New(`^default$|^go1($|\.)`)
func (f *File) add(errs *ErrorList, block *LineBlock, line *Line, verb string, args []string, fix VersionFixer, strict bool) {
@ -384,7 +385,7 @@ func (f *File) add(errs *ErrorList, block *LineBlock, line *Line, verb string, a
errorf("toolchain directive expects exactly one argument")
return
} else if strict && !ToolchainRE.MatchString(args[0]) {
errorf("invalid toolchain version '%s': must match format go1.23.0 or local", args[0])
errorf("invalid toolchain version '%s': must match format go1.23.0 or default", args[0])
return
}
f.Toolchain = &Toolchain{Syntax: line}
@ -630,7 +631,7 @@ func (f *WorkFile) add(errs *ErrorList, line *Line, verb string, args []string,
errorf("go directive expects exactly one argument")
return
} else if !GoVersionRE.MatchString(args[0]) {
errorf("invalid go version '%s': must match format 1.23", args[0])
errorf("invalid go version '%s': must match format 1.23.0", args[0])
return
}
@ -646,7 +647,7 @@ func (f *WorkFile) add(errs *ErrorList, line *Line, verb string, args []string,
errorf("toolchain directive expects exactly one argument")
return
} else if !ToolchainRE.MatchString(args[0]) {
errorf("invalid toolchain version '%s': must match format go1.23 or local", args[0])
errorf("invalid toolchain version '%s': must match format go1.23.0 or default", args[0])
return
}
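These hunks tighten the go and toolchain directive error text to match the accepted forms. The toolchain side is governed by ToolchainRE, shown above; a quick standalone check of what it accepts (this recompiles the same pattern with regexp, whereas the real code uses lazyregexp):

package main

import (
	"fmt"
	"regexp"
)

func main() {
	// Same pattern as ToolchainRE in the hunk above.
	toolchainRE := regexp.MustCompile(`^default$|^go1($|\.)`)
	for _, s := range []string{"go1.20.3", "go1.20.3-gccgo", "default", "1.23.0", "local"} {
		fmt.Printf("%-18q %v\n", s, toolchainRE.MatchString(s))
	}
	// go1.20.3, go1.20.3-gccgo, and default match; 1.23.0 and local do not,
	// which is why the error text now points at "go1.23.0 or default".
}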

View File

@ -8,7 +8,6 @@ import (
"bytes"
"errors"
"fmt"
"path"
"strings"
"sync"
"sync/atomic"
@ -193,51 +192,7 @@ func (c *Client) SetGONOSUMDB(list string) {
var ErrGONOSUMDB = errors.New("skipped (listed in GONOSUMDB)")
func (c *Client) skip(target string) bool {
return globsMatchPath(c.nosumdb, target)
}
// globsMatchPath reports whether any path prefix of target
// matches one of the glob patterns (as defined by path.Match)
// in the comma-separated globs list.
// It ignores any empty or malformed patterns in the list.
func globsMatchPath(globs, target string) bool {
for globs != "" {
// Extract next non-empty glob in comma-separated list.
var glob string
if i := strings.Index(globs, ","); i >= 0 {
glob, globs = globs[:i], globs[i+1:]
} else {
glob, globs = globs, ""
}
if glob == "" {
continue
}
// A glob with N+1 path elements (N slashes) needs to be matched
// against the first N+1 path elements of target,
// which end just before the N+1'th slash.
n := strings.Count(glob, "/")
prefix := target
// Walk target, counting slashes, truncating at the N+1'th slash.
for i := 0; i < len(target); i++ {
if target[i] == '/' {
if n == 0 {
prefix = target[:i]
break
}
n--
}
}
if n > 0 {
// Not enough prefix elements.
continue
}
matched, _ := path.Match(glob, prefix)
if matched {
return true
}
}
return false
return module.MatchPrefixPatterns(c.nosumdb, target)
}
// Lookup returns the go.sum lines for the given module path and version.
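The removed globsMatchPath was a local copy of the prefix-glob matching that golang.org/x/mod/module now exposes as MatchPrefixPatterns, so the client simply defers to it. A small usage sketch, assuming that package is on the build list:

package main

import (
	"fmt"

	"golang.org/x/mod/module"
)

func main() {
	// Comma-separated glob list in the GONOSUMDB/GOPRIVATE style.
	globs := "*.corp.example.com,rsc.io/private"

	// True: the path prefix "rsc.io/private" matches a pattern.
	fmt.Println(module.MatchPrefixPatterns(globs, "rsc.io/private/quux"))

	// False: no pattern matches any path prefix of this module path.
	fmt.Println(module.MatchPrefixPatterns(globs, "rsc.io/public"))
}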

View File

@ -1785,6 +1785,8 @@ const (
LANDLOCK_ACCESS_FS_REMOVE_FILE = 0x20
LANDLOCK_ACCESS_FS_TRUNCATE = 0x4000
LANDLOCK_ACCESS_FS_WRITE_FILE = 0x2
LANDLOCK_ACCESS_NET_BIND_TCP = 0x1
LANDLOCK_ACCESS_NET_CONNECT_TCP = 0x2
LANDLOCK_CREATE_RULESET_VERSION = 0x1
LINUX_REBOOT_CMD_CAD_OFF = 0x0
LINUX_REBOOT_CMD_CAD_ON = 0x89abcdef
@ -2465,6 +2467,7 @@ const (
PR_MCE_KILL_GET = 0x22
PR_MCE_KILL_LATE = 0x0
PR_MCE_KILL_SET = 0x1
PR_MDWE_NO_INHERIT = 0x2
PR_MDWE_REFUSE_EXEC_GAIN = 0x1
PR_MPX_DISABLE_MANAGEMENT = 0x2c
PR_MPX_ENABLE_MANAGEMENT = 0x2b
@ -2669,8 +2672,9 @@ const (
RTAX_FEATURES = 0xc
RTAX_FEATURE_ALLFRAG = 0x8
RTAX_FEATURE_ECN = 0x1
RTAX_FEATURE_MASK = 0xf
RTAX_FEATURE_MASK = 0x1f
RTAX_FEATURE_SACK = 0x2
RTAX_FEATURE_TCP_USEC_TS = 0x10
RTAX_FEATURE_TIMESTAMP = 0x4
RTAX_HOPLIMIT = 0xa
RTAX_INITCWND = 0xb
@ -3104,6 +3108,7 @@ const (
SOL_TIPC = 0x10f
SOL_TLS = 0x11a
SOL_UDP = 0x11
SOL_VSOCK = 0x11f
SOL_X25 = 0x106
SOL_XDP = 0x11b
SOMAXCONN = 0x1000
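Most of these constant additions are mechanical regenerations from newer kernel headers; the RTAX_FEATURE_MASK change from 0xf to 0x1f just folds in the new RTAX_FEATURE_TCP_USEC_TS bit. A tiny check of that arithmetic, using local copies of the values from the diff (for illustration only):

package main

import "fmt"

// Values mirrored from the diff above, for illustration only.
const (
	rtaxFeatureECN       = 0x1
	rtaxFeatureSACK      = 0x2
	rtaxFeatureTimestamp = 0x4
	rtaxFeatureAllFrag   = 0x8
	rtaxFeatureTCPUsecTS = 0x10 // new in this change
)

func main() {
	mask := rtaxFeatureECN | rtaxFeatureSACK | rtaxFeatureTimestamp |
		rtaxFeatureAllFrag | rtaxFeatureTCPUsecTS
	fmt.Printf("RTAX_FEATURE_MASK = %#x\n", mask) // prints 0x1f, matching the updated constant
}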

Some files were not shown because too many files have changed in this diff.