Source file
src/reflect/value.go
Documentation: reflect
1
2
3
4
5 package reflect
6
7 import (
8 "errors"
9 "internal/abi"
10 "internal/goarch"
11 "internal/itoa"
12 "internal/unsafeheader"
13 "math"
14 "runtime"
15 "unsafe"
16 )
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
// Value is the reflection interface to a Go value.
//
// Not all methods apply to all kinds of values. Restrictions,
// if any, are noted in the documentation for each method.
// Use the Kind method to find out the kind of value before
// calling kind-specific methods. Calling a method
// inappropriate to the kind of type causes a run time panic.
//
// The zero Value represents no value.
// Its IsValid method returns false, its Kind method returns Invalid,
// and all other methods panic.
// Most functions and methods never return an invalid value.
//
// A Value can be used concurrently by multiple goroutines provided that
// the underlying Go value can be used concurrently for the equivalent
// direct operations.
//
// Using == on two Values does not compare the underlying values
// they represent.
type Value struct {
	// typ_ holds the type of the value represented by a Value.
	// Access using the typ method to avoid escape of v.
	typ_ *abi.Type

	// Pointer-valued data or, if flagIndir is set, pointer to data.
	// Valid when either flagIndir is set or typ.Pointers() is true.
	ptr unsafe.Pointer

	// flag holds metadata about the value.
	//
	// The lowest five bits give the Kind of the value, mirroring typ_.Kind().
	//
	// The next set of bits are flag bits:
	//	- flagStickyRO: obtained via unexported not embedded field, so read-only
	//	- flagEmbedRO: obtained via unexported embedded field, so read-only
	//	- flagIndir: ptr holds a pointer to the data
	//	- flagAddr: v.CanAddr is true (implies flagIndir and ptr is non-nil)
	//	- flagMethod: v is a method value
	//
	// The remaining bits (above flagMethodShift) give a method number
	// for method values. If flag.kind() != Func, code can assume that
	// flagMethod is unset.
	flag

	// A method value represents a curried method invocation
	// like r.Read for some receiver r. The typ_+val+flag bits describe
	// the receiver r, but the flag's Kind bits say Func (methods are
	// functions), and the top bits of the flag give the method number
	// in r's type's method table.
}
70
// flag packs the Kind, access/indirection bits, and (for method values)
// the method index of a Value into a single word.
type flag uintptr

const (
	flagKindWidth        = 5 // there are 27 kinds; 5 bits are enough
	flagKindMask    flag = 1<<flagKindWidth - 1
	flagStickyRO    flag = 1 << 5 // obtained via unexported not embedded field
	flagEmbedRO     flag = 1 << 6 // obtained via unexported embedded field
	flagIndir       flag = 1 << 7 // ptr holds a pointer to the data
	flagAddr        flag = 1 << 8 // v.CanAddr is true
	flagMethod      flag = 1 << 9 // v is a method value
	flagMethodShift      = 10     // method index occupies the bits above this
	flagRO          flag = flagStickyRO | flagEmbedRO // any read-only bit
)
84
// kind extracts the Kind encoded in the low flagKindWidth bits of f.
func (f flag) kind() Kind {
	return Kind(f & flagKindMask)
}
88
// ro returns the read-only flag to propagate to values derived from f:
// any RO bit (sticky or embedded) collapses to flagStickyRO, because
// once a value is reached through an unexported field it stays read-only.
func (f flag) ro() flag {
	if f&flagRO != 0 {
		return flagStickyRO
	}
	return 0
}
95
// typ returns v's type, laundered through noescape so that taking the
// type pointer does not force v (often a stack value) to escape to the
// heap. Types are either static (compiler-created) or pinned for the
// lifetime of a process, so this is safe.
func (v Value) typ() *abi.Type {
	return (*abi.Type)(noescape(unsafe.Pointer(v.typ_)))
}
104
105
106
107
// pointer returns the underlying pointer represented by v.
// v.Kind() must be Pointer, Map, Chan, Func, or UnsafePointer —
// enforced here by checking that v is exactly one pointer word in size.
func (v Value) pointer() unsafe.Pointer {
	if v.typ().Size() != goarch.PtrSize || !v.typ().Pointers() {
		panic("can't call pointer on a non-pointer Value")
	}
	if v.flag&flagIndir != 0 {
		// ptr points at the pointer; dereference one level.
		return *(*unsafe.Pointer)(v.ptr)
	}
	return v.ptr
}
117
118
// packEface converts v to the empty interface.
func packEface(v Value) any {
	t := v.typ()
	var i any
	e := (*emptyInterface)(unsafe.Pointer(&i))
	// First, fill in the data portion of the interface.
	switch {
	case t.IfaceIndir():
		// Value is indirect, and so is the interface we're making.
		if v.flag&flagIndir == 0 {
			panic("bad indir")
		}
		ptr := v.ptr
		if v.flag&flagAddr != 0 {
			// The value is addressable, so the caller could later mutate
			// it through v. An interface must own its data; make a copy.
			c := unsafe_New(t)
			typedmemmove(t, c, ptr)
			ptr = c
		}
		e.word = ptr
	case v.flag&flagIndir != 0:
		// Value is indirect, but interface is direct. We need
		// to load the data at v.ptr into the interface data word.
		e.word = *(*unsafe.Pointer)(v.ptr)
	default:
		// Value is direct, and so is the interface.
		e.word = v.ptr
	}
	// Now, fill in the type portion. We're very careful here not
	// to have any operation between the e.word and e.typ assignments
	// that would let the garbage collector observe the partially-built
	// interface value.
	e.typ = t
	return i
}
154
155
// unpackEface converts the empty interface i to a Value.
func unpackEface(i any) Value {
	e := (*emptyInterface)(unsafe.Pointer(&i))
	// NOTE: don't read e.word until we know whether it is really a pointer or not.
	t := e.typ
	if t == nil {
		return Value{}
	}
	f := flag(t.Kind())
	if t.IfaceIndir() {
		f |= flagIndir
	}
	return Value{t, e.word, f}
}
169
170
171
172
// A ValueError occurs when a Value method is invoked on
// a Value that does not support it. Such cases are documented
// in the description of each method.
type ValueError struct {
	Method string // the method that was called, e.g. "reflect.Value.Int"
	Kind   Kind   // the actual kind of the receiver; 0 means a zero Value
}
177
178 func (e *ValueError) Error() string {
179 if e.Kind == 0 {
180 return "reflect: call of " + e.Method + " on zero Value"
181 }
182 return "reflect: call of " + e.Method + " on " + e.Kind.String() + " Value"
183 }
184
185
// valueMethodName returns the name of the exported calling method,
// assumed to be two stack frames above (e.g. "reflect.Value.Int"),
// for use in panic messages. It walks at most 5 frames looking for
// the first function of the form reflect.Value.<ExportedName>.
func valueMethodName() string {
	var pc [5]uintptr
	n := runtime.Callers(1, pc[:])
	frames := runtime.CallersFrames(pc[:n])
	var frame runtime.Frame
	for more := true; more; {
		const prefix = "reflect.Value."
		frame, more = frames.Next()
		name := frame.Function
		if len(name) > len(prefix) && name[:len(prefix)] == prefix {
			methodName := name[len(prefix):]
			// Only accept exported methods; unexported helpers
			// (like this one) are skipped.
			if len(methodName) > 0 && 'A' <= methodName[0] && methodName[0] <= 'Z' {
				return name
			}
		}
	}
	return "unknown method"
}
204
205
// emptyInterface is the header for an interface{} value.
// It must mirror the runtime's eface layout.
type emptyInterface struct {
	typ  *abi.Type
	word unsafe.Pointer
}
210
211
// nonEmptyInterface is the header for an interface value with methods.
// It must mirror the runtime's iface/itab layout. The fun array is
// declared absurdly large so that fun[i] is addressable for any valid
// method index; only the real method count is ever accessed.
type nonEmptyInterface struct {
	itab *struct {
		ityp *abi.Type // static interface type
		typ  *abi.Type // dynamic concrete type
		hash uint32    // copy of typ.hash
		_    [4]byte
		fun  [100000]unsafe.Pointer // method table
	}
	word unsafe.Pointer
}
223
224
225
226
227
228
229
// mustBe panics if f's kind is not expected.
// Making this a method on flag instead of on Value
// (and embedding flag in Value) means that we can write
// the very clear v.mustBe(Bool) and have it compile into
// v.flag.mustBe(Bool), which will only bother to copy the
// single important word for the receiver.
func (f flag) mustBe(expected Kind) {
	// TODO(mvdan): use f.kind() again once mid-stack inlining gets better
	if Kind(f&flagKindMask) != expected {
		panic(&ValueError{valueMethodName(), f.kind()})
	}
}
236
237
238
// mustBeExported panics if f records that the value was obtained using
// an unexported field. The fast path checks and delegates so that this
// function stays within the inlining budget.
func (f flag) mustBeExported() {
	if f == 0 || f&flagRO != 0 {
		f.mustBeExportedSlow()
	}
}
244
// mustBeExportedSlow is the outlined panic path of mustBeExported:
// zero Value first, then unexported-field access.
func (f flag) mustBeExportedSlow() {
	if f == 0 {
		panic(&ValueError{valueMethodName(), Invalid})
	}
	if f&flagRO != 0 {
		panic("reflect: " + valueMethodName() + " using value obtained using unexported field")
	}
}
253
254
255
256
// mustBeAssignable panics if f records that the value is not assignable,
// which is to say that either it was obtained using an unexported field
// or it is not addressable. The fast path delegates to keep this inlinable.
func (f flag) mustBeAssignable() {
	if f&flagRO != 0 || f&flagAddr == 0 {
		f.mustBeAssignableSlow()
	}
}
262
// mustBeAssignableSlow is the outlined panic path of mustBeAssignable.
// It distinguishes the three failure modes in order: zero Value,
// read-only (unexported field), and unaddressable.
func (f flag) mustBeAssignableSlow() {
	if f == 0 {
		panic(&ValueError{valueMethodName(), Invalid})
	}
	// Assignable if addressable and not read-only.
	if f&flagRO != 0 {
		panic("reflect: " + valueMethodName() + " using value obtained using unexported field")
	}
	if f&flagAddr == 0 {
		panic("reflect: " + valueMethodName() + " using unaddressable value")
	}
}
275
276
277
278
279
280
// Addr returns a pointer value representing the address of v.
// It panics if CanAddr() returns false.
// Addr is typically used to obtain a pointer to a struct field
// or slice element in order to call a method that requires a
// pointer receiver.
func (v Value) Addr() Value {
	if v.flag&flagAddr == 0 {
		panic("reflect.Value.Addr of unaddressable value")
	}
	// Preserve flagRO instead of using v.flag.ro() so that
	// v.Addr().Elem() is equivalent to v (#32772).
	fl := v.flag & flagRO
	return Value{ptrTo(v.typ()), v.ptr, fl | flag(Pointer)}
}
290
291
292
// Bool returns v's underlying value.
// It panics if v's kind is not Bool.
func (v Value) Bool() bool {
	// panicNotBool is outlined to keep Bool inlineable.
	if v.kind() != Bool {
		v.panicNotBool()
	}
	return *(*bool)(v.ptr)
}
300
// panicNotBool raises the standard ValueError for a non-Bool receiver;
// kept out of Bool itself so Bool stays within the inlining budget.
func (v Value) panicNotBool() {
	v.mustBe(Bool)
}
304
// bytesType is the reflect type of plain []byte, used by Bytes as a
// fast-path identity check.
var bytesType = rtypeOf(([]byte)(nil))
306
307
308
309
// Bytes returns v's underlying value.
// It panics if v's underlying value is not a slice of bytes or
// an addressable array of bytes.
func (v Value) Bytes() []byte {
	// bytesSlow is split out to keep Bytes inlineable for the
	// common case of exactly []byte.
	if v.typ_ == bytesType {
		return *(*[]byte)(v.ptr)
	}
	return v.bytesSlow()
}
317
// bytesSlow handles the non-fast-path cases for Bytes: named byte-slice
// types and addressable byte arrays.
func (v Value) bytesSlow() []byte {
	switch v.kind() {
	case Slice:
		if v.typ().Elem().Kind() != abi.Uint8 {
			panic("reflect.Value.Bytes of non-byte slice")
		}
		// Slice is always bigger than a word; assume flagIndir.
		return *(*[]byte)(v.ptr)
	case Array:
		if v.typ().Elem().Kind() != abi.Uint8 {
			panic("reflect.Value.Bytes of non-byte array")
		}
		if !v.CanAddr() {
			panic("reflect.Value.Bytes of unaddressable byte array")
		}
		p := (*byte)(v.ptr)
		n := int((*arrayType)(unsafe.Pointer(v.typ())).Len)
		return unsafe.Slice(p, n)
	}
	panic(&ValueError{"reflect.Value.Bytes", v.kind()})
}
339
340
341
342 func (v Value) runes() []rune {
343 v.mustBe(Slice)
344 if v.typ().Elem().Kind() != abi.Int32 {
345 panic("reflect.Value.Bytes of non-rune slice")
346 }
347
348 return *(*[]rune)(v.ptr)
349 }
350
351
352
353
354
355
// CanAddr reports whether the value's address can be obtained with Addr.
// Such values are called addressable. A value can be addressed if it is
// an element of a slice, the field of an addressable struct, the element
// of an addressable array, or the result of dereferencing a pointer.
// If CanAddr returns false, calling Addr will panic.
func (v Value) CanAddr() bool {
	return v.flag&flagAddr != 0
}
359
360
361
362
363
364
// CanSet reports whether the value of v can be changed.
// A Value can be changed only if it is addressable and was not
// obtained by the use of unexported struct fields.
// If CanSet returns false, calling Set or any type-specific
// setter (e.g., SetBool, SetInt) will panic.
func (v Value) CanSet() bool {
	return v.flag&(flagAddr|flagRO) == flagAddr
}
368
369
370
371
372
373
374
375
376
// Call calls the function v with the input arguments in.
// As in Go, each input argument must be assignable to the
// type of the function's corresponding input parameter.
// Call panics if v's Kind is not Func.
// It panics if v was obtained by accessing unexported struct fields.
// It returns the output results as Values.
// If v is a variadic function, Call itself creates the variadic slice
// parameter from the corresponding arguments.
func (v Value) Call(in []Value) []Value {
	v.mustBe(Func)
	v.mustBeExported()
	return v.call("Call", in)
}
382
383
384
385
386
387
388
389
// CallSlice calls the variadic function v with the input arguments in,
// assigning the slice in[len(in)-1] to v's final variadic argument.
// For example, if len(in) == 3, v.CallSlice(in) represents the Go call v(in[0], in[1], in[2]...).
// CallSlice panics if v's Kind is not Func or if v is not variadic.
// It returns the output results as Values.
func (v Value) CallSlice(in []Value) []Value {
	v.mustBe(Func)
	v.mustBeExported()
	return v.call("CallSlice", in)
}
395
// callGC, when set by tests, forces a garbage collection around
// reflective calls to shake out liveness bugs.
var callGC bool

// debugReflectCall enables verbose dumping of call frames and ABI
// assignments during Value.call.
const debugReflectCall = false
399
// call is the shared implementation of Call and CallSlice.
// op is "Call" or "CallSlice" and selects the variadic-argument
// handling and the panic messages.
func (v Value) call(op string, in []Value) []Value {
	// Get function pointer, type.
	t := (*funcType)(unsafe.Pointer(v.typ()))
	var (
		fn       unsafe.Pointer
		rcvr     Value
		rcvrtype *abi.Type
	)
	if v.flag&flagMethod != 0 {
		// Method value: resolve receiver type, method func type and code pointer.
		rcvr = v
		rcvrtype, t, fn = methodReceiver(op, v, int(v.flag)>>flagMethodShift)
	} else if v.flag&flagIndir != 0 {
		fn = *(*unsafe.Pointer)(v.ptr)
	} else {
		fn = v.ptr
	}

	if fn == nil {
		panic("reflect.Value.Call: call of nil function")
	}

	// Validate argument count against the function signature.
	isSlice := op == "CallSlice"
	n := t.NumIn()
	isVariadic := t.IsVariadic()
	if isSlice {
		if !isVariadic {
			panic("reflect: CallSlice of non-variadic function")
		}
		if len(in) < n {
			panic("reflect: CallSlice with too few input arguments")
		}
		if len(in) > n {
			panic("reflect: CallSlice with too many input arguments")
		}
	} else {
		if isVariadic {
			n-- // the variadic parameter is filled separately below
		}
		if len(in) < n {
			panic("reflect: Call with too few input arguments")
		}
		if !isVariadic && len(in) > n {
			panic("reflect: Call with too many input arguments")
		}
	}
	for _, x := range in {
		if x.Kind() == Invalid {
			panic("reflect: " + op + " using zero Value argument")
		}
	}
	for i := 0; i < n; i++ {
		if xt, targ := in[i].Type(), t.In(i); !xt.AssignableTo(toRType(targ)) {
			panic("reflect: " + op + " using " + xt.String() + " as type " + stringFor(targ))
		}
	}
	if !isSlice && isVariadic {
		// Prepare slice for remaining variadic values.
		m := len(in) - n
		slice := MakeSlice(toRType(t.In(n)), m, m)
		elem := toRType(t.In(n)).Elem() // FIXME cast to slice type and Elem()
		for i := 0; i < m; i++ {
			x := in[n+i]
			if xt := x.Type(); !xt.AssignableTo(elem) {
				panic("reflect: cannot use " + xt.String() + " as type " + elem.String() + " in " + op)
			}
			slice.Index(i).Set(x)
		}
		origIn := in
		in = make([]Value, n+1)
		copy(in[:n], origIn)
		in[n] = slice
	}

	nin := len(in)
	if nin != t.NumIn() {
		panic("reflect.Value.Call: wrong argument count")
	}
	nout := t.NumOut()

	// Register argument space.
	var regArgs abi.RegArgs

	// Compute frame type.
	frametype, framePool, abid := funcLayout(t, rcvrtype)

	// Allocate a chunk of memory for the stack-assigned arguments.
	var stackArgs unsafe.Pointer
	if frametype.Size() != 0 {
		if nout == 0 {
			stackArgs = framePool.Get().(unsafe.Pointer)
		} else {
			// Can't use pool if the function has return values.
			// We will leak pointer to args in ret, so its lifetime is not scoped.
			stackArgs = unsafe_New(frametype)
		}
	}
	frameSize := frametype.Size()

	if debugReflectCall {
		println("reflect.call", stringFor(&t.Type))
		abid.dump()
	}

	// Copy inputs into args.

	// Handle receiver.
	inStart := 0
	if rcvrtype != nil {
		// Guaranteed to only be one word in size,
		// so it will only take up exactly 1 abiStep (either
		// in a register or on the stack).
		switch st := abid.call.steps[0]; st.kind {
		case abiStepStack:
			storeRcvr(rcvr, stackArgs)
		case abiStepPointer:
			storeRcvr(rcvr, unsafe.Pointer(&regArgs.Ptrs[st.ireg]))
			fallthrough
		case abiStepIntReg:
			storeRcvr(rcvr, unsafe.Pointer(&regArgs.Ints[st.ireg]))
		case abiStepFloatReg:
			storeRcvr(rcvr, unsafe.Pointer(&regArgs.Floats[st.freg]))
		default:
			panic("unknown ABI parameter kind")
		}
		inStart = 1
	}

	// Handle arguments.
	for i, v := range in {
		v.mustBeExported()
		targ := toRType(t.In(i))
		// TODO(mknyszek): Figure out if it's possible to get some
		// scratch space for this assignment check. Previously, it
		// was possible to use space in the argument frame.
		v = v.assignTo("reflect.Value.Call", &targ.t, nil)
	stepsLoop:
		for _, st := range abid.call.stepsForValue(i + inStart) {
			switch st.kind {
			case abiStepStack:
				// Copy values to the "stack."
				addr := add(stackArgs, st.stkOff, "precomputed stack arg offset")
				if v.flag&flagIndir != 0 {
					typedmemmove(&targ.t, addr, v.ptr)
				} else {
					*(*unsafe.Pointer)(addr) = v.ptr
				}
				// There's only one step for a stack-allocated value.
				break stepsLoop
			case abiStepIntReg, abiStepPointer:
				// Copy values to "integer registers."
				if v.flag&flagIndir != 0 {
					offset := add(v.ptr, st.offset, "precomputed value offset")
					if st.kind == abiStepPointer {
						// Duplicate this pointer in the pointer area of the
						// register space. Otherwise, there's the potential for
						// this to be the last reference to v.ptr.
						regArgs.Ptrs[st.ireg] = *(*unsafe.Pointer)(offset)
					}
					intToReg(&regArgs, st.ireg, st.size, offset)
				} else {
					if st.kind == abiStepPointer {
						// See the comment in abiStepPointer case above.
						regArgs.Ptrs[st.ireg] = v.ptr
					}
					regArgs.Ints[st.ireg] = uintptr(v.ptr)
				}
			case abiStepFloatReg:
				// Copy values to "float registers."
				if v.flag&flagIndir == 0 {
					panic("attempted to copy pointer to FP register")
				}
				offset := add(v.ptr, st.offset, "precomputed value offset")
				floatToReg(&regArgs, st.freg, st.size, offset)
			default:
				panic("unknown ABI part kind")
			}
		}
	}
	// TODO(mknyszek): Remove this when we no longer have
	// caller reserved spill space.
	frameSize = align(frameSize, goarch.PtrSize)
	frameSize += abid.spill

	// Mark pointers in registers for the return path.
	regArgs.ReturnIsPtr = abid.outRegPtrs

	if debugReflectCall {
		regArgs.Dump()
	}

	// For testing; see TestCallMethodJump.
	if callGC {
		runtime.GC()
	}

	// Call.
	call(frametype, fn, stackArgs, uint32(frametype.Size()), uint32(abid.retOffset), uint32(frameSize), &regArgs)

	// For testing; see TestCallMethodJump.
	if callGC {
		runtime.GC()
	}

	var ret []Value
	if nout == 0 {
		if stackArgs != nil {
			typedmemclr(frametype, stackArgs)
			framePool.Put(stackArgs)
		}
	} else {
		if stackArgs != nil {
			// Zero the now unused input area of args,
			// because the Values returned by this function contain pointers to the args object,
			// and will thus keep the args object alive indefinitely.
			typedmemclrpartial(frametype, stackArgs, 0, abid.retOffset)
		}

		// Wrap Values around return values in args.
		ret = make([]Value, nout)
		for i := 0; i < nout; i++ {
			tv := t.Out(i)
			if tv.Size() == 0 {
				// For zero-sized return value, args+off may point to the next object.
				// In this case, return the zero value instead.
				ret[i] = Zero(toRType(tv))
				continue
			}
			steps := abid.ret.stepsForValue(i)
			if st := steps[0]; st.kind == abiStepStack {
				// This value is on the stack. If part of a value is stack
				// allocated, the entire value is according to the ABI. So
				// just make an indirection into the allocated frame.
				fl := flagIndir | flag(tv.Kind())
				ret[i] = Value{tv, add(stackArgs, st.stkOff, "tv.Size() != 0"), fl}
				// Note: this does introduce false sharing between results -
				// if any result is live, they are all live.
				// (And the space for the args is live as well, but as we've
				// cleared the inputs, the only live part is the results.)
				continue
			}

			// Handle pointers passed in registers.
			if !ifaceIndir(tv) {
				// Pointer-valued data gets put directly
				// into v.ptr.
				if steps[0].kind != abiStepPointer {
					print("kind=", steps[0].kind, ", type=", stringFor(tv), "\n")
					panic("mismatch between ABI description and types")
				}
				ret[i] = Value{tv, regArgs.Ptrs[steps[0].ireg], flag(tv.Kind())}
				continue
			}

			// All that's left is values passed in registers that we need to
			// create space for and copy values back into.
			s := unsafe_New(tv)
			for _, st := range steps {
				switch st.kind {
				case abiStepIntReg:
					offset := add(s, st.offset, "precomputed value offset")
					intFromReg(&regArgs, st.ireg, st.size, offset)
				case abiStepPointer:
					s := add(s, st.offset, "precomputed value offset")
					*((*unsafe.Pointer)(s)) = regArgs.Ptrs[st.ireg]
				case abiStepFloatReg:
					offset := add(s, st.offset, "precomputed value offset")
					floatFromReg(&regArgs, st.freg, st.size, offset)
				case abiStepStack:
					panic("register-based return value has stack component")
				default:
					panic("unknown ABI part kind")
				}
			}
			ret[i] = Value{tv, s, flagIndir | flag(tv.Kind())}
		}
	}

	return ret
}
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
// callReflect is the call implementation used by a function
// returned by MakeFunc. In many ways it is the opposite of the
// method Value.call above: it unpacks the call frame into a []Value,
// invokes the user-supplied Go closure, and packs the results back
// into the frame and registers. retValid is set to true once the
// return values are completely written, so the runtime can scan them.
func callReflect(ctxt *makeFuncImpl, frame unsafe.Pointer, retValid *bool, regs *abi.RegArgs) {
	if callGC {
		// Call GC upon entry during testing.
		// Getting our stack scanned here is the biggest hazard, because
		// our caller (makeFuncStub) could have failed to place the last
		// pointer to a value in regs' pointer space, in which case it
		// won't be visible to the GC.
		runtime.GC()
	}
	ftyp := ctxt.ftyp
	f := ctxt.fn

	_, _, abid := funcLayout(ftyp, nil)

	// Copy arguments into Values.
	ptr := frame
	in := make([]Value, 0, int(ftyp.InCount))
	for i, typ := range ftyp.InSlice() {
		if typ.Size() == 0 {
			in = append(in, Zero(toRType(typ)))
			continue
		}
		v := Value{typ, nil, flag(typ.Kind())}
		steps := abid.call.stepsForValue(i)
		if st := steps[0]; st.kind == abiStepStack {
			if ifaceIndir(typ) {
				// value cannot be inlined in interface data.
				// Must make a non-pointer copy because we can't
				// take a pointer to the data in the args frame;
				// the args frame may be reused by the caller.
				v.ptr = unsafe_New(typ)
				if typ.Size() > 0 {
					typedmemmove(typ, v.ptr, add(ptr, st.stkOff, "typ.size > 0"))
				}
				v.flag |= flagIndir
			} else {
				v.ptr = *(*unsafe.Pointer)(add(ptr, st.stkOff, "1-ptr"))
			}
		} else {
			if ifaceIndir(typ) {
				// All that's left is values passed in registers that we need to
				// create space for the values.
				v.flag |= flagIndir
				v.ptr = unsafe_New(typ)
				for _, st := range steps {
					switch st.kind {
					case abiStepIntReg:
						offset := add(v.ptr, st.offset, "precomputed value offset")
						intFromReg(regs, st.ireg, st.size, offset)
					case abiStepPointer:
						s := add(v.ptr, st.offset, "precomputed value offset")
						*((*unsafe.Pointer)(s)) = regs.Ptrs[st.ireg]
					case abiStepFloatReg:
						offset := add(v.ptr, st.offset, "precomputed value offset")
						floatFromReg(regs, st.freg, st.size, offset)
					case abiStepStack:
						panic("register-based return value has stack component")
					default:
						panic("unknown ABI part kind")
					}
				}
			} else {
				// Pointer-valued data gets put directly
				// into v.ptr.
				if steps[0].kind != abiStepPointer {
					print("kind=", steps[0].kind, ", type=", stringFor(typ), "\n")
					panic("mismatch between ABI description and types")
				}
				v.ptr = regs.Ptrs[steps[0].ireg]
			}
		}
		in = append(in, v)
	}

	// Call underlying function.
	out := f(in)
	numOut := ftyp.NumOut()
	if len(out) != numOut {
		panic("reflect: wrong return count from function created by MakeFunc")
	}

	// Copy results back into the argument frame and register space.
	if numOut > 0 {
		for i, typ := range ftyp.OutSlice() {
			v := out[i]
			if v.typ() == nil {
				panic("reflect: function created by MakeFunc using " + funcName(f) +
					" returned zero Value")
			}
			if v.flag&flagRO != 0 {
				panic("reflect: function created by MakeFunc using " + funcName(f) +
					" returned value obtained from unexported field")
			}
			if typ.Size() == 0 {
				continue
			}

			// Convert v to type typ if v is assignable to a variable
			// of type t in the language spec.
			// See issue 28761.
			//
			// TODO(mknyszek): In the switch to the register ABI we lost
			// the scratch space here for the register cases (and
			// temporarily for all the cases).
			//
			// If/when this happens, take note of the following:
			//
			// We must clear the destination before calling assignTo,
			// in case assignTo writes (with memory barriers) to the
			// target location used as scratch space. See issue 39541.
			v = v.assignTo("reflect.MakeFunc", typ, nil)
		stepsLoop:
			for _, st := range abid.ret.stepsForValue(i) {
				switch st.kind {
				case abiStepStack:
					// Copy values to the "stack."
					addr := add(ptr, st.stkOff, "precomputed stack arg offset")
					// Do not use write barriers. The stack space used
					// for this call is not adequately zeroed, and we
					// are careful to keep the arguments alive until we
					// return to makeFuncStub's caller.
					if v.flag&flagIndir != 0 {
						memmove(addr, v.ptr, st.size)
					} else {
						// This case must be a pointer type.
						*(*uintptr)(addr) = uintptr(v.ptr)
					}
					// There's only one step for a stack-allocated value.
					break stepsLoop
				case abiStepIntReg, abiStepPointer:
					// Copy values to "integer registers."
					if v.flag&flagIndir != 0 {
						offset := add(v.ptr, st.offset, "precomputed value offset")
						intToReg(regs, st.ireg, st.size, offset)
					} else {
						// Only populate the Ints space on the return path.
						// This is safe because out is kept alive until the
						// end of this function, and the return path through
						// makeFuncStub has no preemption, so these pointers
						// are always visible to the GC.
						regs.Ints[st.ireg] = uintptr(v.ptr)
					}
				case abiStepFloatReg:
					// Copy values to "float registers."
					if v.flag&flagIndir == 0 {
						panic("attempted to copy pointer to FP register")
					}
					offset := add(v.ptr, st.offset, "precomputed value offset")
					floatToReg(regs, st.freg, st.size, offset)
				default:
					panic("unknown ABI part kind")
				}
			}
		}
	}

	// Announce that the return values are valid.
	// After this point the runtime can depend on the return values being valid.
	*retValid = true

	// We have to make sure that the out slice lives at least until
	// the runtime knows the return values are valid. Otherwise, the
	// return values might not be scanned by anyone during a GC.
	// (out would be dead, and the return slots not yet alive.)
	runtime.KeepAlive(out)

	// runtime.getArgInfo expects to be able to find ctxt on the
	// stack when it finds our caller, makeFuncStub. Make sure it
	// doesn't get garbage collected.
	runtime.KeepAlive(ctxt)
}
879
880
881
882
883
884
885
886
// methodReceiver returns information about the receiver
// described by v. The Value v may or may not have the
// flagMethod bit set, so the kind cached in v.flag should
// not be used.
// The return value rcvrtype gives the method's actual receiver type.
// The return value t gives the method type signature (without the receiver).
// The return value fn is a pointer to the method code.
func methodReceiver(op string, v Value, methodIndex int) (rcvrtype *abi.Type, t *funcType, fn unsafe.Pointer) {
	i := methodIndex
	if v.typ().Kind() == abi.Interface {
		tt := (*interfaceType)(unsafe.Pointer(v.typ()))
		if uint(i) >= uint(len(tt.Methods)) {
			panic("reflect: internal error: invalid method index")
		}
		m := &tt.Methods[i]
		if !tt.nameOff(m.Name).IsExported() {
			panic("reflect: " + op + " of unexported method")
		}
		iface := (*nonEmptyInterface)(v.ptr)
		if iface.itab == nil {
			panic("reflect: " + op + " of method on nil interface value")
		}
		rcvrtype = iface.itab.typ
		fn = unsafe.Pointer(&iface.itab.fun[i])
		t = (*funcType)(unsafe.Pointer(tt.typeOff(m.Typ)))
	} else {
		rcvrtype = v.typ()
		ms := v.typ().ExportedMethods()
		if uint(i) >= uint(len(ms)) {
			panic("reflect: internal error: invalid method index")
		}
		m := ms[i]
		if !nameOffFor(v.typ(), m.Name).IsExported() {
			panic("reflect: " + op + " of unexported method")
		}
		ifn := textOffFor(v.typ(), m.Ifn)
		fn = unsafe.Pointer(&ifn)
		t = (*funcType)(unsafe.Pointer(typeOffFor(v.typ(), m.Mtyp)))
	}
	return
}
921
922
923
924
925
// storeRcvr stores the receiver v, as a one-word value, at p.
// v.Kind() must be Interface or a concrete receiver kind.
func storeRcvr(v Value, p unsafe.Pointer) {
	t := v.typ()
	if t.Kind() == abi.Interface {
		// the interface data word becomes the receiver word
		iface := (*nonEmptyInterface)(v.ptr)
		*(*unsafe.Pointer)(p) = iface.word
	} else if v.flag&flagIndir != 0 && !ifaceIndir(t) {
		// pointer-shaped value stored indirectly; load it
		*(*unsafe.Pointer)(p) = *(*unsafe.Pointer)(v.ptr)
	} else {
		*(*unsafe.Pointer)(p) = v.ptr
	}
}
938
939
940
// align returns the result of rounding x up to a multiple of n.
// n must be a power of two.
func align(x, n uintptr) uintptr {
	mask := n - 1
	return (x + mask) &^ mask
}
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
// callMethod is the call implementation used by a function returned
// by makeMethodValue (used by v.Method(i).Interface()).
// It is a streamlined version of the usual reflect call: the caller has
// already laid out the argument frame for us, so we don't have
// to deal with individual Values for each argument.
// It is in this file so that it can be next to the two similar functions above.
//
// ctxt is the "closure" generated by makeMethodValue.
// frame is a pointer to the arguments to that closure on the stack.
// retValid points to a boolean which should be set when the results
// section of frame is set.
// regs contains the argument values passed in registers and will contain
// the values returned from ctxt.fn in registers.
func callMethod(ctxt *methodValue, frame unsafe.Pointer, retValid *bool, regs *abi.RegArgs) {
	rcvr := ctxt.rcvr
	rcvrType, valueFuncType, methodFn := methodReceiver("call", rcvr, ctxt.method)

	// There are two ABIs at play here.
	//
	// methodValueCall was invoked with the ABI assuming there was no
	// receiver ("value ABI") and that's what frame and regs are holding.
	//
	// Meanwhile, we need to actually call the method with a receiver, which
	// has its own ABI ("method ABI"). Everything that follows is a translation
	// between the two.
	_, _, valueABI := funcLayout(valueFuncType, nil)
	valueFrame, valueRegs := frame, regs
	methodFrameType, methodFramePool, methodABI := funcLayout(valueFuncType, rcvrType)

	// Make a new frame that is one word bigger so we can store the receiver.
	// This space is used for both arguments and return values.
	methodFrame := methodFramePool.Get().(unsafe.Pointer)
	var methodRegs abi.RegArgs

	// Deal with the receiver. It's guaranteed to only be one word in size.
	switch st := methodABI.call.steps[0]; st.kind {
	case abiStepStack:
		// Only copy the receiver to the stack if the ABI says so.
		// Otherwise, it'll be in a register already.
		storeRcvr(rcvr, methodFrame)
	case abiStepPointer:
		// Put the receiver in a register.
		storeRcvr(rcvr, unsafe.Pointer(&methodRegs.Ptrs[st.ireg]))
		fallthrough
	case abiStepIntReg:
		storeRcvr(rcvr, unsafe.Pointer(&methodRegs.Ints[st.ireg]))
	case abiStepFloatReg:
		storeRcvr(rcvr, unsafe.Pointer(&methodRegs.Floats[st.freg]))
	default:
		panic("unknown ABI parameter kind")
	}

	// Translate the rest of the arguments.
	for i, t := range valueFuncType.InSlice() {
		valueSteps := valueABI.call.stepsForValue(i)
		methodSteps := methodABI.call.stepsForValue(i + 1)

		// Zero-sized types are trivial: nothing to do.
		if len(valueSteps) == 0 {
			if len(methodSteps) != 0 {
				panic("method ABI and value ABI do not align")
			}
			continue
		}

		// There are four cases to handle in translating each
		// argument:
		// 1. Stack -> stack translation.
		// 2. Stack -> registers translation.
		// 3. Registers -> stack translation.
		// 4. Registers -> registers translation.

		// If the value ABI passes the value on the stack,
		// then the method ABI does too, because it has strictly
		// fewer registers available than the value ABI.
		if vStep := valueSteps[0]; vStep.kind == abiStepStack {
			mStep := methodSteps[0]
			// Handle stack -> stack translation.
			if mStep.kind == abiStepStack {
				if vStep.size != mStep.size {
					panic("method ABI and value ABI do not align")
				}
				typedmemmove(t,
					add(methodFrame, mStep.stkOff, "precomputed stack offset"),
					add(valueFrame, vStep.stkOff, "precomputed stack offset"))
				continue
			}
			// Handle stack -> registers translation.
			for _, mStep := range methodSteps {
				from := add(valueFrame, vStep.stkOff+mStep.offset, "precomputed stack offset")
				switch mStep.kind {
				case abiStepPointer:
					// Do the pointer copy directly so we get a write barrier.
					methodRegs.Ptrs[mStep.ireg] = *(*unsafe.Pointer)(from)
					fallthrough // We need to make sure this ends up in Ints, too.
				case abiStepIntReg:
					intToReg(&methodRegs, mStep.ireg, mStep.size, from)
				case abiStepFloatReg:
					floatToReg(&methodRegs, mStep.freg, mStep.size, from)
				default:
					panic("unexpected method step")
				}
			}
			continue
		}
		// Handle registers -> stack translation.
		if mStep := methodSteps[0]; mStep.kind == abiStepStack {
			for _, vStep := range valueSteps {
				to := add(methodFrame, mStep.stkOff+vStep.offset, "precomputed stack offset")
				switch vStep.kind {
				case abiStepPointer:
					// Do the pointer copy directly so we get a write barrier.
					*(*unsafe.Pointer)(to) = valueRegs.Ptrs[vStep.ireg]
				case abiStepIntReg:
					intFromReg(valueRegs, vStep.ireg, vStep.size, to)
				case abiStepFloatReg:
					floatFromReg(valueRegs, vStep.freg, vStep.size, to)
				default:
					panic("unexpected value step")
				}
			}
			continue
		}
		// Handle registers -> registers translation.
		if len(valueSteps) != len(methodSteps) {
			// Because it's the same type for the value, and it's assigned
			// to registers both times, it should always take up the same
			// number of registers for each ABI.
			panic("method ABI and value ABI don't align")
		}
		for i, vStep := range valueSteps {
			mStep := methodSteps[i]
			if mStep.kind != vStep.kind {
				panic("method ABI and value ABI don't align")
			}
			switch vStep.kind {
			case abiStepPointer:
				// Copy this too, so we get a write barrier.
				methodRegs.Ptrs[mStep.ireg] = valueRegs.Ptrs[vStep.ireg]
				fallthrough
			case abiStepIntReg:
				methodRegs.Ints[mStep.ireg] = valueRegs.Ints[vStep.ireg]
			case abiStepFloatReg:
				methodRegs.Floats[mStep.freg] = valueRegs.Floats[vStep.freg]
			default:
				panic("unexpected value step")
			}
		}
	}

	methodFrameSize := methodFrameType.Size()
	// TODO(mknyszek): Remove this when we no longer have
	// caller reserved spill space.
	methodFrameSize = align(methodFrameSize, goarch.PtrSize)
	methodFrameSize += methodABI.spill

	// Mark pointers in registers for the return path.
	methodRegs.ReturnIsPtr = methodABI.outRegPtrs

	// Call.
	// Call copies the arguments from scratch to the stack, calls fn,
	// and then copies the results back into scratch.
	call(methodFrameType, methodFn, methodFrame, uint32(methodFrameType.Size()), uint32(methodABI.retOffset), uint32(methodFrameSize), &methodRegs)

	// Copy return values.
	//
	// This is somewhat simpler because both ABIs have an identical
	// return value ABI (the types are identical). As a result, register
	// results can simply be copied over. Stack-allocated values are laid
	// out the same, but are at different offsets from the start of the frame.
	// Ignore any changes to args.
	// Avoid constructing out-of-bounds pointers if there are no return values.
	// because the arguments may be laid out differently.
	if valueRegs != nil {
		*valueRegs = methodRegs
	}
	if retSize := methodFrameType.Size() - methodABI.retOffset; retSize > 0 {
		valueRet := add(valueFrame, valueABI.retOffset, "valueFrame's size > retOffset")
		methodRet := add(methodFrame, methodABI.retOffset, "methodFrame's size > retOffset")
		// This copies to the stack. Write barriers are not needed.
		memmove(valueRet, methodRet, retSize)
	}

	// Tell the runtime it can now depend on the return values
	// being properly initialized.
	*retValid = true

	// Clear the scratch space and put it back in the pool.
	// This must happen after the statement above, so that the return
	// values will always be scanned by someone.
	typedmemclr(methodFrameType, methodFrame)
	methodFramePool.Put(methodFrame)

	// See the comment in callReflect.
	runtime.KeepAlive(ctxt)

	// Keep valueRegs alive because it may hold live pointer results.
	// The caller (methodValueCall) has it as a stack object, which is only
	// scanned when there is a reference to it.
	runtime.KeepAlive(valueRegs)
}
1152
1153
// funcName returns the name of f, for use in error messages.
// It reads the code pointer out of the closure header; if the runtime
// cannot resolve it, "closure" is returned.
func funcName(f func([]Value) []Value) string {
	pc := *(*uintptr)(unsafe.Pointer(&f))
	rf := runtime.FuncForPC(pc)
	if rf != nil {
		return rf.Name()
	}
	return "closure"
}
1162
1163
1164
// Cap returns v's capacity.
// It panics if v's Kind is not Array, Chan, Slice or pointer to Array.
func (v Value) Cap() int {
	// capNonSlice is split out to keep Cap inlineable for slices.
	if v.kind() == Slice {
		return (*unsafeheader.Slice)(v.ptr).Cap
	}
	return v.capNonSlice()
}
1172
// capNonSlice handles Cap for the non-slice kinds:
// Array, Chan, and pointer-to-array.
func (v Value) capNonSlice() int {
	k := v.kind()
	switch k {
	case Array:
		return v.typ().Len()
	case Chan:
		return chancap(v.pointer())
	case Ptr:
		if v.typ().Elem().Kind() == abi.Array {
			return v.typ().Elem().Len()
		}
		panic("reflect: call of reflect.Value.Cap on ptr to non-array Value")
	}
	panic(&ValueError{"reflect.Value.Cap", v.kind()})
}
1188
1189
1190
1191
// Close closes the channel v.
// It panics if v's Kind is not Chan or
// v is a receive-only channel.
func (v Value) Close() {
	v.mustBe(Chan)
	v.mustBeExported()
	tt := (*chanType)(unsafe.Pointer(v.typ()))
	if ChanDir(tt.Dir)&SendDir == 0 {
		panic("reflect: close of receive-only channel")
	}

	chanclose(v.pointer())
}
1202
1203
1204 func (v Value) CanComplex() bool {
1205 switch v.kind() {
1206 case Complex64, Complex128:
1207 return true
1208 default:
1209 return false
1210 }
1211 }
1212
1213
1214
// Complex returns v's underlying value, as a complex128.
// It panics if v's Kind is not Complex64 or Complex128.
func (v Value) Complex() complex128 {
	k := v.kind()
	switch k {
	case Complex64:
		return complex128(*(*complex64)(v.ptr))
	case Complex128:
		return *(*complex128)(v.ptr)
	}
	panic(&ValueError{"reflect.Value.Complex", v.kind()})
}
1225
1226
1227
1228
1229
// Elem returns the value that the interface v contains
// or that the pointer v points to.
// It panics if v's Kind is not Interface or Pointer.
// It returns the zero Value if v is nil.
func (v Value) Elem() Value {
	k := v.kind()
	switch k {
	case Interface:
		var eface any
		if v.typ().NumMethod() == 0 {
			eface = *(*any)(v.ptr)
		} else {
			// Reinterpret the non-empty interface as an any by
			// going through a one-method interface type.
			eface = (any)(*(*interface {
				M()
			})(v.ptr))
		}
		x := unpackEface(eface)
		if x.flag != 0 {
			x.flag |= v.flag.ro()
		}
		return x
	case Pointer:
		ptr := v.ptr
		if v.flag&flagIndir != 0 {
			if ifaceIndir(v.typ()) {
				// This is a pointer to a not-in-heap object. ptr points to a uintptr
				// in the heap. That uintptr is the address of a not-in-heap object.
				// In general, pointers to not-in-heap objects can be total junk.
				// But Elem() is asking to dereference it, so the user has asserted
				// that at least it is a valid pointer (not just an integer stored in
				// a pointer slot). So let's check, to make sure that it isn't a pointer
				// that the runtime will crash on if it sees it during GC or write barriers.
				// Since it is a not-in-heap pointer, all pointers to the heap are
				// forbidden! That makes the test pretty easy.
				if !verifyNotInHeapPtr(*(*uintptr)(ptr)) {
					panic("reflect: reflect.Value.Elem on an invalid notinheap pointer")
				}
			}
			ptr = *(*unsafe.Pointer)(ptr)
		}
		// The returned value's address is v's value.
		if ptr == nil {
			return Value{}
		}
		tt := (*ptrType)(unsafe.Pointer(v.typ()))
		typ := tt.Elem
		fl := v.flag&flagRO | flagIndir | flagAddr
		fl |= flag(typ.Kind())
		return Value{typ, ptr, fl}
	}
	panic(&ValueError{"reflect.Value.Elem", v.kind()})
}
1279
1280
1281
// Field returns the i'th field of the struct v.
// It panics if v's Kind is not Struct or i is out of range.
func (v Value) Field(i int) Value {
	if v.kind() != Struct {
		panic(&ValueError{"reflect.Value.Field", v.kind()})
	}
	tt := (*structType)(unsafe.Pointer(v.typ()))
	if uint(i) >= uint(len(tt.Fields)) {
		panic("reflect: Field index out of range")
	}
	field := &tt.Fields[i]
	typ := field.Typ

	// Inherit permission bits from v, but clear flagEmbedRO.
	fl := v.flag&(flagStickyRO|flagIndir|flagAddr) | flag(typ.Kind())
	// Using an unexported field forces flagRO.
	if !field.Name.IsExported() {
		if field.Embedded() {
			fl |= flagEmbedRO
		} else {
			fl |= flagStickyRO
		}
	}
	// Either flagIndir is set and v.ptr points at struct,
	// or flagIndir is not set and v.ptr is the actual struct data.
	// In the former case, we want v.ptr + offset.
	// In the latter case, we must have field.offset = 0,
	// so v.ptr + field.offset is still the correct address.
	ptr := add(v.ptr, field.Offset, "same as non-reflect &v.field")
	return Value{typ, ptr, fl}
}
1311
1312
1313
1314
// FieldByIndex returns the nested field corresponding to index.
// It panics if evaluation requires stepping through a nil
// pointer or a field that is not a struct.
func (v Value) FieldByIndex(index []int) Value {
	if len(index) == 1 {
		return v.Field(index[0])
	}
	v.mustBe(Struct)
	for i, x := range index {
		if i > 0 {
			// Auto-dereference intermediate pointers to embedded structs.
			if v.Kind() == Pointer && v.typ().Elem().Kind() == abi.Struct {
				if v.IsNil() {
					panic("reflect: indirection through nil pointer to embedded struct")
				}
				v = v.Elem()
			}
		}
		v = v.Field(x)
	}
	return v
}
1333
1334
1335
1336
1337
// FieldByIndexErr returns the nested field corresponding to index.
// It returns an error if evaluation requires stepping through a nil
// pointer, but panics if it must step through a field that
// is not a struct.
func (v Value) FieldByIndexErr(index []int) (Value, error) {
	if len(index) == 1 {
		return v.Field(index[0]), nil
	}
	v.mustBe(Struct)
	for i, x := range index {
		if i > 0 {
			if v.Kind() == Ptr && v.typ().Elem().Kind() == abi.Struct {
				if v.IsNil() {
					// Unlike FieldByIndex, report the nil indirection
					// as an error rather than panicking.
					return Value{}, errors.New("reflect: indirection through nil pointer to embedded struct field " + nameFor(v.typ().Elem()))
				}
				v = v.Elem()
			}
		}
		v = v.Field(x)
	}
	return v, nil
}
1356
1357
1358
1359
// FieldByName returns the struct field with the given name.
// It returns the zero Value if no field was found.
// It panics if v's Kind is not Struct.
func (v Value) FieldByName(name string) Value {
	v.mustBe(Struct)
	if f, ok := toRType(v.typ()).FieldByName(name); ok {
		return v.FieldByIndex(f.Index)
	}
	return Value{}
}
1367
1368
1369
1370
1371
1372 func (v Value) FieldByNameFunc(match func(string) bool) Value {
1373 if f, ok := toRType(v.typ()).FieldByNameFunc(match); ok {
1374 return v.FieldByIndex(f.Index)
1375 }
1376 return Value{}
1377 }
1378
1379
1380 func (v Value) CanFloat() bool {
1381 switch v.kind() {
1382 case Float32, Float64:
1383 return true
1384 default:
1385 return false
1386 }
1387 }
1388
1389
1390
// Float returns v's underlying value, as a float64.
// It panics if v's Kind is not Float32 or Float64.
func (v Value) Float() float64 {
	k := v.kind()
	switch k {
	case Float32:
		return float64(*(*float32)(v.ptr))
	case Float64:
		return *(*float64)(v.ptr)
	}
	panic(&ValueError{"reflect.Value.Float", v.kind()})
}
1401
// uint8Type is the reflect type of uint8, used by Index for string bytes.
var uint8Type = rtypeOf(uint8(0))
1403
1404
1405
// Index returns v's i'th element.
// It panics if v's Kind is not Array, Slice, or String or i is out of range.
func (v Value) Index(i int) Value {
	switch v.kind() {
	case Array:
		tt := (*arrayType)(unsafe.Pointer(v.typ()))
		if uint(i) >= uint(tt.Len) {
			panic("reflect: array index out of range")
		}
		typ := tt.Elem
		offset := uintptr(i) * typ.Size()

		// Either flagIndir is set and v.ptr points at array,
		// or flagIndir is not set and v.ptr is the actual array data.
		// In the former case, we want v.ptr + offset.
		// In the latter case, we must be doing Index(0), so offset = 0,
		// so v.ptr + offset is still the correct address.
		val := add(v.ptr, offset, "same as &v[i], i < tt.len")
		fl := v.flag&(flagIndir|flagAddr) | v.flag.ro() | flag(typ.Kind()) // bits same as overall array
		return Value{typ, val, fl}

	case Slice:
		// Element flag same as Elem of Pointer.
		// Addressable, indirect, possibly read-only.
		s := (*unsafeheader.Slice)(v.ptr)
		if uint(i) >= uint(s.Len) {
			panic("reflect: slice index out of range")
		}
		tt := (*sliceType)(unsafe.Pointer(v.typ()))
		typ := tt.Elem
		val := arrayAt(s.Data, i, typ.Size(), "i < s.Len")
		fl := flagAddr | flagIndir | v.flag.ro() | flag(typ.Kind())
		return Value{typ, val, fl}

	case String:
		s := (*unsafeheader.String)(v.ptr)
		if uint(i) >= uint(s.Len) {
			panic("reflect: string index out of range")
		}
		p := arrayAt(s.Data, i, 1, "i < s.Len")
		fl := v.flag.ro() | flag(Uint8) | flagIndir
		return Value{uint8Type, p, fl}
	}
	panic(&ValueError{"reflect.Value.Index", v.kind()})
}
1449
1450
1451 func (v Value) CanInt() bool {
1452 switch v.kind() {
1453 case Int, Int8, Int16, Int32, Int64:
1454 return true
1455 default:
1456 return false
1457 }
1458 }
1459
1460
1461
// Int returns v's underlying value, as an int64.
// It panics if v's Kind is not Int, Int8, Int16, Int32, or Int64.
func (v Value) Int() int64 {
	k := v.kind()
	p := v.ptr
	switch k {
	case Int:
		return int64(*(*int)(p))
	case Int8:
		return int64(*(*int8)(p))
	case Int16:
		return int64(*(*int16)(p))
	case Int32:
		return int64(*(*int32)(p))
	case Int64:
		return *(*int64)(p)
	}
	panic(&ValueError{"reflect.Value.Int", v.kind()})
}
1479
1480
// CanInterface reports whether Interface can be used without panicking.
func (v Value) CanInterface() bool {
	if v.flag == 0 {
		panic(&ValueError{"reflect.Value.CanInterface", Invalid})
	}
	return v.flag&flagRO == 0
}
1487
1488
1489
1490
1491
1492
1493
1494
// Interface returns v's current value as an interface{}.
// It is equivalent to:
//
//	var i interface{} = (v's underlying value)
//
// It panics if the Value was obtained by accessing
// unexported struct fields.
func (v Value) Interface() (i any) {
	return valueInterface(v, true)
}
1498
// valueInterface implements Interface. safe selects whether to enforce
// the unexported-field restriction; the unsafe variant is used internally
// (e.g. by the fmt package via linkname in older releases).
func valueInterface(v Value, safe bool) any {
	if v.flag == 0 {
		panic(&ValueError{"reflect.Value.Interface", Invalid})
	}
	if safe && v.flag&flagRO != 0 {
		// Do not allow access to unexported values via Interface,
		// because they might be pointers that should not be
		// writable or methods or function that should not be callable.
		panic("reflect.Value.Interface: cannot return value obtained from unexported field or method")
	}
	if v.flag&flagMethod != 0 {
		v = makeMethodValue("Interface", v)
	}

	if v.kind() == Interface {
		// Special case: return the element inside the interface.
		// Empty interface has one layout, all interfaces with
		// methods have a second layout.
		if v.NumMethod() == 0 {
			return *(*any)(v.ptr)
		}
		return *(*interface {
			M()
		})(v.ptr)
	}

	return packEface(v)
}
1528
1529
1530
1531
1532
1533
1534
1535
1536
1537
// InterfaceData returns a pair of unspecified uintptr values.
// It panics if v's Kind is not Interface.
//
// In earlier versions of Go, this function returned the interface's
// value as a uintptr pair. As of Go 1.4, the implementation of
// interface values precludes any defined use of InterfaceData.
//
// Deprecated: The memory representation of interface values is not
// compatible with InterfaceData.
func (v Value) InterfaceData() [2]uintptr {
	v.mustBe(Interface)
	// The compiler loses track as it converts to uintptr. Force escape.
	escapes(v.ptr)
	// We treat this as a read operation, so we allow
	// it even for unexported data, because the caller
	// has to import "unsafe" to turn it into something
	// that can be abused.
	// Interface value is always bigger than a word; assume flagIndir.
	return *(*[2]uintptr)(v.ptr)
}
1549
1550
1551
1552
1553
1554
1555
1556
// IsNil reports whether its argument v is nil. The argument must be
// a chan, func, interface, map, pointer, or slice value; if it is
// not, IsNil panics. Note that IsNil is not always equivalent to a
// regular comparison with nil in Go. For example, if v was created
// by calling ValueOf with an uninitialized interface variable i,
// i==nil will be true but v.IsNil will panic as v will be the zero
// Value.
func (v Value) IsNil() bool {
	k := v.kind()
	switch k {
	case Chan, Func, Map, Pointer, UnsafePointer:
		if v.flag&flagMethod != 0 {
			// A method value is a bound closure; it is never nil.
			return false
		}
		ptr := v.ptr
		if v.flag&flagIndir != 0 {
			ptr = *(*unsafe.Pointer)(ptr)
		}
		return ptr == nil
	case Interface, Slice:
		// Both interface and slice are nil if first word is 0.
		// Both are always bigger than a word; assume flagIndir.
		return *(*unsafe.Pointer)(v.ptr) == nil
	}
	panic(&ValueError{"reflect.Value.IsNil", v.kind()})
}
1576
1577
1578
1579
1580
1581
// IsValid reports whether v represents a value.
// It returns false if v is the zero Value.
// If IsValid returns false, all other methods except String panic.
// Most functions and methods never return an invalid Value.
// If one does, its documentation states the conditions explicitly.
func (v Value) IsValid() bool {
	return v.flag != 0
}
1585
1586
1587
// IsZero reports whether v is the zero value for its type.
// It panics if the Value is invalid.
func (v Value) IsZero() bool {
	switch v.kind() {
	case Bool:
		return !v.Bool()
	case Int, Int8, Int16, Int32, Int64:
		return v.Int() == 0
	case Uint, Uint8, Uint16, Uint32, Uint64, Uintptr:
		return v.Uint() == 0
	case Float32, Float64:
		return v.Float() == 0
	case Complex64, Complex128:
		return v.Complex() == 0
	case Array:
		if v.flag&flagIndir == 0 {
			return v.ptr == nil
		}
		typ := (*abi.ArrayType)(unsafe.Pointer(v.typ()))
		// If the type is comparable, then compare directly with zero.
		if typ.Equal != nil && typ.Size() <= abi.ZeroValSize {
			// v.ptr doesn't escape, as Equal functions are compiler generated
			// and never escape. The escape analysis doesn't know, as it is a
			// function pointer call.
			return typ.Equal(noescape(v.ptr), unsafe.Pointer(&zeroVal[0]))
		}
		if typ.TFlag&abi.TFlagRegularMemory != 0 {
			// For some types where the zero value is a value where all bits of this type are 0
			// optimize it.
			return isZero(unsafe.Slice(((*byte)(v.ptr)), typ.Size()))
		}
		n := int(typ.Len)
		for i := 0; i < n; i++ {
			if !v.Index(i).IsZero() {
				return false
			}
		}
		return true
	case Chan, Func, Interface, Map, Pointer, Slice, UnsafePointer:
		return v.IsNil()
	case String:
		return v.Len() == 0
	case Struct:
		if v.flag&flagIndir == 0 {
			return v.ptr == nil
		}
		typ := (*abi.StructType)(unsafe.Pointer(v.typ()))
		// If the type is comparable, then compare directly with zero.
		if typ.Equal != nil && typ.Size() <= abi.ZeroValSize {
			// See noescape justification above.
			return typ.Equal(noescape(v.ptr), unsafe.Pointer(&zeroVal[0]))
		}
		if typ.TFlag&abi.TFlagRegularMemory != 0 {
			// For some types where the zero value is a value where all bits of this type are 0
			// optimize it.
			return isZero(unsafe.Slice(((*byte)(v.ptr)), typ.Size()))
		}

		n := v.NumField()
		for i := 0; i < n; i++ {
			// Fields named "_" are padding; their contents are ignored.
			if !v.Field(i).IsZero() && v.Type().Field(i).Name != "_" {
				return false
			}
		}
		return true
	default:
		// This should never happen, but will act as a safeguard for later,
		// as a default value doesn't makes sense here.
		panic(&ValueError{"reflect.Value.IsZero", v.Kind()})
	}
}
1657
1658
1659
// isZero reports whether every byte of b is zero.
// It peels unaligned head bytes and ragged tail bytes, then scans the
// aligned middle as uint64 words, 32 at a time, for speed.
func isZero(b []byte) bool {
	if len(b) == 0 {
		return true
	}
	const batch = 32 // uint64 words per unrolled iteration

	// Peel leading bytes until the start is 8-byte aligned.
	for uintptr(unsafe.Pointer(&b[0]))%8 != 0 {
		if b[0] != 0 {
			return false
		}
		if b = b[1:]; len(b) == 0 {
			return true
		}
	}
	// Peel trailing bytes until the length is a multiple of 8.
	for len(b)%8 != 0 {
		if b[len(b)-1] != 0 {
			return false
		}
		b = b[:len(b)-1]
	}
	if len(b) == 0 {
		return true
	}
	// Reinterpret the aligned middle as uint64 words.
	words := unsafe.Slice((*uint64)(unsafe.Pointer(&b[0])), len(b)/8)
	// Check single words until a multiple of batch remains.
	for len(words)%batch != 0 {
		if words[0] != 0 {
			return false
		}
		words = words[1:]
	}
	// Unrolled scan: batch words per iteration.
	for len(words) >= batch {
		if words[0] != 0 || words[1] != 0 || words[2] != 0 || words[3] != 0 ||
			words[4] != 0 || words[5] != 0 || words[6] != 0 || words[7] != 0 ||
			words[8] != 0 || words[9] != 0 || words[10] != 0 || words[11] != 0 ||
			words[12] != 0 || words[13] != 0 || words[14] != 0 || words[15] != 0 ||
			words[16] != 0 || words[17] != 0 || words[18] != 0 || words[19] != 0 ||
			words[20] != 0 || words[21] != 0 || words[22] != 0 || words[23] != 0 ||
			words[24] != 0 || words[25] != 0 || words[26] != 0 || words[27] != 0 ||
			words[28] != 0 || words[29] != 0 || words[30] != 0 || words[31] != 0 {
			return false
		}
		words = words[batch:]
	}
	return true
}
1706
1707
1708
// SetZero sets v to be the zero value of v's type.
// It panics if CanSet returns false.
// For each kind the zero is stored directly through v.ptr using the
// kind's in-memory representation, avoiding a generic memclear for
// the common scalar cases.
func (v Value) SetZero() {
	v.mustBeAssignable()
	switch v.kind() {
	case Bool:
		*(*bool)(v.ptr) = false
	case Int:
		*(*int)(v.ptr) = 0
	case Int8:
		*(*int8)(v.ptr) = 0
	case Int16:
		*(*int16)(v.ptr) = 0
	case Int32:
		*(*int32)(v.ptr) = 0
	case Int64:
		*(*int64)(v.ptr) = 0
	case Uint:
		*(*uint)(v.ptr) = 0
	case Uint8:
		*(*uint8)(v.ptr) = 0
	case Uint16:
		*(*uint16)(v.ptr) = 0
	case Uint32:
		*(*uint32)(v.ptr) = 0
	case Uint64:
		*(*uint64)(v.ptr) = 0
	case Uintptr:
		*(*uintptr)(v.ptr) = 0
	case Float32:
		*(*float32)(v.ptr) = 0
	case Float64:
		*(*float64)(v.ptr) = 0
	case Complex64:
		*(*complex64)(v.ptr) = 0
	case Complex128:
		*(*complex128)(v.ptr) = 0
	case String:
		*(*string)(v.ptr) = ""
	case Slice:
		*(*unsafeheader.Slice)(v.ptr) = unsafeheader.Slice{}
	case Interface:
		*(*emptyInterface)(v.ptr) = emptyInterface{}
	case Chan, Func, Map, Pointer, UnsafePointer:
		*(*unsafe.Pointer)(v.ptr) = nil
	case Array, Struct:
		// Arbitrary-size memory: let the runtime clear it with
		// write barriers as needed.
		typedmemclr(v.typ(), v.ptr)
	default:
		// This should never happen, but will act as a safeguard for
		// later, as a default value doesn't make sense here.
		panic(&ValueError{"reflect.Value.SetZero", v.Kind()})
	}
}
1760
1761
1762
// Kind returns v's Kind.
// If v is the zero Value (IsValid returns false), Kind returns Invalid.
func (v Value) Kind() Kind {
	return v.kind()
}
1766
1767
1768
// Len returns v's length.
// It panics if v's Kind is not Array, Chan, Map, Slice, String,
// or pointer to Array.
func (v Value) Len() int {
	// The slice case is inlined here so the hot path stays cheap;
	// everything else goes through lenNonSlice.
	if v.kind() == Slice {
		return (*unsafeheader.Slice)(v.ptr).Len
	}
	return v.lenNonSlice()
}
1776
// lenNonSlice handles the non-slice kinds for Len, keeping Len itself
// small enough to inline.
func (v Value) lenNonSlice() int {
	switch k := v.kind(); k {
	case Array:
		tt := (*arrayType)(unsafe.Pointer(v.typ()))
		return int(tt.Len)
	case Chan:
		return chanlen(v.pointer())
	case Map:
		return maplen(v.pointer())
	case String:
		// String is bigger than a word; assume flagIndir.
		return (*unsafeheader.String)(v.ptr).Len
	case Ptr:
		// len(p) for *[N]T is N, known statically from the type.
		if v.typ().Elem().Kind() == abi.Array {
			return v.typ().Elem().Len()
		}
		panic("reflect: call of reflect.Value.Len on ptr to non-array Value")
	}
	panic(&ValueError{"reflect.Value.Len", v.kind()})
}
1797
1798 var stringType = rtypeOf("")
1799
1800
1801
1802
1803
// MapIndex returns the value associated with key in the map v.
// It panics if v's Kind is not Map.
// It returns the zero Value if key is not found in the map or if v
// represents a nil map.
// As in Go, the key's value must be assignable to the map's key type.
func (v Value) MapIndex(key Value) Value {
	v.mustBe(Map)
	tt := (*mapType)(unsafe.Pointer(v.typ()))

	// Do not require key to be exported, so that DeepEqual
	// and other programs can use all the keys returned by
	// MapKeys as arguments to MapIndex. If either the map
	// or the key is unexported, though, the result will be
	// considered unexported. This is consistent with the
	// behavior for structs, which allow read but not write
	// of unexported fields.

	var e unsafe.Pointer
	if (tt.Key == stringType || key.kind() == String) && tt.Key == key.typ() && tt.Elem.Size() <= maxValSize {
		// String-keyed fast path: avoid the generic assignTo and
		// indirection below.
		k := *(*string)(key.ptr)
		e = mapaccess_faststr(v.typ(), v.pointer(), k)
	} else {
		key = key.assignTo("reflect.Value.MapIndex", tt.Key, nil)
		var k unsafe.Pointer
		if key.flag&flagIndir != 0 {
			k = key.ptr
		} else {
			k = unsafe.Pointer(&key.ptr)
		}
		e = mapaccess(v.typ(), v.pointer(), k)
	}
	if e == nil {
		return Value{}
	}
	typ := tt.Elem
	// Result is read-only if either the map or the key was.
	fl := (v.flag | key.flag).ro()
	fl |= flag(typ.Kind())
	return copyVal(typ, fl, e)
}
1838
1839
1840
1841
1842
// MapKeys returns a slice containing all the keys present in the map,
// in unspecified order.
// It panics if v's Kind is not Map.
// It returns an empty slice if v represents a nil map.
func (v Value) MapKeys() []Value {
	v.mustBe(Map)
	tt := (*mapType)(unsafe.Pointer(v.typ()))
	keyType := tt.Key

	fl := v.flag.ro() | flag(keyType.Kind())

	m := v.pointer()
	mlen := int(0)
	if m != nil {
		mlen = maplen(m)
	}
	var it hiter
	mapiterinit(v.typ(), m, &it)
	a := make([]Value, mlen)
	var i int
	for i = 0; i < len(a); i++ {
		key := mapiterkey(&it)
		if key == nil {
			// Someone deleted an entry from the map since we
			// called maplen above. It's a data race, but nothing
			// we can do about it.
			break
		}
		a[i] = copyVal(keyType, fl, key)
		mapiternext(&it)
	}
	// Trim in case the map shrank during iteration.
	return a[:i]
}
1872
1873
1874
1875
1876
// hiter is reflect's copy of the runtime's map iteration state.
// It is passed to mapiterinit/mapiterkey/mapiterelem/mapiternext,
// which interpret this memory directly, so its field layout must stay
// in sync with the runtime's iterator struct. Having a clone here
// lets MapIter embed the iterator and be reused without allocating.
type hiter struct {
	key         unsafe.Pointer
	elem        unsafe.Pointer
	t           unsafe.Pointer
	h           unsafe.Pointer
	buckets     unsafe.Pointer
	bptr        unsafe.Pointer
	overflow    *[]unsafe.Pointer
	oldoverflow *[]unsafe.Pointer
	startBucket uintptr
	offset      uint8
	wrapped     bool
	B           uint8
	i           uint8
	bucket      uintptr
	checkBucket uintptr
}
1894
// initialized reports whether mapiterinit has been called on h;
// the runtime sets h.t during initialization.
func (h *hiter) initialized() bool {
	return h.t != nil
}
1898
1899
1900
// A MapIter is an iterator for ranging over a map.
// See Value.MapRange.
type MapIter struct {
	m     Value // the map being iterated; zero Value until Reset
	hiter hiter // runtime iteration state; zero until first Next
}
1905
1906
// Key returns the key of iter's current map entry.
// It panics if Next has not been called or the iterator is exhausted.
func (iter *MapIter) Key() Value {
	if !iter.hiter.initialized() {
		panic("MapIter.Key called before Next")
	}
	iterkey := mapiterkey(&iter.hiter)
	if iterkey == nil {
		panic("MapIter.Key called on exhausted iterator")
	}

	t := (*mapType)(unsafe.Pointer(iter.m.typ()))
	ktype := t.Key
	// Copy out of the map bucket so the result stays valid after
	// the iterator advances.
	return copyVal(ktype, iter.m.flag.ro()|flag(ktype.Kind()), iterkey)
}
1920
1921
1922
1923
1924
// SetIterKey assigns to v the key of iter's current map entry.
// It is equivalent to v.Set(iter.Key()), but it avoids allocating a
// new Value. As with Set, v must be assignable and must not have
// been obtained by accessing unexported struct fields.
func (v Value) SetIterKey(iter *MapIter) {
	if !iter.hiter.initialized() {
		panic("reflect: Value.SetIterKey called before Next")
	}
	iterkey := mapiterkey(&iter.hiter)
	if iterkey == nil {
		panic("reflect: Value.SetIterKey called on exhausted iterator")
	}

	v.mustBeAssignable()
	var target unsafe.Pointer
	if v.kind() == Interface {
		// assignTo can store an interface representation directly
		// into v's slot.
		target = v.ptr
	}

	t := (*mapType)(unsafe.Pointer(iter.m.typ()))
	ktype := t.Key

	iter.m.mustBeExported() // do not let unexported m leak
	// Wrap the bucket's key storage as an addressable-looking Value
	// so assignTo can convert it to v's type.
	key := Value{ktype, iterkey, iter.m.flag | flag(ktype.Kind()) | flagIndir}
	key = key.assignTo("reflect.MapIter.SetKey", v.typ(), target)
	typedmemmove(v.typ(), v.ptr, key.ptr)
}
1948
1949
// Value returns the value of iter's current map entry.
// It panics if Next has not been called or the iterator is exhausted.
func (iter *MapIter) Value() Value {
	if !iter.hiter.initialized() {
		panic("MapIter.Value called before Next")
	}
	iterelem := mapiterelem(&iter.hiter)
	if iterelem == nil {
		panic("MapIter.Value called on exhausted iterator")
	}

	t := (*mapType)(unsafe.Pointer(iter.m.typ()))
	vtype := t.Elem
	// Copy out of the map bucket so the result stays valid after
	// the iterator advances.
	return copyVal(vtype, iter.m.flag.ro()|flag(vtype.Kind()), iterelem)
}
1963
1964
1965
1966
1967
// SetIterValue assigns to v the value of iter's current map entry.
// It is equivalent to v.Set(iter.Value()), but it avoids allocating a
// new Value. As with Set, v must be assignable and must not have
// been obtained by accessing unexported struct fields.
func (v Value) SetIterValue(iter *MapIter) {
	if !iter.hiter.initialized() {
		panic("reflect: Value.SetIterValue called before Next")
	}
	iterelem := mapiterelem(&iter.hiter)
	if iterelem == nil {
		panic("reflect: Value.SetIterValue called on exhausted iterator")
	}

	v.mustBeAssignable()
	var target unsafe.Pointer
	if v.kind() == Interface {
		// assignTo can store an interface representation directly
		// into v's slot.
		target = v.ptr
	}

	t := (*mapType)(unsafe.Pointer(iter.m.typ()))
	vtype := t.Elem

	iter.m.mustBeExported() // do not let unexported m leak
	// Wrap the bucket's elem storage as an addressable-looking Value
	// so assignTo can convert it to v's type.
	elem := Value{vtype, iterelem, iter.m.flag | flag(vtype.Kind()) | flagIndir}
	elem = elem.assignTo("reflect.MapIter.SetValue", v.typ(), target)
	typedmemmove(v.typ(), v.ptr, elem.ptr)
}
1991
1992
1993
1994
// Next advances the map iterator and reports whether there is another
// entry. It returns false when iter is exhausted; subsequent calls to
// Key, Value, or Next will panic.
func (iter *MapIter) Next() bool {
	if !iter.m.IsValid() {
		panic("MapIter.Next called on an iterator that does not have an associated map Value")
	}
	if !iter.hiter.initialized() {
		// First call: position the iterator at the first entry.
		mapiterinit(iter.m.typ(), iter.m.pointer(), &iter.hiter)
	} else {
		if mapiterkey(&iter.hiter) == nil {
			panic("MapIter.Next called on exhausted iterator")
		}
		mapiternext(&iter.hiter)
	}
	return mapiterkey(&iter.hiter) != nil
}
2009
2010
2011
2012
2013
2014 func (iter *MapIter) Reset(v Value) {
2015 if v.IsValid() {
2016 v.mustBe(Map)
2017 }
2018 iter.m = v
2019 iter.hiter = hiter{}
2020 }
2021
2022
2023
2024
2025
2026
2027
2028
2029
2030
2031
2032
2033
2034
2035
2036
// MapRange returns a range iterator for a map.
// It panics if v's Kind is not Map.
//
// Call Next to advance the iterator, and Key/Value to access each entry.
// Next returns false when the iterator is exhausted.
// MapRange follows the same iteration semantics as a range statement.
//
// Example:
//
//	iter := reflect.ValueOf(m).MapRange()
//	for iter.Next() {
//		k := iter.Key()
//		v := iter.Value()
//		...
//	}
func (v Value) MapRange() *MapIter {
	// This is inlinable to take advantage of "function outlining".
	// The allocation of MapIter can be stack allocated if the caller
	// does not allow it to escape.
	// See https://blog.filippo.io/efficient-go-apis-with-the-inliner/
	if v.kind() != Map {
		v.panicNotMap()
	}
	return &MapIter{m: v}
}
2047
2048
2049
2050
2051
2052
// panicNotMap is the cold panic path split out of MapRange so that
// MapRange itself stays within the inlining budget.
func (f flag) panicNotMap() {
	f.mustBe(Map)
}
2056
2057
2058
// copyVal returns a Value containing the map key or value at ptr,
// allocating a new variable as needed so the Value does not alias
// map storage that may be reused or freed.
func copyVal(typ *abi.Type, fl flag, ptr unsafe.Pointer) Value {
	if typ.IfaceIndir() {
		// Copy result so future changes to the map
		// won't change the underlying value.
		c := unsafe_New(typ)
		typedmemmove(typ, c, ptr)
		return Value{typ, c, fl | flagIndir}
	}
	// Pointer-shaped value: load the word itself.
	return Value{typ, *(*unsafe.Pointer)(ptr), fl}
}
2069
2070
2071
2072
2073
// Method returns a function value corresponding to v's i'th method.
// The arguments to a Call on the returned function should not include
// a receiver; the returned function will always use v as the receiver.
// Method panics if i is out of range or if v is a nil interface value.
func (v Value) Method(i int) Value {
	if v.typ() == nil {
		panic(&ValueError{"reflect.Value.Method", Invalid})
	}
	if v.flag&flagMethod != 0 || uint(i) >= uint(toRType(v.typ()).NumMethod()) {
		panic("reflect: Method index out of range")
	}
	if v.typ().Kind() == abi.Interface && v.IsNil() {
		panic("reflect: Method on nil interface value")
	}
	// The method value keeps the receiver's type and pointer and
	// records the method index in the high bits of the flag word.
	fl := v.flag.ro() | (v.flag & flagIndir)
	fl |= flag(Func)
	fl |= flag(i)<<flagMethodShift | flagMethod
	return Value{v.typ(), v.ptr, fl}
}
2089
2090
2091
2092
2093
2094
// NumMethod returns the number of methods in the value's method set.
//
// For a non-interface type, it returns the number of exported methods.
//
// For an interface type, it returns the number of exported and
// unexported methods.
func (v Value) NumMethod() int {
	if v.typ() == nil {
		panic(&ValueError{"reflect.Value.NumMethod", Invalid})
	}
	if v.flag&flagMethod != 0 {
		// A method value itself has no methods.
		return 0
	}
	return toRType(v.typ()).NumMethod()
}
2104
2105
2106
2107
2108
2109
// MethodByName returns a function value corresponding to the method
// of v with the given name.
// The arguments to a Call on the returned function should not include
// a receiver; the returned function will always use v as the receiver.
// It returns the zero Value if no method was found.
func (v Value) MethodByName(name string) Value {
	if v.typ() == nil {
		panic(&ValueError{"reflect.Value.MethodByName", Invalid})
	}
	if v.flag&flagMethod != 0 {
		// A method value has no methods of its own.
		return Value{}
	}
	m, ok := toRType(v.typ()).MethodByName(name)
	if !ok {
		return Value{}
	}
	return v.Method(m.Index)
}
2123
2124
2125
// NumField returns the number of fields in the struct v.
// It panics if v's Kind is not Struct.
func (v Value) NumField() int {
	v.mustBe(Struct)
	tt := (*structType)(unsafe.Pointer(v.typ()))
	return len(tt.Fields)
}
2131
2132
2133
2134 func (v Value) OverflowComplex(x complex128) bool {
2135 k := v.kind()
2136 switch k {
2137 case Complex64:
2138 return overflowFloat32(real(x)) || overflowFloat32(imag(x))
2139 case Complex128:
2140 return false
2141 }
2142 panic(&ValueError{"reflect.Value.OverflowComplex", v.kind()})
2143 }
2144
2145
2146
2147 func (v Value) OverflowFloat(x float64) bool {
2148 k := v.kind()
2149 switch k {
2150 case Float32:
2151 return overflowFloat32(x)
2152 case Float64:
2153 return false
2154 }
2155 panic(&ValueError{"reflect.Value.OverflowFloat", v.kind()})
2156 }
2157
// overflowFloat32 reports whether the finite float64 x is too large
// in magnitude to be represented as a float32. Infinities and NaN
// are not treated as overflowing (they convert cleanly), hence the
// upper bound of MaxFloat64.
func overflowFloat32(x float64) bool {
	abs := math.Abs(x)
	return math.MaxFloat32 < abs && abs <= math.MaxFloat64
}
2164
2165
2166
// OverflowInt reports whether the int64 x cannot be represented by
// v's type.
// It panics if v's Kind is not Int, Int8, Int16, Int32, or Int64.
func (v Value) OverflowInt(x int64) bool {
	k := v.kind()
	switch k {
	case Int, Int8, Int16, Int32, Int64:
		bitSize := v.typ().Size() * 8
		// Round-trip x through the narrower width with sign
		// extension; it changes exactly when x does not fit.
		trunc := (x << (64 - bitSize)) >> (64 - bitSize)
		return x != trunc
	}
	panic(&ValueError{"reflect.Value.OverflowInt", v.kind()})
}
2177
2178
2179
2180 func (v Value) OverflowUint(x uint64) bool {
2181 k := v.kind()
2182 switch k {
2183 case Uint, Uintptr, Uint8, Uint16, Uint32, Uint64:
2184 bitSize := v.typ_.Size() * 8
2185 trunc := (x << (64 - bitSize)) >> (64 - bitSize)
2186 return x != trunc
2187 }
2188 panic(&ValueError{"reflect.Value.OverflowUint", v.kind()})
2189 }
2190
2191
2192
2193
2194
2195
2196
2197
2198
2199
2200
2201
2202
2203
2204
2205
2206
2207
2208
// Pointer returns v's value as a uintptr.
// It panics if v's Kind is not Chan, Func, Map, Pointer, Slice,
// or UnsafePointer.
//
// Prefer UnsafePointer: a uintptr result is not tracked by the
// garbage collector.
func (v Value) Pointer() uintptr {
	// The compiler loses track as soon as the value is converted to
	// uintptr, so force the referent to the heap where the GC can
	// still see it.
	escapes(v.ptr)

	k := v.kind()
	switch k {
	case Pointer:
		if v.typ().PtrBytes == 0 {
			val := *(*uintptr)(v.ptr)
			// Since it is a not-in-heap pointer, all pointers to
			// the heap are forbidden.
			if !verifyNotInHeapPtr(val) {
				panic("reflect: reflect.Value.Pointer on an invalid notinheap pointer")
			}
			return val
		}
		fallthrough
	case Chan, Map, UnsafePointer:
		return uintptr(v.pointer())
	case Func:
		if v.flag&flagMethod != 0 {
			// All method values created by reflect share the same
			// underlying trampoline, so their Pointers are equal.
			// The function used here must match the one used in
			// makeMethodValue.
			return methodValueCallCodePtr()
		}
		p := v.pointer()
		// Non-nil func value points at data block.
		// First word of data block is actual code.
		if p != nil {
			p = *(*unsafe.Pointer)(p)
		}
		return uintptr(p)

	case Slice:
		return uintptr((*unsafeheader.Slice)(v.ptr).Data)
	}
	panic(&ValueError{"reflect.Value.Pointer", v.kind()})
}
2251
2252
2253
2254
2255
2256
// Recv receives and returns a value from the channel v.
// It panics if v's Kind is not Chan.
// The receive blocks until a value is ready.
// The boolean value ok is true if the value x corresponds to a send
// on the channel, false if it is a zero value received because the
// channel is closed.
func (v Value) Recv() (x Value, ok bool) {
	v.mustBe(Chan)
	v.mustBeExported()
	return v.recv(false)
}
2262
2263
2264
// recv is the internal receive, possibly non-blocking (nb).
// v is known to be a channel.
func (v Value) recv(nb bool) (val Value, ok bool) {
	tt := (*chanType)(unsafe.Pointer(v.typ()))
	if ChanDir(tt.Dir)&RecvDir == 0 {
		panic("reflect: recv on send-only channel")
	}
	t := tt.Elem
	val = Value{t, nil, flag(t.Kind())}
	var p unsafe.Pointer
	if ifaceIndir(t) {
		// Allocate a buffer for the runtime to receive into.
		p = unsafe_New(t)
		val.ptr = p
		val.flag |= flagIndir
	} else {
		// Pointer-shaped element: receive directly into val.ptr.
		p = unsafe.Pointer(&val.ptr)
	}
	selected, ok := chanrecv(v.pointer(), nb, p)
	if !selected {
		// Non-blocking receive found nothing ready.
		val = Value{}
	}
	return
}
2286
2287
2288
2289
// Send sends x on the channel v.
// It panics if v's kind is not Chan or if x's type is not the same
// type as v's element type.
// As in Go, x's value must be assignable to the channel's element type.
func (v Value) Send(x Value) {
	v.mustBe(Chan)
	v.mustBeExported()
	v.send(x, false)
}
2295
2296
2297
// send is the internal send, possibly non-blocking (nb).
// v is known to be a channel.
func (v Value) send(x Value, nb bool) (selected bool) {
	tt := (*chanType)(unsafe.Pointer(v.typ()))
	if ChanDir(tt.Dir)&SendDir == 0 {
		panic("reflect: send on recv-only channel")
	}
	x.mustBeExported()
	x = x.assignTo("reflect.Value.Send", tt.Elem, nil)
	var p unsafe.Pointer
	if x.flag&flagIndir != 0 {
		p = x.ptr
	} else {
		// Pointer-shaped value: the runtime copies from the word
		// holding it.
		p = unsafe.Pointer(&x.ptr)
	}
	return chansend(v.pointer(), p, nb)
}
2313
2314
2315
2316
2317
// Set assigns x to the value v.
// It panics if CanSet returns false.
// As in Go, x's value must be assignable to v's type and must not
// be derived from use of unexported struct fields.
func (v Value) Set(x Value) {
	v.mustBeAssignable()
	x.mustBeExported() // do not let unexported x leak
	var target unsafe.Pointer
	if v.kind() == Interface {
		// assignTo can build the interface representation directly
		// in v's slot.
		target = v.ptr
	}
	x = x.assignTo("reflect.Set", v.typ(), target)
	if x.flag&flagIndir != 0 {
		if x.ptr == unsafe.Pointer(&zeroVal[0]) {
			// x is the shared zero value; clear instead of copying
			// from the read-only zero buffer.
			typedmemclr(v.typ(), v.ptr)
		} else {
			typedmemmove(v.typ(), v.ptr, x.ptr)
		}
	} else {
		// Pointer-shaped value: store the word directly.
		*(*unsafe.Pointer)(v.ptr) = x.ptr
	}
}
2336
2337
2338
// SetBool sets v's underlying value.
// It panics if v's Kind is not Bool or if CanSet() is false.
func (v Value) SetBool(x bool) {
	v.mustBeAssignable()
	v.mustBe(Bool)
	*(*bool)(v.ptr) = x
}
2344
2345
2346
// SetBytes sets v's underlying value.
// It panics if v's underlying value is not a slice of bytes.
func (v Value) SetBytes(x []byte) {
	v.mustBeAssignable()
	v.mustBe(Slice)
	if toRType(v.typ()).Elem().Kind() != Uint8 {
		panic("reflect.Value.SetBytes of non-byte slice")
	}
	*(*[]byte)(v.ptr) = x
}
2355
2356
2357
// setRunes sets v's underlying value.
// It panics if v's underlying value is not a slice of runes (int32s).
func (v Value) setRunes(x []rune) {
	v.mustBeAssignable()
	v.mustBe(Slice)
	if v.typ().Elem().Kind() != abi.Int32 {
		panic("reflect.Value.setRunes of non-rune slice")
	}
	*(*[]rune)(v.ptr) = x
}
2366
2367
2368
2369 func (v Value) SetComplex(x complex128) {
2370 v.mustBeAssignable()
2371 switch k := v.kind(); k {
2372 default:
2373 panic(&ValueError{"reflect.Value.SetComplex", v.kind()})
2374 case Complex64:
2375 *(*complex64)(v.ptr) = complex64(x)
2376 case Complex128:
2377 *(*complex128)(v.ptr) = x
2378 }
2379 }
2380
2381
2382
2383 func (v Value) SetFloat(x float64) {
2384 v.mustBeAssignable()
2385 switch k := v.kind(); k {
2386 default:
2387 panic(&ValueError{"reflect.Value.SetFloat", v.kind()})
2388 case Float32:
2389 *(*float32)(v.ptr) = float32(x)
2390 case Float64:
2391 *(*float64)(v.ptr) = x
2392 }
2393 }
2394
2395
2396
2397 func (v Value) SetInt(x int64) {
2398 v.mustBeAssignable()
2399 switch k := v.kind(); k {
2400 default:
2401 panic(&ValueError{"reflect.Value.SetInt", v.kind()})
2402 case Int:
2403 *(*int)(v.ptr) = int(x)
2404 case Int8:
2405 *(*int8)(v.ptr) = int8(x)
2406 case Int16:
2407 *(*int16)(v.ptr) = int16(x)
2408 case Int32:
2409 *(*int32)(v.ptr) = int32(x)
2410 case Int64:
2411 *(*int64)(v.ptr) = x
2412 }
2413 }
2414
2415
2416
2417
// SetLen sets v's length to n.
// It panics if v's Kind is not Slice or if n is negative or
// greater than the capacity of the slice.
func (v Value) SetLen(n int) {
	v.mustBeAssignable()
	v.mustBe(Slice)
	s := (*unsafeheader.Slice)(v.ptr)
	// The unsigned comparison also rejects negative n.
	if uint(n) > uint(s.Cap) {
		panic("reflect: slice length out of range in SetLen")
	}
	s.Len = n
}
2427
2428
2429
2430
// SetCap sets v's capacity to n.
// It panics if v's Kind is not Slice or if n is smaller than the
// length or greater than the capacity of the slice.
func (v Value) SetCap(n int) {
	v.mustBeAssignable()
	v.mustBe(Slice)
	s := (*unsafeheader.Slice)(v.ptr)
	if n < s.Len || n > s.Cap {
		panic("reflect: slice capacity out of range in SetCap")
	}
	s.Cap = n
}
2440
2441
2442
2443
2444
2445
2446
// SetMapIndex sets the element associated with key in the map v to
// elem.
// It panics if v's Kind is not Map.
// If elem is the zero Value, SetMapIndex deletes the key from the map.
// Otherwise if v holds a nil map, SetMapIndex will panic.
// As in Go, key's elem must be assignable to the map's key type,
// and elem's value must be assignable to the map's elem type.
func (v Value) SetMapIndex(key, elem Value) {
	v.mustBe(Map)
	v.mustBeExported()
	key.mustBeExported()
	tt := (*mapType)(unsafe.Pointer(v.typ()))

	// String-keyed fast path, mirroring MapIndex.
	if (tt.Key == stringType || key.kind() == String) && tt.Key == key.typ() && tt.Elem.Size() <= maxValSize {
		k := *(*string)(key.ptr)
		if elem.typ() == nil {
			// Zero elem Value means delete.
			mapdelete_faststr(v.typ(), v.pointer(), k)
			return
		}
		elem.mustBeExported()
		elem = elem.assignTo("reflect.Value.SetMapIndex", tt.Elem, nil)
		var e unsafe.Pointer
		if elem.flag&flagIndir != 0 {
			e = elem.ptr
		} else {
			e = unsafe.Pointer(&elem.ptr)
		}
		mapassign_faststr(v.typ(), v.pointer(), k, e)
		return
	}

	// Generic path: convert the key, then delete or assign.
	key = key.assignTo("reflect.Value.SetMapIndex", tt.Key, nil)
	var k unsafe.Pointer
	if key.flag&flagIndir != 0 {
		k = key.ptr
	} else {
		k = unsafe.Pointer(&key.ptr)
	}
	if elem.typ() == nil {
		// Zero elem Value means delete.
		mapdelete(v.typ(), v.pointer(), k)
		return
	}
	elem.mustBeExported()
	elem = elem.assignTo("reflect.Value.SetMapIndex", tt.Elem, nil)
	var e unsafe.Pointer
	if elem.flag&flagIndir != 0 {
		e = elem.ptr
	} else {
		e = unsafe.Pointer(&elem.ptr)
	}
	mapassign(v.typ(), v.pointer(), k, e)
}
2492
2493
2494
2495 func (v Value) SetUint(x uint64) {
2496 v.mustBeAssignable()
2497 switch k := v.kind(); k {
2498 default:
2499 panic(&ValueError{"reflect.Value.SetUint", v.kind()})
2500 case Uint:
2501 *(*uint)(v.ptr) = uint(x)
2502 case Uint8:
2503 *(*uint8)(v.ptr) = uint8(x)
2504 case Uint16:
2505 *(*uint16)(v.ptr) = uint16(x)
2506 case Uint32:
2507 *(*uint32)(v.ptr) = uint32(x)
2508 case Uint64:
2509 *(*uint64)(v.ptr) = x
2510 case Uintptr:
2511 *(*uintptr)(v.ptr) = uintptr(x)
2512 }
2513 }
2514
2515
2516
// SetPointer sets the unsafe.Pointer value v to x.
// It panics if v's Kind is not UnsafePointer.
func (v Value) SetPointer(x unsafe.Pointer) {
	v.mustBeAssignable()
	v.mustBe(UnsafePointer)
	*(*unsafe.Pointer)(v.ptr) = x
}
2522
2523
2524
// SetString sets v's underlying value to x.
// It panics if v's Kind is not String or if CanSet() is false.
func (v Value) SetString(x string) {
	v.mustBeAssignable()
	v.mustBe(String)
	*(*string)(v.ptr) = x
}
2530
2531
2532
2533
// Slice returns v[i:j].
// It panics if v's Kind is not Array, Slice or String, or if v is an
// unaddressable array, or if the indexes are out of bounds.
func (v Value) Slice(i, j int) Value {
	var (
		cap  int
		typ  *sliceType
		base unsafe.Pointer
	)
	switch kind := v.kind(); kind {
	default:
		panic(&ValueError{"reflect.Value.Slice", v.kind()})

	case Array:
		if v.flag&flagAddr == 0 {
			panic("reflect.Value.Slice: slice of unaddressable array")
		}
		tt := (*arrayType)(unsafe.Pointer(v.typ()))
		cap = int(tt.Len)
		typ = (*sliceType)(unsafe.Pointer(tt.Slice))
		base = v.ptr

	case Slice:
		typ = (*sliceType)(unsafe.Pointer(v.typ()))
		s := (*unsafeheader.Slice)(v.ptr)
		base = s.Data
		cap = s.Cap

	case String:
		s := (*unsafeheader.String)(v.ptr)
		if i < 0 || j < i || j > s.Len {
			panic("reflect.Value.Slice: string slice index out of bounds")
		}
		var t unsafeheader.String
		if i < s.Len {
			t = unsafeheader.String{Data: arrayAt(s.Data, i, 1, "i < s.Len"), Len: j - i}
		}
		return Value{v.typ(), unsafe.Pointer(&t), v.flag}
	}

	if i < 0 || j < i || j > cap {
		panic("reflect.Value.Slice: slice index out of bounds")
	}

	// Declare slice so that gc can see the base pointer in it.
	var x []unsafe.Pointer

	// Reinterpret as *unsafeheader.Slice to edit.
	s := (*unsafeheader.Slice)(unsafe.Pointer(&x))
	s.Len = j - i
	s.Cap = cap - i
	if cap-i > 0 {
		s.Data = arrayAt(base, i, typ.Elem.Size(), "i < cap")
	} else {
		// do not advance pointer, to avoid pointing beyond end of slice
		s.Data = base
	}

	fl := v.flag.ro() | flagIndir | flag(Slice)
	return Value{typ.Common(), unsafe.Pointer(&x), fl}
}
2592
2593
2594
2595
// Slice3 is the 3-index form of the slice operation: it returns
// v[i:j:k].
// It panics if v's Kind is not Array or Slice, or if v is an
// unaddressable array, or if the indexes are out of bounds.
func (v Value) Slice3(i, j, k int) Value {
	var (
		cap  int
		typ  *sliceType
		base unsafe.Pointer
	)
	switch kind := v.kind(); kind {
	default:
		panic(&ValueError{"reflect.Value.Slice3", v.kind()})

	case Array:
		if v.flag&flagAddr == 0 {
			panic("reflect.Value.Slice3: slice of unaddressable array")
		}
		tt := (*arrayType)(unsafe.Pointer(v.typ()))
		cap = int(tt.Len)
		typ = (*sliceType)(unsafe.Pointer(tt.Slice))
		base = v.ptr

	case Slice:
		typ = (*sliceType)(unsafe.Pointer(v.typ()))
		s := (*unsafeheader.Slice)(v.ptr)
		base = s.Data
		cap = s.Cap
	}

	if i < 0 || j < i || k < j || k > cap {
		panic("reflect.Value.Slice3: slice index out of bounds")
	}

	// Declare slice so that the garbage collector can see the base
	// pointer in it.
	var x []unsafe.Pointer

	// Reinterpret as *unsafeheader.Slice to edit.
	s := (*unsafeheader.Slice)(unsafe.Pointer(&x))
	s.Len = j - i
	s.Cap = k - i
	if k-i > 0 {
		s.Data = arrayAt(base, i, typ.Elem.Size(), "i < k <= cap")
	} else {
		// do not advance pointer, to avoid pointing beyond end of slice
		s.Data = base
	}

	fl := v.flag.ro() | flagIndir | flag(Slice)
	return Value{typ.Common(), unsafe.Pointer(&x), fl}
}
2644
2645
2646
2647
2648
2649
2650
// String returns the string v's underlying value, as a string.
// String is a special case because of Go's String method convention:
// unlike the other getters, it does not panic if v's Kind is not
// String. Instead, it returns a string of the form "<T value>" where
// T is v's type.
func (v Value) String() string {
	// stringNonString is split out to keep String inlineable for
	// string kinds.
	if v.kind() == String {
		return *(*string)(v.ptr)
	}
	return v.stringNonString()
}
2658
// stringNonString implements String for non-string kinds, returning
// the descriptive "<T Value>" form rather than panicking.
func (v Value) stringNonString() string {
	if v.kind() == Invalid {
		return "<invalid Value>"
	}
	// If you call String on a reflect.Value of other type, it's
	// better to print something than to panic. Useful in debugging.
	return "<" + v.Type().String() + " Value>"
}
2667
2668
2669
2670
2671
2672
// TryRecv attempts to receive a value from the channel v but will not
// block.
// It panics if v's Kind is not Chan.
// If the receive delivers a value, x is the transferred value and ok
// is true. If the receive cannot finish without blocking, x is the
// zero Value and ok is false. If the channel is closed, x is the zero
// value for the channel's element type and ok is false.
func (v Value) TryRecv() (x Value, ok bool) {
	v.mustBe(Chan)
	v.mustBeExported()
	return v.recv(true)
}
2678
2679
2680
2681
2682
// TrySend attempts to send x on the channel v but will not block.
// It panics if v's Kind is not Chan.
// It reports whether the value was sent.
// As in Go, x's value must be assignable to the channel's element type.
func (v Value) TrySend(x Value) bool {
	v.mustBe(Chan)
	v.mustBeExported()
	return v.send(x, true)
}
2688
2689
// Type returns v's type.
func (v Value) Type() Type {
	// Fast path for the common case (valid, non-method value);
	// typeSlow handles the rest.
	if v.flag != 0 && v.flag&flagMethod == 0 {
		// The pointer is read through noescape so that taking v's
		// type does not force v to escape.
		return (*rtype)(noescape(unsafe.Pointer(v.typ_)))
	}
	return v.typeSlow()
}
2696
// typeSlow handles Type for invalid Values (panics) and for method
// values, whose type is the method's function type rather than the
// receiver's type.
func (v Value) typeSlow() Type {
	if v.flag == 0 {
		panic(&ValueError{"reflect.Value.Type", Invalid})
	}

	typ := v.typ()
	if v.flag&flagMethod == 0 {
		return toRType(v.typ())
	}

	// Method value: the method index is stored in the high bits of
	// the flag word (see Value.Method).
	i := int(v.flag) >> flagMethodShift
	if v.typ().Kind() == abi.Interface {
		// Method on interface: look it up in the interface's
		// method table.
		tt := (*interfaceType)(unsafe.Pointer(typ))
		if uint(i) >= uint(len(tt.Methods)) {
			panic("reflect: internal error: invalid method index")
		}
		m := &tt.Methods[i]
		return toRType(typeOffFor(typ, m.Typ))
	}
	// Method on concrete type: only exported methods are reachable.
	ms := typ.ExportedMethods()
	if uint(i) >= uint(len(ms)) {
		panic("reflect: internal error: invalid method index")
	}
	m := ms[i]
	return toRType(typeOffFor(typ, m.Mtyp))
}
2727
2728
2729 func (v Value) CanUint() bool {
2730 switch v.kind() {
2731 case Uint, Uint8, Uint16, Uint32, Uint64, Uintptr:
2732 return true
2733 default:
2734 return false
2735 }
2736 }
2737
2738
2739
// Uint returns v's underlying value, as a uint64.
// It panics if v's Kind is not Uint, Uintptr, Uint8, Uint16, Uint32,
// or Uint64.
func (v Value) Uint() uint64 {
	k := v.kind()
	p := v.ptr
	switch k {
	case Uint:
		return uint64(*(*uint)(p))
	case Uint8:
		return uint64(*(*uint8)(p))
	case Uint16:
		return uint64(*(*uint16)(p))
	case Uint32:
		return uint64(*(*uint32)(p))
	case Uint64:
		return *(*uint64)(p)
	case Uintptr:
		return uint64(*(*uintptr)(p))
	}
	panic(&ValueError{"reflect.Value.Uint", v.kind()})
}
2759
2760
2761
2762
2763
2764
2765
2766
2767
2768
// UnsafeAddr returns a pointer to v's data, as a uintptr.
// It panics if v is not addressable.
//
// Prefer Addr().UnsafePointer(): a uintptr result is not tracked by
// the garbage collector.
func (v Value) UnsafeAddr() uintptr {
	if v.typ() == nil {
		panic(&ValueError{"reflect.Value.UnsafeAddr", Invalid})
	}
	if v.flag&flagAddr == 0 {
		panic("reflect.Value.UnsafeAddr of unaddressable value")
	}
	// The compiler loses track as soon as the value is converted to
	// uintptr, so force the referent to the heap where the GC can
	// still see it.
	escapes(v.ptr)
	return uintptr(v.ptr)
}
2780
2781
2782
2783
2784
2785
2786
2787
2788
2789
2790
2791
// UnsafePointer returns v's value as an unsafe.Pointer.
// It panics if v's Kind is not Chan, Func, Map, Pointer, Slice,
// or UnsafePointer.
//
// For Func values, the result is an underlying code pointer but not
// necessarily enough to identify a single function uniquely.
// For Slice values, it returns a pointer to the first element of the
// slice.
func (v Value) UnsafePointer() unsafe.Pointer {
	k := v.kind()
	switch k {
	case Pointer:
		if v.typ().PtrBytes == 0 {
			// Since it is a not-in-heap pointer, all pointers to
			// the heap are forbidden.
			if !verifyNotInHeapPtr(*(*uintptr)(v.ptr)) {
				panic("reflect: reflect.Value.UnsafePointer on an invalid notinheap pointer")
			}
			return *(*unsafe.Pointer)(v.ptr)
		}
		fallthrough
	case Chan, Map, UnsafePointer:
		return v.pointer()
	case Func:
		if v.flag&flagMethod != 0 {
			// All method values created by reflect share the same
			// underlying trampoline, so their pointers are equal.
			// The function used here must match the one used in
			// makeMethodValue.
			code := methodValueCallCodePtr()
			return *(*unsafe.Pointer)(unsafe.Pointer(&code))
		}
		p := v.pointer()
		// Non-nil func value points at data block.
		// First word of data block is actual code.
		if p != nil {
			p = *(*unsafe.Pointer)(p)
		}
		return p

	case Slice:
		return (*unsafeheader.Slice)(v.ptr).Data
	}
	panic(&ValueError{"reflect.Value.UnsafePointer", v.kind()})
}
2831
2832
2833
2834
2835
2836
2837
2838
2839
// StringHeader is the runtime representation of a string.
// It cannot be used safely or portably and its representation may
// change in a later release.
//
// Deprecated: Use unsafe.String or unsafe.StringData instead.
type StringHeader struct {
	Data uintptr
	Len  int
}
2844
2845
2846
2847
2848
2849
2850
2851
2852
// SliceHeader is the runtime representation of a slice.
// It cannot be used safely or portably and its representation may
// change in a later release.
//
// Deprecated: Use unsafe.Slice or unsafe.SliceData instead.
type SliceHeader struct {
	Data uintptr
	Len  int
	Cap  int
}
2858
2859 func typesMustMatch(what string, t1, t2 Type) {
2860 if t1 != t2 {
2861 panic(what + ": " + t1.String() + " != " + t2.String())
2862 }
2863 }
2864
2865
2866
2867
2868
2869
2870
2871
// arrayAt returns the i'th element of p,
// an array whose elements are eltSize bytes wide.
// The array pointed at by p must have at least i+1 elements:
// it is invalid (but impossible to check here) to pass i >= len,
// because then the result will point outside the array.
// whySafe must explain why i < len. (Passing "i < len" is fine;
// the benefit is to surface this assumption at the call site.)
func arrayAt(p unsafe.Pointer, i int, eltSize uintptr, whySafe string) unsafe.Pointer {
	return add(p, uintptr(i)*eltSize, "i < len")
}
2875
2876
2877
2878
2879
2880
2881
// Grow increases the slice's capacity, if necessary, to guarantee
// space for another n elements. After Grow(n), at least n elements
// can be appended to the slice without another allocation.
//
// It panics if v's Kind is not a Slice, if n is negative or too
// large to allocate the memory, or if the slice is not assignable.
func (v Value) Grow(n int) {
	v.mustBeAssignable()
	v.mustBe(Slice)
	v.grow(n)
}
2887
2888
// grow is identical to Grow but does not check for assignability.
func (v Value) grow(n int) {
	p := (*unsafeheader.Slice)(v.ptr)
	switch {
	case n < 0:
		panic("reflect.Value.Grow: negative len")
	case p.Len+n < 0:
		// Len+n wrapped around.
		panic("reflect.Value.Grow: slice overflow")
	case p.Len+n > p.Cap:
		// Not enough capacity: reallocate via the runtime.
		t := v.typ().Elem()
		*p = growslice(t, *p, n)
	}
}
2901
2902
2903
2904
2905
2906
2907
// extendSlice extends a slice by n elements.
//
// Unlike Value.grow, which modifies the slice in place and
// is typically used internally by Grow, extendSlice returns a new
// slice value with the length incremented by n, leaving the caller's
// original slice header untouched (the backing array may be shared).
func (v Value) extendSlice(n int) Value {
	v.mustBeExported()
	v.mustBe(Slice)

	// Shallow copy the slice header so the grow/len update below
	// does not write through to the original header.
	sh := *(*unsafeheader.Slice)(v.ptr)
	s := &sh
	v.ptr = unsafe.Pointer(s)
	v.flag = flagIndir | flag(Slice)

	v.grow(n)
	s.Len += n
	return v
}
2922
2923
2924
2925
// Clear clears the contents of a map or zeros the contents of a slice.
//
// It panics if v's Kind is not Map or Slice.
func (v Value) Clear() {
	switch v.Kind() {
	case Slice:
		// Zero each element in place (with write barriers as needed).
		sh := *(*unsafeheader.Slice)(v.ptr)
		st := (*sliceType)(unsafe.Pointer(v.typ()))
		typedarrayclear(st.Elem, sh.Data, sh.Len)
	case Map:
		mapclear(v.typ(), v.pointer())
	default:
		panic(&ValueError{"reflect.Value.Clear", v.Kind()})
	}
}
2938
2939
2940
// Append appends the values x to a slice s and returns the resulting
// slice. As in Go, each x's value must be assignable to the slice's
// element type.
func Append(s Value, x ...Value) Value {
	s.mustBe(Slice)
	n := s.Len()
	// Grow once for all new elements, then fill them in.
	s = s.extendSlice(len(x))
	for i, v := range x {
		s.Index(n + i).Set(v)
	}
	return s
}
2950
2951
2952
// AppendSlice appends a slice t to a slice s and returns the
// resulting slice. The slices s and t must have the same element type.
func AppendSlice(s, t Value) Value {
	s.mustBe(Slice)
	t.mustBe(Slice)
	typesMustMatch("reflect.AppendSlice", s.Type().Elem(), t.Type().Elem())
	ns := s.Len()
	nt := t.Len()
	// Grow once, then bulk-copy t into the new tail.
	s = s.extendSlice(nt)
	Copy(s.Slice(ns, ns+nt), t)
	return s
}
2963
2964
2965
2966
2967
2968
2969
2970
// Copy copies the contents of src into dst until either
// dst has been filled or src has been exhausted.
// It returns the number of elements copied.
// Dst and src each must have kind Slice or Array, and
// dst and src must have the same element type.
//
// As a special case, src can have kind String if dst's element type
// is kind Uint8.
func Copy(dst, src Value) int {
	dk := dst.kind()
	if dk != Array && dk != Slice {
		panic(&ValueError{"reflect.Copy", dk})
	}
	if dk == Array {
		// Copying into an array mutates it, so it must be settable.
		dst.mustBeAssignable()
	}
	dst.mustBeExported()

	sk := src.kind()
	var stringCopy bool
	if sk != Array && sk != Slice {
		// Special case: string -> []byte.
		stringCopy = sk == String && dst.typ().Elem().Kind() == abi.Uint8
		if !stringCopy {
			panic(&ValueError{"reflect.Copy", sk})
		}
	}
	src.mustBeExported()

	de := dst.typ().Elem()
	if !stringCopy {
		se := src.typ().Elem()
		typesMustMatch("reflect.Copy", toType(de), toType(se))
	}

	// Build slice headers over both operands so a single
	// typedslicecopy handles every kind combination.
	var ds, ss unsafeheader.Slice
	if dk == Array {
		ds.Data = dst.ptr
		ds.Len = dst.Len()
		ds.Cap = ds.Len
	} else {
		ds = *(*unsafeheader.Slice)(dst.ptr)
	}
	if sk == Array {
		ss.Data = src.ptr
		ss.Len = src.Len()
		ss.Cap = ss.Len
	} else if sk == Slice {
		ss = *(*unsafeheader.Slice)(src.ptr)
	} else {
		sh := *(*unsafeheader.String)(src.ptr)
		ss.Data = sh.Data
		ss.Len = sh.Len
		ss.Cap = sh.Len
	}

	return typedslicecopy(de.Common(), ds, ss)
}
3020
3021
3022
// A runtimeSelect is a single case passed to rselect.
// The runtime interprets this memory directly, so the field layout
// must stay in sync with the runtime's corresponding struct.
type runtimeSelect struct {
	dir SelectDir      // SelectSend, SelectRecv or SelectDefault
	typ *rtype         // channel type
	ch  unsafe.Pointer // channel
	val unsafe.Pointer // ptr to data (SendDir) or ptr to receive buffer (RecvDir)
}
3029
3030
3031
3032
3033
3034
3035
3036
3037
3038
3039
3040
3041 func rselect([]runtimeSelect) (chosen int, recvOK bool)
3042
3043
// A SelectDir describes the communication direction of a select case.
type SelectDir int

// NOTE: these values must remain consistent with the runtime's
// select-direction constants, since they are passed to rselect.

const (
	_             SelectDir = iota
	SelectSend              // case Chan <- Send
	SelectRecv              // case <-Chan:
	SelectDefault           // default
)
3054
3055
3056
3057
3058
3059
3060
3061
3062
3063
3064
3065
3066
3067
3068
3069
3070
// A SelectCase describes a single case in a select operation.
// The kind of case depends on Dir, the communication direction.
//
// If Dir is SelectDefault, the case represents a default case.
// Chan and Send must be zero Values.
//
// If Dir is SelectSend, the case represents a send operation.
// Normally Chan's underlying value must be a channel and Send's
// underlying value must be assignable to the channel's element type.
// As a special case, if Chan is a zero Value, then the case is
// ignored, and the field Send will also be ignored and may be either
// zero or non-zero.
//
// If Dir is SelectRecv, the case represents a receive operation.
// Normally Chan's underlying value must be a channel and Send must be
// a zero Value. If Chan is a zero Value, then the case is ignored,
// but Send must still be a zero Value. When a receive operation is
// selected, the received Value is returned by Select.
type SelectCase struct {
	Dir  SelectDir // direction of case
	Chan Value     // channel to use (for send or receive)
	Send Value     // value to send (for send)
}
3076
3077
3078
3079
3080
3081
3082
3083
3084
// Select executes a select operation described by the list of cases.
// Like the Go select statement, it blocks until at least one of the
// cases can proceed, makes a uniform pseudo-random choice, and then
// executes that case. It returns the index of the chosen case and,
// if that case was a receive operation, the value received and a
// boolean indicating whether the value corresponds to a send on the
// channel (as opposed to a zero value received because the channel
// is closed). Select supports a maximum of 65536 cases.
func Select(cases []SelectCase) (chosen int, recv Value, recvOK bool) {
	if len(cases) > 65536 {
		panic("reflect.Select: too many cases (max 65536)")
	}
	// NOTE: Do not trust that caller is not modifying cases data
	// underfoot. The range is safe because the caller cannot modify
	// our copy of the len and each iteration makes its own copy of
	// the value c.
	var runcases []runtimeSelect
	if len(cases) > 4 {
		// Slice is heap allocated due to runtime dependent capacity.
		runcases = make([]runtimeSelect, len(cases))
	} else {
		// Slice can be stack allocated due to constant capacity.
		runcases = make([]runtimeSelect, len(cases), 4)
	}

	haveDefault := false
	for i, c := range cases {
		rc := &runcases[i]
		rc.dir = c.Dir
		switch c.Dir {
		default:
			panic("reflect.Select: invalid Dir")

		case SelectDefault: // default
			if haveDefault {
				panic("reflect.Select: multiple default cases")
			}
			haveDefault = true
			if c.Chan.IsValid() {
				panic("reflect.Select: default case has Chan value")
			}
			if c.Send.IsValid() {
				panic("reflect.Select: default case has Send value")
			}

		case SelectSend:
			ch := c.Chan
			if !ch.IsValid() {
				// Zero Chan: case is ignored.
				break
			}
			ch.mustBe(Chan)
			ch.mustBeExported()
			tt := (*chanType)(unsafe.Pointer(ch.typ()))
			if ChanDir(tt.Dir)&SendDir == 0 {
				panic("reflect.Select: SendDir case using recv-only channel")
			}
			rc.ch = ch.pointer()
			rc.typ = toRType(&tt.Type)
			v := c.Send
			if !v.IsValid() {
				panic("reflect.Select: SendDir case missing Send value")
			}
			v.mustBeExported()
			v = v.assignTo("reflect.Select", tt.Elem, nil)
			if v.flag&flagIndir != 0 {
				rc.val = v.ptr
			} else {
				rc.val = unsafe.Pointer(&v.ptr)
			}
			// The value to send is stored as an unsafe.Pointer in
			// rc.val; make sure it stays visible to the GC.
			escapes(rc.val)

		case SelectRecv:
			if c.Send.IsValid() {
				panic("reflect.Select: RecvDir case has Send value")
			}
			ch := c.Chan
			if !ch.IsValid() {
				// Zero Chan: case is ignored.
				break
			}
			ch.mustBe(Chan)
			ch.mustBeExported()
			tt := (*chanType)(unsafe.Pointer(ch.typ()))
			if ChanDir(tt.Dir)&RecvDir == 0 {
				panic("reflect.Select: RecvDir case using send-only channel")
			}
			rc.ch = ch.pointer()
			rc.typ = toRType(&tt.Type)
			// Allocate a buffer for the runtime to receive into.
			rc.val = unsafe_New(tt.Elem)
		}
	}

	chosen, recvOK = rselect(runcases)
	if runcases[chosen].dir == SelectRecv {
		// Wrap the received buffer as a Value of the element type.
		tt := (*chanType)(unsafe.Pointer(runcases[chosen].typ))
		t := tt.Elem
		p := runcases[chosen].val
		fl := flag(t.Kind())
		if t.IfaceIndir() {
			recv = Value{t, p, fl | flagIndir}
		} else {
			recv = Value{t, *(*unsafe.Pointer)(p), fl}
		}
	}
	return chosen, recv, recvOK
}
3183
3184
3187
3188
3189
3190
// unsafe_New allocates one zeroed object of the given type in the heap.
// It has no body here; it is implemented in the runtime package.
func unsafe_New(*abi.Type) unsafe.Pointer

// unsafe_NewArray allocates a zeroed array of n elements of the given
// type. Implemented in the runtime package.
func unsafe_NewArray(*abi.Type, int) unsafe.Pointer
3195
3196
3197
3198 func MakeSlice(typ Type, len, cap int) Value {
3199 if typ.Kind() != Slice {
3200 panic("reflect.MakeSlice of non-slice type")
3201 }
3202 if len < 0 {
3203 panic("reflect.MakeSlice: negative len")
3204 }
3205 if cap < 0 {
3206 panic("reflect.MakeSlice: negative cap")
3207 }
3208 if len > cap {
3209 panic("reflect.MakeSlice: len > cap")
3210 }
3211
3212 s := unsafeheader.Slice{Data: unsafe_NewArray(&(typ.Elem().(*rtype).t), cap), Len: len, Cap: cap}
3213 return Value{&typ.(*rtype).t, unsafe.Pointer(&s), flagIndir | flag(Slice)}
3214 }
3215
3216
3217 func MakeChan(typ Type, buffer int) Value {
3218 if typ.Kind() != Chan {
3219 panic("reflect.MakeChan of non-chan type")
3220 }
3221 if buffer < 0 {
3222 panic("reflect.MakeChan: negative buffer size")
3223 }
3224 if typ.ChanDir() != BothDir {
3225 panic("reflect.MakeChan: unidirectional channel type")
3226 }
3227 t := typ.common()
3228 ch := makechan(t, buffer)
3229 return Value{t, ch, flag(Chan)}
3230 }
3231
3232
// MakeMap creates a new map with the specified type.
func MakeMap(typ Type) Value {
	return MakeMapWithSize(typ, 0)
}
3236
3237
3238
3239 func MakeMapWithSize(typ Type, n int) Value {
3240 if typ.Kind() != Map {
3241 panic("reflect.MakeMapWithSize of non-map type")
3242 }
3243 t := typ.common()
3244 m := makemap(t, n)
3245 return Value{t, m, flag(Map)}
3246 }
3247
3248
3249
3250
3251 func Indirect(v Value) Value {
3252 if v.Kind() != Pointer {
3253 return v
3254 }
3255 return v.Elem()
3256 }
3257
3258
3259
3260 func ValueOf(i any) Value {
3261 if i == nil {
3262 return Value{}
3263 }
3264 return unpackEface(i)
3265 }
3266
3267
3268
3269
3270
3271
3272 func Zero(typ Type) Value {
3273 if typ == nil {
3274 panic("reflect: Zero(nil)")
3275 }
3276 t := &typ.(*rtype).t
3277 fl := flag(t.Kind())
3278 if t.IfaceIndir() {
3279 var p unsafe.Pointer
3280 if t.Size() <= abi.ZeroValSize {
3281 p = unsafe.Pointer(&zeroVal[0])
3282 } else {
3283 p = unsafe_New(t)
3284 }
3285 return Value{t, p, fl | flagIndir}
3286 }
3287 return Value{t, nil, fl}
3288 }
3289
3290
3291 var zeroVal [abi.ZeroValSize]byte
3292
3293
3294
3295 func New(typ Type) Value {
3296 if typ == nil {
3297 panic("reflect: New(nil)")
3298 }
3299 t := &typ.(*rtype).t
3300 pt := ptrTo(t)
3301 if ifaceIndir(pt) {
3302
3303 panic("reflect: New of type that may not be allocated in heap (possibly undefined cgo C type)")
3304 }
3305 ptr := unsafe_New(t)
3306 fl := flag(Pointer)
3307 return Value{pt, ptr, fl}
3308 }
3309
3310
3311
// NewAt returns a Value representing a pointer to a value of the
// specified type, using p as that pointer. The caller is responsible
// for ensuring p actually points to a valid value of that type.
func NewAt(typ Type, p unsafe.Pointer) Value {
	fl := flag(Pointer)
	t := typ.(*rtype)
	return Value{t.ptrTo(), p, fl}
}
3317
3318
3319
3320
3321
3322
// assignTo returns a value v that can be assigned directly to dst.
// It panics if v is not assignable to dst.
// For a conversion to an interface type, target, if not nil,
// is a suggested scratch space to use.
func (v Value) assignTo(context string, dst *abi.Type, target unsafe.Pointer) Value {
	if v.flag&flagMethod != 0 {
		v = makeMethodValue(context, v)
	}

	switch {
	case directlyAssignable(dst, v.typ()):
		// Overwrite type so that they match; the memory layout is the same,
		// so only the kind bits and read-only bits need recomputing.
		fl := v.flag&(flagAddr|flagIndir) | v.flag.ro()
		fl |= flag(dst.Kind())
		return Value{dst, v.ptr, fl}

	case implements(dst, v.typ()):
		if v.Kind() == Interface && v.IsNil() {
			// A nil interface value of one interface type is assignable to
			// another interface type it implements, but ifaceE2I below
			// would panic on a nil source. Return a nil dst explicitly.
			return Value{dst, nil, flag(Interface)}
		}
		x := valueInterface(v, false)
		if target == nil {
			target = unsafe_New(dst)
		}
		if dst.NumMethod() == 0 {
			// Empty interface: store the eface directly.
			*(*any)(target) = x
		} else {
			// Non-empty interface: build the itab via the runtime.
			ifaceE2I(dst, x, target)
		}
		return Value{dst, target, flagIndir | flag(Interface)}
	}

	// Failed.
	panic(context + ": value of type " + stringFor(v.typ()) + " is not assignable to type " + stringFor(dst))
}
3358
3359
3360
3361
3362 func (v Value) Convert(t Type) Value {
3363 if v.flag&flagMethod != 0 {
3364 v = makeMethodValue("Convert", v)
3365 }
3366 op := convertOp(t.common(), v.typ())
3367 if op == nil {
3368 panic("reflect.Value.Convert: value of type " + stringFor(v.typ()) + " cannot be converted to type " + t.String())
3369 }
3370 return op(v, t)
3371 }
3372
3373
3374
3375 func (v Value) CanConvert(t Type) bool {
3376 vt := v.Type()
3377 if !vt.ConvertibleTo(t) {
3378 return false
3379 }
3380
3381
3382 switch {
3383 case vt.Kind() == Slice && t.Kind() == Array:
3384 if t.Len() > v.Len() {
3385 return false
3386 }
3387 case vt.Kind() == Slice && t.Kind() == Pointer && t.Elem().Kind() == Array:
3388 n := t.Elem().Len()
3389 if n > v.Len() {
3390 return false
3391 }
3392 }
3393 return true
3394 }
3395
3396
3397
3398
3399
3400 func (v Value) Comparable() bool {
3401 k := v.Kind()
3402 switch k {
3403 case Invalid:
3404 return false
3405
3406 case Array:
3407 switch v.Type().Elem().Kind() {
3408 case Interface, Array, Struct:
3409 for i := 0; i < v.Type().Len(); i++ {
3410 if !v.Index(i).Comparable() {
3411 return false
3412 }
3413 }
3414 return true
3415 }
3416 return v.Type().Comparable()
3417
3418 case Interface:
3419 return v.Elem().Comparable()
3420
3421 case Struct:
3422 for i := 0; i < v.NumField(); i++ {
3423 if !v.Field(i).Comparable() {
3424 return false
3425 }
3426 }
3427 return true
3428
3429 default:
3430 return v.Type().Comparable()
3431 }
3432 }
3433
3434
3435
3436
3437
3438
3439
3440
3441
// Equal reports true if v is equal to u.
// For two invalid values, it reports true only if both are invalid.
// For values of distinct types, it reports false.
// Otherwise it panics if the values' type is not comparable.
func (v Value) Equal(u Value) bool {
	// Compare the dynamic values held by interfaces.
	if v.Kind() == Interface {
		v = v.Elem()
	}
	if u.Kind() == Interface {
		u = u.Elem()
	}

	if !v.IsValid() || !u.IsValid() {
		return v.IsValid() == u.IsValid()
	}

	if v.Kind() != u.Kind() || v.Type() != u.Type() {
		return false
	}

	// Handle each Kind directly rather than going through an interface
	// value, to avoid allocating.
	switch v.Kind() {
	default:
		panic("reflect.Value.Equal: invalid Kind")
	case Bool:
		return v.Bool() == u.Bool()
	case Int, Int8, Int16, Int32, Int64:
		return v.Int() == u.Int()
	case Uint, Uint8, Uint16, Uint32, Uint64, Uintptr:
		return v.Uint() == u.Uint()
	case Float32, Float64:
		return v.Float() == u.Float()
	case Complex64, Complex128:
		return v.Complex() == u.Complex()
	case String:
		return v.String() == u.String()
	case Chan, Pointer, UnsafePointer:
		return v.Pointer() == u.Pointer()
	case Array:
		// u and v have the same type, so they have the same length.
		vl := v.Len()
		if vl == 0 {
			// A zero-length array of a non-comparable element type is
			// itself not comparable, so fall through to the panic below.
			if !v.Type().Elem().Comparable() {
				break
			}
			return true
		}
		for i := 0; i < vl; i++ {
			if !v.Index(i).Equal(u.Index(i)) {
				return false
			}
		}
		return true
	case Struct:
		// u and v have the same type, so they have the same fields.
		nf := v.NumField()
		for i := 0; i < nf; i++ {
			if !v.Field(i).Equal(u.Field(i)) {
				return false
			}
		}
		return true
	case Func, Map, Slice:
		// Never comparable (except to nil, which Equal does not handle).
		break
	}
	panic("reflect.Value.Equal: values of type " + v.Type().String() + " are not comparable")
}
3507
3508
3509
// convertOp returns the function to convert a value of type src
// to a value of type dst. If the conversion is illegal, convertOp
// returns nil.
func convertOp(dst, src *abi.Type) func(Value, Type) Value {
	switch Kind(src.Kind()) {
	case Int, Int8, Int16, Int32, Int64:
		switch Kind(dst.Kind()) {
		case Int, Int8, Int16, Int32, Int64, Uint, Uint8, Uint16, Uint32, Uint64, Uintptr:
			return cvtInt
		case Float32, Float64:
			return cvtIntFloat
		case String:
			return cvtIntString
		}

	case Uint, Uint8, Uint16, Uint32, Uint64, Uintptr:
		switch Kind(dst.Kind()) {
		case Int, Int8, Int16, Int32, Int64, Uint, Uint8, Uint16, Uint32, Uint64, Uintptr:
			return cvtUint
		case Float32, Float64:
			return cvtUintFloat
		case String:
			return cvtUintString
		}

	case Float32, Float64:
		switch Kind(dst.Kind()) {
		case Int, Int8, Int16, Int32, Int64:
			return cvtFloatInt
		case Uint, Uint8, Uint16, Uint32, Uint64, Uintptr:
			return cvtFloatUint
		case Float32, Float64:
			return cvtFloat
		}

	case Complex64, Complex128:
		switch Kind(dst.Kind()) {
		case Complex64, Complex128:
			return cvtComplex
		}

	case String:
		// string -> []byte or []rune, only for unnamed element types.
		if dst.Kind() == abi.Slice && pkgPathFor(dst.Elem()) == "" {
			switch Kind(dst.Elem().Kind()) {
			case Uint8:
				return cvtStringBytes
			case Int32:
				return cvtStringRunes
			}
		}

	case Slice:
		// []byte or []rune -> string, only for unnamed element types.
		if dst.Kind() == abi.String && pkgPathFor(src.Elem()) == "" {
			switch Kind(src.Elem().Kind()) {
			case Uint8:
				return cvtBytesString
			case Int32:
				return cvtRunesString
			}
		}

		// Slice to pointer-to-array with identical element types.
		if dst.Kind() == abi.Pointer && dst.Elem().Kind() == abi.Array && src.Elem() == dst.Elem().Elem() {
			return cvtSliceArrayPtr
		}

		// Slice to array with identical element types.
		if dst.Kind() == abi.Array && src.Elem() == dst.Elem() {
			return cvtSliceArray
		}

	case Chan:
		if dst.Kind() == abi.Chan && specialChannelAssignability(dst, src) {
			return cvtDirect
		}
	}

	// dst and src have the same underlying type.
	if haveIdenticalUnderlyingType(dst, src, false) {
		return cvtDirect
	}

	// dst and src are non-defined pointer types with the same
	// underlying base type.
	if dst.Kind() == abi.Pointer && nameFor(dst) == "" &&
		src.Kind() == abi.Pointer && nameFor(src) == "" &&
		haveIdenticalUnderlyingType(elem(dst), elem(src), false) {
		return cvtDirect
	}

	if implements(dst, src) {
		if src.Kind() == abi.Interface {
			return cvtI2I
		}
		return cvtT2I
	}

	return nil
}
3605
3606
3607
3608 func makeInt(f flag, bits uint64, t Type) Value {
3609 typ := t.common()
3610 ptr := unsafe_New(typ)
3611 switch typ.Size() {
3612 case 1:
3613 *(*uint8)(ptr) = uint8(bits)
3614 case 2:
3615 *(*uint16)(ptr) = uint16(bits)
3616 case 4:
3617 *(*uint32)(ptr) = uint32(bits)
3618 case 8:
3619 *(*uint64)(ptr) = bits
3620 }
3621 return Value{typ, ptr, f | flagIndir | flag(typ.Kind())}
3622 }
3623
3624
3625
3626 func makeFloat(f flag, v float64, t Type) Value {
3627 typ := t.common()
3628 ptr := unsafe_New(typ)
3629 switch typ.Size() {
3630 case 4:
3631 *(*float32)(ptr) = float32(v)
3632 case 8:
3633 *(*float64)(ptr) = v
3634 }
3635 return Value{typ, ptr, f | flagIndir | flag(typ.Kind())}
3636 }
3637
3638
// makeFloat32 returns a Value of type t (a float32 type) equal to v.
// Unlike makeFloat, it stores the float32 directly, never round-tripping
// through float64 (which would alter signaling NaN bit patterns).
func makeFloat32(f flag, v float32, t Type) Value {
	typ := t.common()
	ptr := unsafe_New(typ)
	*(*float32)(ptr) = v
	return Value{typ, ptr, f | flagIndir | flag(typ.Kind())}
}
3645
3646
3647
3648 func makeComplex(f flag, v complex128, t Type) Value {
3649 typ := t.common()
3650 ptr := unsafe_New(typ)
3651 switch typ.Size() {
3652 case 8:
3653 *(*complex64)(ptr) = complex64(v)
3654 case 16:
3655 *(*complex128)(ptr) = v
3656 }
3657 return Value{typ, ptr, f | flagIndir | flag(typ.Kind())}
3658 }
3659
// makeString returns a Value of type t (a string type) holding v.
// The flag f carries read-only bits to merge into the result; the
// address bit is cleared so the result is not addressable.
func makeString(f flag, v string, t Type) Value {
	ret := New(t).Elem()
	ret.SetString(v)
	ret.flag = ret.flag&^flagAddr | f
	return ret
}

// makeBytes returns a Value of type t (a []byte-like type) holding v.
// Flag handling is as in makeString.
func makeBytes(f flag, v []byte, t Type) Value {
	ret := New(t).Elem()
	ret.SetBytes(v)
	ret.flag = ret.flag&^flagAddr | f
	return ret
}

// makeRunes returns a Value of type t (a []rune-like type) holding v.
// Flag handling is as in makeString.
func makeRunes(f flag, v []rune, t Type) Value {
	ret := New(t).Elem()
	ret.setRunes(v)
	ret.flag = ret.flag&^flagAddr | f
	return ret
}
3680
3681
3682
3683
3684
3685
3686
// convertOp: intN -> [u]intN
func cvtInt(v Value, t Type) Value {
	return makeInt(v.flag.ro(), uint64(v.Int()), t)
}

// convertOp: uintN -> [u]intN
func cvtUint(v Value, t Type) Value {
	return makeInt(v.flag.ro(), v.Uint(), t)
}

// convertOp: floatN -> intN
func cvtFloatInt(v Value, t Type) Value {
	return makeInt(v.flag.ro(), uint64(int64(v.Float())), t)
}

// convertOp: floatN -> uintN
func cvtFloatUint(v Value, t Type) Value {
	return makeInt(v.flag.ro(), uint64(v.Float()), t)
}

// convertOp: intN -> floatN
func cvtIntFloat(v Value, t Type) Value {
	return makeFloat(v.flag.ro(), float64(v.Int()), t)
}

// convertOp: uintN -> floatN
func cvtUintFloat(v Value, t Type) Value {
	return makeFloat(v.flag.ro(), float64(v.Uint()), t)
}

// convertOp: floatN -> floatN
func cvtFloat(v Value, t Type) Value {
	if v.Type().Kind() == Float32 && t.Kind() == Float32 {
		// Don't do any conversion if both types have underlying type
		// float32: converting to float64 and back would turn a
		// signaling NaN into a quiet NaN.
		return makeFloat32(v.flag.ro(), *(*float32)(v.ptr), t)
	}
	return makeFloat(v.flag.ro(), v.Float(), t)
}

// convertOp: complexN -> complexN
func cvtComplex(v Value, t Type) Value {
	return makeComplex(v.flag.ro(), v.Complex(), t)
}
3731
3732
// convertOp: intN -> string
func cvtIntString(v Value, t Type) Value {
	s := "\uFFFD" // Unicode replacement char for out-of-range code points
	if x := v.Int(); int64(rune(x)) == x {
		s = string(rune(x))
	}
	return makeString(v.flag.ro(), s, t)
}

// convertOp: uintN -> string
func cvtUintString(v Value, t Type) Value {
	s := "\uFFFD" // Unicode replacement char for out-of-range code points
	if x := v.Uint(); uint64(rune(x)) == x {
		s = string(rune(x))
	}
	return makeString(v.flag.ro(), s, t)
}

// convertOp: []byte -> string
func cvtBytesString(v Value, t Type) Value {
	return makeString(v.flag.ro(), string(v.Bytes()), t)
}

// convertOp: string -> []byte
func cvtStringBytes(v Value, t Type) Value {
	return makeBytes(v.flag.ro(), []byte(v.String()), t)
}

// convertOp: []rune -> string
func cvtRunesString(v Value, t Type) Value {
	return makeString(v.flag.ro(), string(v.runes()), t)
}

// convertOp: string -> []rune
func cvtStringRunes(v Value, t Type) Value {
	return makeRunes(v.flag.ro(), []rune(v.String()), t)
}
3769
3770
// convertOp: []T -> *[N]T
func cvtSliceArrayPtr(v Value, t Type) Value {
	n := t.Elem().Len()
	if n > v.Len() {
		panic("reflect: cannot convert slice with length " + itoa.Itoa(v.Len()) + " to pointer to array with length " + itoa.Itoa(n))
	}
	// The result aliases the slice's backing array: no copy is made.
	h := (*unsafeheader.Slice)(v.ptr)
	return Value{t.common(), h.Data, v.flag&^(flagIndir|flagAddr|flagKindMask) | flag(Pointer)}
}
3779
3780
// convertOp: []T -> [N]T
func cvtSliceArray(v Value, t Type) Value {
	n := t.Len()
	if n > v.Len() {
		panic("reflect: cannot convert slice with length " + itoa.Itoa(v.Len()) + " to array with length " + itoa.Itoa(n))
	}
	h := (*unsafeheader.Slice)(v.ptr)
	typ := t.common()
	ptr := h.Data
	// Copy the first n elements out of the backing array so the result
	// does not alias the slice.
	c := unsafe_New(typ)
	typedmemmove(typ, c, ptr)
	ptr = c

	return Value{typ, ptr, v.flag&^(flagAddr|flagKindMask) | flag(Array)}
}
3795
3796
// convertOp: direct copy — src and dst share a memory representation,
// so only the type word changes.
func cvtDirect(v Value, typ Type) Value {
	f := v.flag
	t := typ.common()
	ptr := v.ptr
	if f&flagAddr != 0 {
		// The value is addressable (mutable): snapshot it so the result
		// cannot change underfoot, and drop the address bit.
		c := unsafe_New(t)
		typedmemmove(t, c, ptr)
		ptr = c
		f &^= flagAddr
	}
	return Value{t, ptr, v.flag.ro() | f}
}
3810
3811
// convertOp: concrete -> interface
func cvtT2I(v Value, typ Type) Value {
	target := unsafe_New(typ.common())
	x := valueInterface(v, false)
	if typ.NumMethod() == 0 {
		// Empty interface: store the eface directly.
		*(*any)(target) = x
	} else {
		// Non-empty interface: build the itab via the runtime.
		ifaceE2I(typ.common(), x, target)
	}
	return Value{typ.common(), target, v.flag.ro() | flagIndir | flag(Interface)}
}
3822
3823
// convertOp: interface -> interface
func cvtI2I(v Value, typ Type) Value {
	if v.IsNil() {
		// A nil interface converts to a nil value of the target type,
		// preserving any read-only bits.
		ret := Zero(typ)
		ret.flag |= v.flag.ro()
		return ret
	}
	// Convert the dynamic value.
	return cvtT2I(v.Elem(), typ)
}
3832
3833
3834
3835
// Channel primitives. These declarations have no Go bodies here;
// they are implemented in the runtime package.

// chancap returns the capacity of channel ch.
func chancap(ch unsafe.Pointer) int

// chanclose closes channel ch.
func chanclose(ch unsafe.Pointer)

// chanlen returns the number of elements queued in channel ch.
func chanlen(ch unsafe.Pointer) int

// chanrecv receives from channel ch into val. If nb is true the
// operation does not block. selected/received report the outcome —
// NOTE(review): see runtime/chan.go for the precise semantics of the
// two result flags.
func chanrecv(ch unsafe.Pointer, nb bool, val unsafe.Pointer) (selected, received bool)

// chansend0 is the raw runtime channel send. Callers should use the
// chansend wrapper below, which first makes the sent value escape.
func chansend0(ch unsafe.Pointer, val unsafe.Pointer, nb bool) bool
3856
// chansend sends val on channel ch, first forcing the value's content
// to escape so the runtime may retain it beyond this call.
func chansend(ch unsafe.Pointer, val unsafe.Pointer, nb bool) bool {
	contentEscapes(val)
	return chansend0(ch, val, nb)
}
3861
// makechan creates a channel of type typ with the given buffer size
// (runtime-implemented).
func makechan(typ *abi.Type, size int) (ch unsafe.Pointer)

// makemap creates a map of type t with the given size hint
// (runtime-implemented).
func makemap(t *abi.Type, cap int) (m unsafe.Pointer)
3864
3865
// mapaccess looks up key in map m, returning a pointer to the value
// (runtime-implemented).
func mapaccess(t *abi.Type, m unsafe.Pointer, key unsafe.Pointer) (val unsafe.Pointer)

// mapaccess_faststr is the string-key fast path of mapaccess.
func mapaccess_faststr(t *abi.Type, m unsafe.Pointer, key string) (val unsafe.Pointer)

// mapassign0 is the raw runtime map assignment. Callers should use
// the mapassign wrapper below, which first makes key and val escape.
func mapassign0(t *abi.Type, m unsafe.Pointer, key, val unsafe.Pointer)
3873
// mapassign stores val under key in map m, first forcing the contents
// of both pointers to escape so the map may retain them.
func mapassign(t *abi.Type, m unsafe.Pointer, key, val unsafe.Pointer) {
	contentEscapes(key)
	contentEscapes(val)
	mapassign0(t, m, key, val)
}
3879
3880
// mapassign_faststr0 is the raw string-key fast path of map assignment
// (runtime-implemented); use the mapassign_faststr wrapper below.
func mapassign_faststr0(t *abi.Type, m unsafe.Pointer, key string, val unsafe.Pointer)

// mapassign_faststr stores val under string key in map m, first making
// the key's backing bytes and val's content escape.
func mapassign_faststr(t *abi.Type, m unsafe.Pointer, key string, val unsafe.Pointer) {
	contentEscapes((*unsafeheader.String)(unsafe.Pointer(&key)).Data)
	contentEscapes(val)
	mapassign_faststr0(t, m, key, val)
}
3888
3889
// Map primitives, implemented in the runtime package.

// mapdelete removes the entry for key from map m.
func mapdelete(t *abi.Type, m unsafe.Pointer, key unsafe.Pointer)

// mapdelete_faststr is the string-key fast path of mapdelete.
func mapdelete_faststr(t *abi.Type, m unsafe.Pointer, key string)

// mapiterinit initializes iterator it for ranging over map m.
func mapiterinit(t *abi.Type, m unsafe.Pointer, it *hiter)

// mapiterkey returns a pointer to the iterator's current key.
func mapiterkey(it *hiter) (key unsafe.Pointer)

// mapiterelem returns a pointer to the iterator's current value.
func mapiterelem(it *hiter) (elem unsafe.Pointer)

// mapiternext advances the iterator it to the next entry.
func mapiternext(it *hiter)

// maplen returns the number of entries in map m.
func maplen(m unsafe.Pointer) int

// mapclear deletes all entries from map m.
func mapclear(t *abi.Type, m unsafe.Pointer)
3911
3912
3913
3914
3915
3916
3917
3918
3919
3920
3921
3922
3923
3924
3925
3926
3927
3928
3929
3930
3931
3932
3933
3934
3935
3936
3937
// call invokes the function f with the given stack-argument frame and
// register arguments; implemented in the runtime package.
func call(stackArgsType *abi.Type, f, stackArgs unsafe.Pointer, stackArgsSize, stackRetOffset, frameSize uint32, regArgs *abi.RegArgs)

// ifaceE2I converts the empty-interface value src to the non-empty
// interface type t, storing the result at dst (runtime-implemented).
func ifaceE2I(t *abi.Type, src any, dst unsafe.Pointer)

// memmove copies size bytes from src to dst with no write barriers —
// NOTE(review): per runtime convention, only safe for memory without
// heap pointers; confirm against runtime docs.
func memmove(dst, src unsafe.Pointer, size uintptr)

// typedmemmove copies a value of type t from src to dst.
func typedmemmove(t *abi.Type, dst, src unsafe.Pointer)

// typedmemclr zeroes the value of type t at ptr.
func typedmemclr(t *abi.Type, ptr unsafe.Pointer)

// typedmemclrpartial zeroes size bytes of the value of type t at ptr,
// starting off bytes into it.
func typedmemclrpartial(t *abi.Type, ptr unsafe.Pointer, off, size uintptr)

// typedslicecopy copies elements of type t from the src slice to the
// dst slice, returning the number of elements copied.
func typedslicecopy(t *abi.Type, dst, src unsafeheader.Slice) int

// typedarrayclear zeroes len elements of type elemType starting at ptr.
func typedarrayclear(elemType *abi.Type, ptr unsafe.Pointer, len int)

// typehash computes the hash of the value of type t at p, seeded by h.
func typehash(t *abi.Type, p unsafe.Pointer, h uintptr) uintptr

// verifyNotInHeapPtr checks a pointer destined for not-in-heap storage —
// NOTE(review): see the runtime for the exact contract.
func verifyNotInHeapPtr(p uintptr) bool

// growslice grows the slice old of element type t to hold at least
// num more elements (runtime-implemented).
func growslice(t *abi.Type, old unsafeheader.Slice, num int) unsafeheader.Slice
3982
3983
3984
3985
// escapes forces x to be heap-allocated: assigning x to the
// package-level sink dummy.x makes escape analysis treat x as
// escaping. The dummy.b guard is never set, so the store never
// actually executes at run time.
func escapes(x any) {
	if dummy.b {
		dummy.x = x
	}
}

// dummy is the never-written sink used by escapes; it exists purely
// to defeat escape analysis.
var dummy struct {
	b bool // always false at run time; nothing in this file sets it
	x any  // sink for escaping values
}
3996
3997
3998
3999
4000
// contentEscapes makes the content that x points to escape (but not
// the pointer x itself), using the same dead-guard trick as escapes.
func contentEscapes(x unsafe.Pointer) {
	if dummy.b {
		escapes(*(*any)(x))
	}
}
4006
4007
// noescape hides a pointer from escape analysis. It is the identity
// function, but the uintptr round-trip with the no-op XOR breaks the
// data-flow link so escape analysis does not think the output depends
// on the input. USE CAREFULLY!
func noescape(p unsafe.Pointer) unsafe.Pointer {
	x := uintptr(p)
	return unsafe.Pointer(x ^ 0) // XOR with 0 is a no-op at run time
}
4012
View as plain text