Source file
src/reflect/value.go
1 // Copyright 2009 The Go Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style
3 // license that can be found in the LICENSE file.
4
5 package reflect
6
7 import (
8 "errors"
9 "internal/abi"
10 "internal/goarch"
11 "internal/itoa"
12 "internal/unsafeheader"
13 "math"
14 "runtime"
15 "unsafe"
16 )
17
18 // Value is the reflection interface to a Go value.
19 //
20 // Not all methods apply to all kinds of values. Restrictions,
21 // if any, are noted in the documentation for each method.
22 // Use the Kind method to find out the kind of value before
23 // calling kind-specific methods. Calling a method
24 // inappropriate to the kind of type causes a run time panic.
25 //
26 // The zero Value represents no value.
27 // Its IsValid method returns false, its Kind method returns Invalid,
28 // its String method returns "<invalid Value>", and all other methods panic.
29 // Most functions and methods never return an invalid value.
30 // If one does, its documentation states the conditions explicitly.
31 //
32 // A Value can be used concurrently by multiple goroutines provided that
33 // the underlying Go value can be used concurrently for the equivalent
34 // direct operations.
35 //
36 // To compare two Values, compare the results of the Interface method.
37 // Using == on two Values does not compare the underlying values
38 // they represent.
39 type Value struct {
40 // typ_ holds the type of the value represented by a Value.
41 // Access it through the typ method to avoid escape of v.
42 typ_ *abi.Type
43
44 // Pointer-valued data or, if flagIndir is set, pointer to data.
45 // Valid when either flagIndir is set or typ.pointers() is true.
46 ptr unsafe.Pointer
47
48 // flag holds metadata about the value.
49 //
50 // The lowest five bits give the Kind of the value, mirroring typ_.Kind().
51 //
52 // The next set of bits are flag bits:
53 //	- flagStickyRO: obtained via unexported not embedded field, so read-only
54 //	- flagEmbedRO: obtained via unexported embedded field, so read-only
55 //	- flagIndir: ptr holds a pointer to the data
56 //	- flagAddr: v.CanAddr is true (implies flagIndir and ptr is non-nil)
57 //	- flagMethod: v is a method value
58 // If ifaceIndir(typ_), code can assume that flagIndir is set.
59 //
60 // The remaining higher bits give a method number for method values.
61 // If flag.kind() != Func, code can assume that flagMethod is unset.
62 flag
63
64
65
66
67
68
69 }
70
71 type flag uintptr
72
73 const (
74 flagKindWidth = 5
75 flagKindMask flag = 1<<flagKindWidth - 1
76 flagStickyRO flag = 1 << 5
77 flagEmbedRO flag = 1 << 6
78 flagIndir flag = 1 << 7
79 flagAddr flag = 1 << 8
80 flagMethod flag = 1 << 9
81 flagMethodShift = 10
82 flagRO flag = flagStickyRO | flagEmbedRO
83 )
84
85 func (f flag) kind() Kind {
86 return Kind(f & flagKindMask)
87 }
88
89 func (f flag) ro() flag {
90 if f&flagRO != 0 {
91 return flagStickyRO
92 }
93 return 0
94 }
95
96 func (v Value) typ() *abi.Type {
97 // Types are either static (stored in the binary) or created at
98 // runtime in allocated memory. It is hard for the compiler to know
99 // that the type pointer won't escape, so noescape is used here to
100 // tell it about the escapability of typ_.
101
102 return (*abi.Type)(noescape(unsafe.Pointer(v.typ_)))
103 }
104
105
106
107
108 func (v Value) pointer() unsafe.Pointer {
109 if v.typ().Size() != goarch.PtrSize || !v.typ().Pointers() {
110 panic("can't call pointer on a non-pointer Value")
111 }
112 if v.flag&flagIndir != 0 {
113 return *(*unsafe.Pointer)(v.ptr)
114 }
115 return v.ptr
116 }
117
118 // packEface converts v to the empty interface.
119 func packEface(v Value) any {
120 t := v.typ()
121 var i any
122 e := (*emptyInterface)(unsafe.Pointer(&i))
123
124 switch {
125 case t.IfaceIndir():
126 if v.flag&flagIndir == 0 {
127 panic("bad indir")
128 }
129
130 ptr := v.ptr
131 if v.flag&flagAddr != 0 {
132
133
134 c := unsafe_New(t)
135 typedmemmove(t, c, ptr)
136 ptr = c
137 }
138 e.word = ptr
139 case v.flag&flagIndir != 0:
140
141
142 e.word = *(*unsafe.Pointer)(v.ptr)
143 default:
144
145 e.word = v.ptr
146 }
147
148
149
150
151 e.typ = t
152 return i
153 }
154
155 // unpackEface converts the empty interface i to a Value.
156 func unpackEface(i any) Value {
157 e := (*emptyInterface)(unsafe.Pointer(&i))
158
159 t := e.typ
160 if t == nil {
161 return Value{}
162 }
163 f := flag(t.Kind())
164 if t.IfaceIndir() {
165 f |= flagIndir
166 }
167 return Value{t, e.word, f}
168 }
169
170 // A ValueError occurs when a Value method is invoked on
171 // a Value that does not support it. Such cases are documented
172 // in the description of each method.
173 type ValueError struct {
174 Method string
175 Kind Kind
176 }
177
178 func (e *ValueError) Error() string {
179 if e.Kind == 0 {
180 return "reflect: call of " + e.Method + " on zero Value"
181 }
182 return "reflect: call of " + e.Method + " on " + e.Kind.String() + " Value"
183 }
184
185
186 func valueMethodName() string {
187 var pc [5]uintptr
188 n := runtime.Callers(1, pc[:])
189 frames := runtime.CallersFrames(pc[:n])
190 var frame runtime.Frame
191 for more := true; more; {
192 const prefix = "reflect.Value."
193 frame, more = frames.Next()
194 name := frame.Function
195 if len(name) > len(prefix) && name[:len(prefix)] == prefix {
196 methodName := name[len(prefix):]
197 if len(methodName) > 0 && 'A' <= methodName[0] && methodName[0] <= 'Z' {
198 return name
199 }
200 }
201 }
202 return "unknown method"
203 }
204
205 // emptyInterface is the header for an interface{} value.
206 type emptyInterface struct {
207 typ *abi.Type
208 word unsafe.Pointer
209 }
210
211 // nonEmptyInterface is the header for an interface value with methods.
212 type nonEmptyInterface struct {
213
214 itab *struct {
215 ityp *abi.Type
216 typ *abi.Type
217 hash uint32
218 _ [4]byte
219 fun [100000]unsafe.Pointer
220 }
221 word unsafe.Pointer
222 }
223
224
225
226
227
228
229
230 func (f flag) mustBe(expected Kind) {
231
232 if Kind(f&flagKindMask) != expected {
233 panic(&ValueError{valueMethodName(), f.kind()})
234 }
235 }
236
237
238
239 func (f flag) mustBeExported() {
240 if f == 0 || f&flagRO != 0 {
241 f.mustBeExportedSlow()
242 }
243 }
244
245 func (f flag) mustBeExportedSlow() {
246 if f == 0 {
247 panic(&ValueError{valueMethodName(), Invalid})
248 }
249 if f&flagRO != 0 {
250 panic("reflect: " + valueMethodName() + " using value obtained using unexported field")
251 }
252 }
253
254
255
256
257 func (f flag) mustBeAssignable() {
258 if f&flagRO != 0 || f&flagAddr == 0 {
259 f.mustBeAssignableSlow()
260 }
261 }
262
263 func (f flag) mustBeAssignableSlow() {
264 if f == 0 {
265 panic(&ValueError{valueMethodName(), Invalid})
266 }
267
268 if f&flagRO != 0 {
269 panic("reflect: " + valueMethodName() + " using value obtained using unexported field")
270 }
271 if f&flagAddr == 0 {
272 panic("reflect: " + valueMethodName() + " using unaddressable value")
273 }
274 }
275
276 // Addr returns a pointer value representing the address of v.
277 // It panics if CanAddr() returns false.
278 // Addr is typically used to obtain a pointer to a struct field
279 // or slice element in order to call a method that requires a
280 // pointer receiver.
281 func (v Value) Addr() Value {
282 if v.flag&flagAddr == 0 {
283 panic("reflect.Value.Addr of unaddressable value")
284 }
285
286
287 fl := v.flag & flagRO
288 return Value{ptrTo(v.typ()), v.ptr, fl | flag(Pointer)}
289 }
290
291 // Bool returns v's underlying value.
292 // It panics if v's kind is not Bool.
293 func (v Value) Bool() bool {
294
295 if v.kind() != Bool {
296 v.panicNotBool()
297 }
298 return *(*bool)(v.ptr)
299 }
300
301 func (v Value) panicNotBool() {
302 v.mustBe(Bool)
303 }
304
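// bytesType is the cached type descriptor for []byte. Bytes compares
// v.typ_ against it below as a fast path before falling back to bytesSlow.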
305 var bytesType = rtypeOf(([]byte)(nil))
306
307 // Bytes returns v's underlying value.
308 // It panics if v's underlying value is not a slice of bytes or
309 // an addressable array of bytes.
310 func (v Value) Bytes() []byte {
311
312 if v.typ_ == bytesType {
313 return *(*[]byte)(v.ptr)
314 }
315 return v.bytesSlow()
316 }
317
318 func (v Value) bytesSlow() []byte {
319 switch v.kind() {
320 case Slice:
321 if v.typ().Elem().Kind() != abi.Uint8 {
322 panic("reflect.Value.Bytes of non-byte slice")
323 }
324
325 return *(*[]byte)(v.ptr)
326 case Array:
327 if v.typ().Elem().Kind() != abi.Uint8 {
328 panic("reflect.Value.Bytes of non-byte array")
329 }
330 if !v.CanAddr() {
331 panic("reflect.Value.Bytes of unaddressable byte array")
332 }
333 p := (*byte)(v.ptr)
334 n := int((*arrayType)(unsafe.Pointer(v.typ())).Len)
335 return unsafe.Slice(p, n)
336 }
337 panic(&ValueError{"reflect.Value.Bytes", v.kind()})
338 }
339
340
341
342 func (v Value) runes() []rune {
343 v.mustBe(Slice)
344 if v.typ().Elem().Kind() != abi.Int32 {
345 panic("reflect.Value.Bytes of non-rune slice")
346 }
347
348 return *(*[]rune)(v.ptr)
349 }
350
351 // CanAddr reports whether the value's address can be obtained with Addr.
352 // Such values are called addressable. A value can be addressed if it is
353 // an element of a slice, the field of an addressable struct, the element
354 // of an addressable array, or the result of dereferencing a pointer.
355 // If CanAddr returns false, calling Addr will panic.
356 func (v Value) CanAddr() bool {
357 return v.flag&flagAddr != 0
358 }
359
360 // CanSet reports whether the value of v can be changed.
361 // A Value can be changed only if it is addressable and was not
362 // obtained by the use of unexported struct fields.
363 // If CanSet returns false, calling Set or any type-specific
364 // setter (e.g., SetBool, SetInt) will panic.
365 func (v Value) CanSet() bool {
366 return v.flag&(flagAddr|flagRO) == flagAddr
367 }
368
369 // Call calls the function v with the input arguments in.
370 // As in Go, each input argument must be assignable to the
371 // type of the function's corresponding input parameter.
372 // Call panics if v's Kind is not Func.
373 // As in Go, it panics if v is a nil function.
374 // Call returns the output results as Values.
375 // As in Go, each output argument must be assignable to
376 // the type of the function's corresponding output parameter.
377 func (v Value) Call(in []Value) []Value {
378 v.mustBe(Func)
379 v.mustBeExported()
380 return v.call("Call", in)
381 }
382
383 // CallSlice calls the variadic function v with the input arguments in,
384 // assigning the slice in[len(in)-1] to v's final variadic argument.
385 // For example, if len(in) == 3, v.CallSlice(in) represents the Go call v(in[0], in[1], in[2]...).
386 // CallSlice panics if v's Kind is not Func or if v is not variadic.
387 // It returns the output results as Values.
388 // As in Go, each input argument must be assignable to the
389 // type of the function's corresponding input parameter.
390 func (v Value) CallSlice(in []Value) []Value {
391 v.mustBe(Func)
392 v.mustBeExported()
393 return v.call("CallSlice", in)
394 }
395
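// callGC is set by tests to force a garbage collection immediately before
// and after a reflected call, exercising argument frame liveness.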
396 var callGC bool
397
398 const debugReflectCall = false
399
400 func (v Value) call(op string, in []Value) []Value {
401
402 t := (*funcType)(unsafe.Pointer(v.typ()))
403 var (
404 fn unsafe.Pointer
405 rcvr Value
406 rcvrtype *abi.Type
407 )
408 if v.flag&flagMethod != 0 {
409 rcvr = v
410 rcvrtype, t, fn = methodReceiver(op, v, int(v.flag)>>flagMethodShift)
411 } else if v.flag&flagIndir != 0 {
412 fn = *(*unsafe.Pointer)(v.ptr)
413 } else {
414 fn = v.ptr
415 }
416
417 if fn == nil {
418 panic("reflect.Value.Call: call of nil function")
419 }
420
421 isSlice := op == "CallSlice"
422 n := t.NumIn()
423 isVariadic := t.IsVariadic()
424 if isSlice {
425 if !isVariadic {
426 panic("reflect: CallSlice of non-variadic function")
427 }
428 if len(in) < n {
429 panic("reflect: CallSlice with too few input arguments")
430 }
431 if len(in) > n {
432 panic("reflect: CallSlice with too many input arguments")
433 }
434 } else {
435 if isVariadic {
436 n--
437 }
438 if len(in) < n {
439 panic("reflect: Call with too few input arguments")
440 }
441 if !isVariadic && len(in) > n {
442 panic("reflect: Call with too many input arguments")
443 }
444 }
445 for _, x := range in {
446 if x.Kind() == Invalid {
447 panic("reflect: " + op + " using zero Value argument")
448 }
449 }
450 for i := 0; i < n; i++ {
451 if xt, targ := in[i].Type(), t.In(i); !xt.AssignableTo(toRType(targ)) {
452 panic("reflect: " + op + " using " + xt.String() + " as type " + stringFor(targ))
453 }
454 }
455 if !isSlice && isVariadic {
456
457 m := len(in) - n
458 slice := MakeSlice(toRType(t.In(n)), m, m)
459 elem := toRType(t.In(n)).Elem()
460 for i := 0; i < m; i++ {
461 x := in[n+i]
462 if xt := x.Type(); !xt.AssignableTo(elem) {
463 panic("reflect: cannot use " + xt.String() + " as type " + elem.String() + " in " + op)
464 }
465 slice.Index(i).Set(x)
466 }
467 origIn := in
468 in = make([]Value, n+1)
469 copy(in[:n], origIn)
470 in[n] = slice
471 }
472
473 nin := len(in)
474 if nin != t.NumIn() {
475 panic("reflect.Value.Call: wrong argument count")
476 }
477 nout := t.NumOut()
478
479
480 var regArgs abi.RegArgs
481
482
483 frametype, framePool, abid := funcLayout(t, rcvrtype)
484
485
486 var stackArgs unsafe.Pointer
487 if frametype.Size() != 0 {
488 if nout == 0 {
489 stackArgs = framePool.Get().(unsafe.Pointer)
490 } else {
491
492
493 stackArgs = unsafe_New(frametype)
494 }
495 }
496 frameSize := frametype.Size()
497
498 if debugReflectCall {
499 println("reflect.call", stringFor(&t.Type))
500 abid.dump()
501 }
502
503
504
505
506 inStart := 0
507 if rcvrtype != nil {
508
509
510
511 switch st := abid.call.steps[0]; st.kind {
512 case abiStepStack:
513 storeRcvr(rcvr, stackArgs)
514 case abiStepPointer:
515 storeRcvr(rcvr, unsafe.Pointer(&regArgs.Ptrs[st.ireg]))
516 fallthrough
517 case abiStepIntReg:
518 storeRcvr(rcvr, unsafe.Pointer(&regArgs.Ints[st.ireg]))
519 case abiStepFloatReg:
520 storeRcvr(rcvr, unsafe.Pointer(&regArgs.Floats[st.freg]))
521 default:
522 panic("unknown ABI parameter kind")
523 }
524 inStart = 1
525 }
526
527
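// Copy each argument into its ABI-assigned location: abiStepStack steps go
// into the stack frame (stackArgs), while abiStepIntReg, abiStepPointer, and
// abiStepFloatReg steps go into regArgs, following the layout computed by
// funcLayout above.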
528 for i, v := range in {
529 v.mustBeExported()
530 targ := toRType(t.In(i))
531
532
533
534 v = v.assignTo("reflect.Value.Call", &targ.t, nil)
535 stepsLoop:
536 for _, st := range abid.call.stepsForValue(i + inStart) {
537 switch st.kind {
538 case abiStepStack:
539
540 addr := add(stackArgs, st.stkOff, "precomputed stack arg offset")
541 if v.flag&flagIndir != 0 {
542 typedmemmove(&targ.t, addr, v.ptr)
543 } else {
544 *(*unsafe.Pointer)(addr) = v.ptr
545 }
546
547 break stepsLoop
548 case abiStepIntReg, abiStepPointer:
549
550 if v.flag&flagIndir != 0 {
551 offset := add(v.ptr, st.offset, "precomputed value offset")
552 if st.kind == abiStepPointer {
553
554
555
556 regArgs.Ptrs[st.ireg] = *(*unsafe.Pointer)(offset)
557 }
558 intToReg(&regArgs, st.ireg, st.size, offset)
559 } else {
560 if st.kind == abiStepPointer {
561
562 regArgs.Ptrs[st.ireg] = v.ptr
563 }
564 regArgs.Ints[st.ireg] = uintptr(v.ptr)
565 }
566 case abiStepFloatReg:
567
568 if v.flag&flagIndir == 0 {
569 panic("attempted to copy pointer to FP register")
570 }
571 offset := add(v.ptr, st.offset, "precomputed value offset")
572 floatToReg(&regArgs, st.freg, st.size, offset)
573 default:
574 panic("unknown ABI part kind")
575 }
576 }
577 }
578
579
580 frameSize = align(frameSize, goarch.PtrSize)
581 frameSize += abid.spill
582
583
584 regArgs.ReturnIsPtr = abid.outRegPtrs
585
586 if debugReflectCall {
587 regArgs.Dump()
588 }
589
590
591 if callGC {
592 runtime.GC()
593 }
594
595
596 call(frametype, fn, stackArgs, uint32(frametype.Size()), uint32(abid.retOffset), uint32(frameSize), &regArgs)
597
598
599 if callGC {
600 runtime.GC()
601 }
602
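// Unpack the results. With no results the stack frame is scrubbed and
// recycled; otherwise stack-assigned results are referenced in place and
// register-assigned results are copied out into newly allocated memory.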
603 var ret []Value
604 if nout == 0 {
605 if stackArgs != nil {
606 typedmemclr(frametype, stackArgs)
607 framePool.Put(stackArgs)
608 }
609 } else {
610 if stackArgs != nil {
611
612
613
614 typedmemclrpartial(frametype, stackArgs, 0, abid.retOffset)
615 }
616
617
618 ret = make([]Value, nout)
619 for i := 0; i < nout; i++ {
620 tv := t.Out(i)
621 if tv.Size() == 0 {
622
623
624 ret[i] = Zero(toRType(tv))
625 continue
626 }
627 steps := abid.ret.stepsForValue(i)
628 if st := steps[0]; st.kind == abiStepStack {
629
630
631
632 fl := flagIndir | flag(tv.Kind())
633 ret[i] = Value{tv, add(stackArgs, st.stkOff, "tv.Size() != 0"), fl}
634
635
636
637
638 continue
639 }
640
641
642 if !ifaceIndir(tv) {
643
644
645 if steps[0].kind != abiStepPointer {
646 print("kind=", steps[0].kind, ", type=", stringFor(tv), "\n")
647 panic("mismatch between ABI description and types")
648 }
649 ret[i] = Value{tv, regArgs.Ptrs[steps[0].ireg], flag(tv.Kind())}
650 continue
651 }
652
653
654
655
656
657
658
659
660
661
662 s := unsafe_New(tv)
663 for _, st := range steps {
664 switch st.kind {
665 case abiStepIntReg:
666 offset := add(s, st.offset, "precomputed value offset")
667 intFromReg(&regArgs, st.ireg, st.size, offset)
668 case abiStepPointer:
669 s := add(s, st.offset, "precomputed value offset")
670 *((*unsafe.Pointer)(s)) = regArgs.Ptrs[st.ireg]
671 case abiStepFloatReg:
672 offset := add(s, st.offset, "precomputed value offset")
673 floatFromReg(&regArgs, st.freg, st.size, offset)
674 case abiStepStack:
675 panic("register-based return value has stack component")
676 default:
677 panic("unknown ABI part kind")
678 }
679 }
680 ret[i] = Value{tv, s, flagIndir | flag(tv.Kind())}
681 }
682 }
683
684 return ret
685 }
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
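// callReflect is the call implementation used by a function returned by
// MakeFunc. In many ways it is the opposite of Value.call above: it decodes
// the arguments from their ABI-specified locations (frame and regs) into a
// []Value, invokes the user's Go closure, and encodes the results back into
// the frame and registers before reporting them valid via retValid.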
707 func callReflect(ctxt *makeFuncImpl, frame unsafe.Pointer, retValid *bool, regs *abi.RegArgs) {
708 if callGC {
709
710
711
712
713
714 runtime.GC()
715 }
716 ftyp := ctxt.ftyp
717 f := ctxt.fn
718
719 _, _, abid := funcLayout(ftyp, nil)
720
721
722 ptr := frame
723 in := make([]Value, 0, int(ftyp.InCount))
724 for i, typ := range ftyp.InSlice() {
725 if typ.Size() == 0 {
726 in = append(in, Zero(toRType(typ)))
727 continue
728 }
729 v := Value{typ, nil, flag(typ.Kind())}
730 steps := abid.call.stepsForValue(i)
731 if st := steps[0]; st.kind == abiStepStack {
732 if ifaceIndir(typ) {
733
734
735
736
737 v.ptr = unsafe_New(typ)
738 if typ.Size() > 0 {
739 typedmemmove(typ, v.ptr, add(ptr, st.stkOff, "typ.size > 0"))
740 }
741 v.flag |= flagIndir
742 } else {
743 v.ptr = *(*unsafe.Pointer)(add(ptr, st.stkOff, "1-ptr"))
744 }
745 } else {
746 if ifaceIndir(typ) {
747
748
749 v.flag |= flagIndir
750 v.ptr = unsafe_New(typ)
751 for _, st := range steps {
752 switch st.kind {
753 case abiStepIntReg:
754 offset := add(v.ptr, st.offset, "precomputed value offset")
755 intFromReg(regs, st.ireg, st.size, offset)
756 case abiStepPointer:
757 s := add(v.ptr, st.offset, "precomputed value offset")
758 *((*unsafe.Pointer)(s)) = regs.Ptrs[st.ireg]
759 case abiStepFloatReg:
760 offset := add(v.ptr, st.offset, "precomputed value offset")
761 floatFromReg(regs, st.freg, st.size, offset)
762 case abiStepStack:
763 panic("register-based return value has stack component")
764 default:
765 panic("unknown ABI part kind")
766 }
767 }
768 } else {
769
770
771 if steps[0].kind != abiStepPointer {
772 print("kind=", steps[0].kind, ", type=", stringFor(typ), "\n")
773 panic("mismatch between ABI description and types")
774 }
775 v.ptr = regs.Ptrs[steps[0].ireg]
776 }
777 }
778 in = append(in, v)
779 }
780
781
782 out := f(in)
783 numOut := ftyp.NumOut()
784 if len(out) != numOut {
785 panic("reflect: wrong return count from function created by MakeFunc")
786 }
787
788
789 if numOut > 0 {
790 for i, typ := range ftyp.OutSlice() {
791 v := out[i]
792 if v.typ() == nil {
793 panic("reflect: function created by MakeFunc using " + funcName(f) +
794 " returned zero Value")
795 }
796 if v.flag&flagRO != 0 {
797 panic("reflect: function created by MakeFunc using " + funcName(f) +
798 " returned value obtained from unexported field")
799 }
800 if typ.Size() == 0 {
801 continue
802 }
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818 v = v.assignTo("reflect.MakeFunc", typ, nil)
819 stepsLoop:
820 for _, st := range abid.ret.stepsForValue(i) {
821 switch st.kind {
822 case abiStepStack:
823
824 addr := add(ptr, st.stkOff, "precomputed stack arg offset")
825
826
827
828
829 if v.flag&flagIndir != 0 {
830 memmove(addr, v.ptr, st.size)
831 } else {
832
833 *(*uintptr)(addr) = uintptr(v.ptr)
834 }
835
836 break stepsLoop
837 case abiStepIntReg, abiStepPointer:
838
839 if v.flag&flagIndir != 0 {
840 offset := add(v.ptr, st.offset, "precomputed value offset")
841 intToReg(regs, st.ireg, st.size, offset)
842 } else {
843
844
845
846
847
848 regs.Ints[st.ireg] = uintptr(v.ptr)
849 }
850 case abiStepFloatReg:
851
852 if v.flag&flagIndir == 0 {
853 panic("attempted to copy pointer to FP register")
854 }
855 offset := add(v.ptr, st.offset, "precomputed value offset")
856 floatToReg(regs, st.freg, st.size, offset)
857 default:
858 panic("unknown ABI part kind")
859 }
860 }
861 }
862 }
863
864
865
866 *retValid = true
867
868
869
870
871
872 runtime.KeepAlive(out)
873
874
875
876
877 runtime.KeepAlive(ctxt)
878 }
879
880 // methodReceiver returns information about the receiver
881 // described by v. The Value v may or may not have the
882 // flagMethod bit set, so the kind cached in v.flag should
883 // not be used.
884 // The return value rcvrtype gives the method's actual receiver type.
885 // The return value t gives the method type signature (without the receiver).
886 // The return value fn is a pointer to the method code.
887 func methodReceiver(op string, v Value, methodIndex int) (rcvrtype *abi.Type, t *funcType, fn unsafe.Pointer) {
888 i := methodIndex
889 if v.typ().Kind() == abi.Interface {
890 tt := (*interfaceType)(unsafe.Pointer(v.typ()))
891 if uint(i) >= uint(len(tt.Methods)) {
892 panic("reflect: internal error: invalid method index")
893 }
894 m := &tt.Methods[i]
895 if !tt.nameOff(m.Name).IsExported() {
896 panic("reflect: " + op + " of unexported method")
897 }
898 iface := (*nonEmptyInterface)(v.ptr)
899 if iface.itab == nil {
900 panic("reflect: " + op + " of method on nil interface value")
901 }
902 rcvrtype = iface.itab.typ
903 fn = unsafe.Pointer(&iface.itab.fun[i])
904 t = (*funcType)(unsafe.Pointer(tt.typeOff(m.Typ)))
905 } else {
906 rcvrtype = v.typ()
907 ms := v.typ().ExportedMethods()
908 if uint(i) >= uint(len(ms)) {
909 panic("reflect: internal error: invalid method index")
910 }
911 m := ms[i]
912 if !nameOffFor(v.typ(), m.Name).IsExported() {
913 panic("reflect: " + op + " of unexported method")
914 }
915 ifn := textOffFor(v.typ(), m.Ifn)
916 fn = unsafe.Pointer(&ifn)
917 t = (*funcType)(unsafe.Pointer(typeOffFor(v.typ(), m.Mtyp)))
918 }
919 return
920 }
921
922
923
924
925
926 func storeRcvr(v Value, p unsafe.Pointer) {
927 t := v.typ()
928 if t.Kind() == abi.Interface {
929
930 iface := (*nonEmptyInterface)(v.ptr)
931 *(*unsafe.Pointer)(p) = iface.word
932 } else if v.flag&flagIndir != 0 && !ifaceIndir(t) {
933 *(*unsafe.Pointer)(p) = *(*unsafe.Pointer)(v.ptr)
934 } else {
935 *(*unsafe.Pointer)(p) = v.ptr
936 }
937 }
938
939
940
941 func align(x, n uintptr) uintptr {
942 return (x + n - 1) &^ (n - 1)
943 }
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
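// callMethod is the call implementation used by a function returned by
// makeMethodValue (used by v.Method(i).Interface()). It translates a call
// made through the value ABI frame and registers into a direct call of the
// underlying method, inserting the receiver as the first argument and
// converting between the two ABIs, then copies the results back out.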
964 func callMethod(ctxt *methodValue, frame unsafe.Pointer, retValid *bool, regs *abi.RegArgs) {
965 rcvr := ctxt.rcvr
966 rcvrType, valueFuncType, methodFn := methodReceiver("call", rcvr, ctxt.method)
967
968
969
970
971
972
973
974
975
976 _, _, valueABI := funcLayout(valueFuncType, nil)
977 valueFrame, valueRegs := frame, regs
978 methodFrameType, methodFramePool, methodABI := funcLayout(valueFuncType, rcvrType)
979
980
981
982 methodFrame := methodFramePool.Get().(unsafe.Pointer)
983 var methodRegs abi.RegArgs
984
985
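// Deposit the receiver into the method frame's first parameter slot;
// depending on the method ABI it lands either on the stack or in an
// integer, pointer, or floating-point register.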
986 switch st := methodABI.call.steps[0]; st.kind {
987 case abiStepStack:
988
989
990 storeRcvr(rcvr, methodFrame)
991 case abiStepPointer:
992
993 storeRcvr(rcvr, unsafe.Pointer(&methodRegs.Ptrs[st.ireg]))
994 fallthrough
995 case abiStepIntReg:
996 storeRcvr(rcvr, unsafe.Pointer(&methodRegs.Ints[st.ireg]))
997 case abiStepFloatReg:
998 storeRcvr(rcvr, unsafe.Pointer(&methodRegs.Floats[st.freg]))
999 default:
1000 panic("unknown ABI parameter kind")
1001 }
1002
1003
1004 for i, t := range valueFuncType.InSlice() {
1005 valueSteps := valueABI.call.stepsForValue(i)
1006 methodSteps := methodABI.call.stepsForValue(i + 1)
1007
1008
1009 if len(valueSteps) == 0 {
1010 if len(methodSteps) != 0 {
1011 panic("method ABI and value ABI do not align")
1012 }
1013 continue
1014 }
1015
1016
1017
1018
1019
1020
1021
1022
1023
1024
1025
1026 if vStep := valueSteps[0]; vStep.kind == abiStepStack {
1027 mStep := methodSteps[0]
1028
1029 if mStep.kind == abiStepStack {
1030 if vStep.size != mStep.size {
1031 panic("method ABI and value ABI do not align")
1032 }
1033 typedmemmove(t,
1034 add(methodFrame, mStep.stkOff, "precomputed stack offset"),
1035 add(valueFrame, vStep.stkOff, "precomputed stack offset"))
1036 continue
1037 }
1038
1039 for _, mStep := range methodSteps {
1040 from := add(valueFrame, vStep.stkOff+mStep.offset, "precomputed stack offset")
1041 switch mStep.kind {
1042 case abiStepPointer:
1043
1044 methodRegs.Ptrs[mStep.ireg] = *(*unsafe.Pointer)(from)
1045 fallthrough
1046 case abiStepIntReg:
1047 intToReg(&methodRegs, mStep.ireg, mStep.size, from)
1048 case abiStepFloatReg:
1049 floatToReg(&methodRegs, mStep.freg, mStep.size, from)
1050 default:
1051 panic("unexpected method step")
1052 }
1053 }
1054 continue
1055 }
1056
1057 if mStep := methodSteps[0]; mStep.kind == abiStepStack {
1058 for _, vStep := range valueSteps {
1059 to := add(methodFrame, mStep.stkOff+vStep.offset, "precomputed stack offset")
1060 switch vStep.kind {
1061 case abiStepPointer:
1062
1063 *(*unsafe.Pointer)(to) = valueRegs.Ptrs[vStep.ireg]
1064 case abiStepIntReg:
1065 intFromReg(valueRegs, vStep.ireg, vStep.size, to)
1066 case abiStepFloatReg:
1067 floatFromReg(valueRegs, vStep.freg, vStep.size, to)
1068 default:
1069 panic("unexpected value step")
1070 }
1071 }
1072 continue
1073 }
1074
1075 if len(valueSteps) != len(methodSteps) {
1076
1077
1078
1079 panic("method ABI and value ABI don't align")
1080 }
1081 for i, vStep := range valueSteps {
1082 mStep := methodSteps[i]
1083 if mStep.kind != vStep.kind {
1084 panic("method ABI and value ABI don't align")
1085 }
1086 switch vStep.kind {
1087 case abiStepPointer:
1088
1089 methodRegs.Ptrs[mStep.ireg] = valueRegs.Ptrs[vStep.ireg]
1090 fallthrough
1091 case abiStepIntReg:
1092 methodRegs.Ints[mStep.ireg] = valueRegs.Ints[vStep.ireg]
1093 case abiStepFloatReg:
1094 methodRegs.Floats[mStep.freg] = valueRegs.Floats[vStep.freg]
1095 default:
1096 panic("unexpected value step")
1097 }
1098 }
1099 }
1100
1101 methodFrameSize := methodFrameType.Size()
1102
1103
1104 methodFrameSize = align(methodFrameSize, goarch.PtrSize)
1105 methodFrameSize += methodABI.spill
1106
1107
1108 methodRegs.ReturnIsPtr = methodABI.outRegPtrs
1109
1110
1111
1112
1113 call(methodFrameType, methodFn, methodFrame, uint32(methodFrameType.Size()), uint32(methodABI.retOffset), uint32(methodFrameSize), &methodRegs)
1114
1115
1116
1117
1118
1119
1120
1121
1122
1123
1124 if valueRegs != nil {
1125 *valueRegs = methodRegs
1126 }
1127 if retSize := methodFrameType.Size() - methodABI.retOffset; retSize > 0 {
1128 valueRet := add(valueFrame, valueABI.retOffset, "valueFrame's size > retOffset")
1129 methodRet := add(methodFrame, methodABI.retOffset, "methodFrame's size > retOffset")
1130
1131 memmove(valueRet, methodRet, retSize)
1132 }
1133
1134
1135
1136 *retValid = true
1137
1138
1139
1140
1141 typedmemclr(methodFrameType, methodFrame)
1142 methodFramePool.Put(methodFrame)
1143
1144
1145 runtime.KeepAlive(ctxt)
1146
1147
1148
1149
1150 runtime.KeepAlive(valueRegs)
1151 }
1152
1153
1154 func funcName(f func([]Value) []Value) string {
1155 pc := *(*uintptr)(unsafe.Pointer(&f))
1156 rf := runtime.FuncForPC(pc)
1157 if rf != nil {
1158 return rf.Name()
1159 }
1160 return "closure"
1161 }
1162
1163
1164
1165 func (v Value) Cap() int {
1166
1167 if v.kind() == Slice {
1168 return (*unsafeheader.Slice)(v.ptr).Cap
1169 }
1170 return v.capNonSlice()
1171 }
1172
1173 func (v Value) capNonSlice() int {
1174 k := v.kind()
1175 switch k {
1176 case Array:
1177 return v.typ().Len()
1178 case Chan:
1179 return chancap(v.pointer())
1180 case Ptr:
1181 if v.typ().Elem().Kind() == abi.Array {
1182 return v.typ().Elem().Len()
1183 }
1184 panic("reflect: call of reflect.Value.Cap on ptr to non-array Value")
1185 }
1186 panic(&ValueError{"reflect.Value.Cap", v.kind()})
1187 }
1188
1189
1190
1191 func (v Value) Close() {
1192 v.mustBe(Chan)
1193 v.mustBeExported()
1194 chanclose(v.pointer())
1195 }
1196
1197
1198 func (v Value) CanComplex() bool {
1199 switch v.kind() {
1200 case Complex64, Complex128:
1201 return true
1202 default:
1203 return false
1204 }
1205 }
1206
1207
1208
1209 func (v Value) Complex() complex128 {
1210 k := v.kind()
1211 switch k {
1212 case Complex64:
1213 return complex128(*(*complex64)(v.ptr))
1214 case Complex128:
1215 return *(*complex128)(v.ptr)
1216 }
1217 panic(&ValueError{"reflect.Value.Complex", v.kind()})
1218 }
1219
1220 // Elem returns the value that the interface v contains
1221 // or that the pointer v points to.
1222 // It panics if v's Kind is not Interface or Pointer.
1223 // It returns the zero Value if v is nil.
1224 func (v Value) Elem() Value {
1225 k := v.kind()
1226 switch k {
1227 case Interface:
1228 var eface any
1229 if v.typ().NumMethod() == 0 {
1230 eface = *(*any)(v.ptr)
1231 } else {
1232 eface = (any)(*(*interface {
1233 M()
1234 })(v.ptr))
1235 }
1236 x := unpackEface(eface)
1237 if x.flag != 0 {
1238 x.flag |= v.flag.ro()
1239 }
1240 return x
1241 case Pointer:
1242 ptr := v.ptr
1243 if v.flag&flagIndir != 0 {
1244 if ifaceIndir(v.typ()) {
1245
1246
1247
1248
1249
1250
1251
1252
1253
1254
1255 if !verifyNotInHeapPtr(*(*uintptr)(ptr)) {
1256 panic("reflect: reflect.Value.Elem on an invalid notinheap pointer")
1257 }
1258 }
1259 ptr = *(*unsafe.Pointer)(ptr)
1260 }
1261
1262 if ptr == nil {
1263 return Value{}
1264 }
1265 tt := (*ptrType)(unsafe.Pointer(v.typ()))
1266 typ := tt.Elem
1267 fl := v.flag&flagRO | flagIndir | flagAddr
1268 fl |= flag(typ.Kind())
1269 return Value{typ, ptr, fl}
1270 }
1271 panic(&ValueError{"reflect.Value.Elem", v.kind()})
1272 }
1273
1274 // Field returns the i'th field of the struct v.
1275 // It panics if v's Kind is not Struct or i is out of range.
1276 func (v Value) Field(i int) Value {
1277 if v.kind() != Struct {
1278 panic(&ValueError{"reflect.Value.Field", v.kind()})
1279 }
1280 tt := (*structType)(unsafe.Pointer(v.typ()))
1281 if uint(i) >= uint(len(tt.Fields)) {
1282 panic("reflect: Field index out of range")
1283 }
1284 field := &tt.Fields[i]
1285 typ := field.Typ
1286
1287
1288 fl := v.flag&(flagStickyRO|flagIndir|flagAddr) | flag(typ.Kind())
1289
1290 if !field.Name.IsExported() {
1291 if field.Embedded() {
1292 fl |= flagEmbedRO
1293 } else {
1294 fl |= flagStickyRO
1295 }
1296 }
1297
1298
1299
1300
1301
1302 ptr := add(v.ptr, field.Offset, "same as non-reflect &v.field")
1303 return Value{typ, ptr, fl}
1304 }
1305
1306
1307
1308
1309 func (v Value) FieldByIndex(index []int) Value {
1310 if len(index) == 1 {
1311 return v.Field(index[0])
1312 }
1313 v.mustBe(Struct)
1314 for i, x := range index {
1315 if i > 0 {
1316 if v.Kind() == Pointer && v.typ().Elem().Kind() == abi.Struct {
1317 if v.IsNil() {
1318 panic("reflect: indirection through nil pointer to embedded struct")
1319 }
1320 v = v.Elem()
1321 }
1322 }
1323 v = v.Field(x)
1324 }
1325 return v
1326 }
1327
1328
1329
1330
1331
1332 func (v Value) FieldByIndexErr(index []int) (Value, error) {
1333 if len(index) == 1 {
1334 return v.Field(index[0]), nil
1335 }
1336 v.mustBe(Struct)
1337 for i, x := range index {
1338 if i > 0 {
1339 if v.Kind() == Ptr && v.typ().Elem().Kind() == abi.Struct {
1340 if v.IsNil() {
1341 return Value{}, errors.New("reflect: indirection through nil pointer to embedded struct field " + nameFor(v.typ().Elem()))
1342 }
1343 v = v.Elem()
1344 }
1345 }
1346 v = v.Field(x)
1347 }
1348 return v, nil
1349 }
1350
1351
1352
1353
1354 func (v Value) FieldByName(name string) Value {
1355 v.mustBe(Struct)
1356 if f, ok := toRType(v.typ()).FieldByName(name); ok {
1357 return v.FieldByIndex(f.Index)
1358 }
1359 return Value{}
1360 }
1361
1362
1363
1364
1365
1366 func (v Value) FieldByNameFunc(match func(string) bool) Value {
1367 if f, ok := toRType(v.typ()).FieldByNameFunc(match); ok {
1368 return v.FieldByIndex(f.Index)
1369 }
1370 return Value{}
1371 }
1372
1373
1374 func (v Value) CanFloat() bool {
1375 switch v.kind() {
1376 case Float32, Float64:
1377 return true
1378 default:
1379 return false
1380 }
1381 }
1382
1383
1384
1385 func (v Value) Float() float64 {
1386 k := v.kind()
1387 switch k {
1388 case Float32:
1389 return float64(*(*float32)(v.ptr))
1390 case Float64:
1391 return *(*float64)(v.ptr)
1392 }
1393 panic(&ValueError{"reflect.Value.Float", v.kind()})
1394 }
1395
1396 var uint8Type = rtypeOf(uint8(0))
1397
1398
1399
1400 func (v Value) Index(i int) Value {
1401 switch v.kind() {
1402 case Array:
1403 tt := (*arrayType)(unsafe.Pointer(v.typ()))
1404 if uint(i) >= uint(tt.Len) {
1405 panic("reflect: array index out of range")
1406 }
1407 typ := tt.Elem
1408 offset := uintptr(i) * typ.Size()
1409
1410
1411
1412
1413
1414
1415 val := add(v.ptr, offset, "same as &v[i], i < tt.len")
1416 fl := v.flag&(flagIndir|flagAddr) | v.flag.ro() | flag(typ.Kind())
1417 return Value{typ, val, fl}
1418
1419 case Slice:
1420
1421
1422 s := (*unsafeheader.Slice)(v.ptr)
1423 if uint(i) >= uint(s.Len) {
1424 panic("reflect: slice index out of range")
1425 }
1426 tt := (*sliceType)(unsafe.Pointer(v.typ()))
1427 typ := tt.Elem
1428 val := arrayAt(s.Data, i, typ.Size(), "i < s.Len")
1429 fl := flagAddr | flagIndir | v.flag.ro() | flag(typ.Kind())
1430 return Value{typ, val, fl}
1431
1432 case String:
1433 s := (*unsafeheader.String)(v.ptr)
1434 if uint(i) >= uint(s.Len) {
1435 panic("reflect: string index out of range")
1436 }
1437 p := arrayAt(s.Data, i, 1, "i < s.Len")
1438 fl := v.flag.ro() | flag(Uint8) | flagIndir
1439 return Value{uint8Type, p, fl}
1440 }
1441 panic(&ValueError{"reflect.Value.Index", v.kind()})
1442 }
1443
1444
1445 func (v Value) CanInt() bool {
1446 switch v.kind() {
1447 case Int, Int8, Int16, Int32, Int64:
1448 return true
1449 default:
1450 return false
1451 }
1452 }
1453
1454
1455
1456 func (v Value) Int() int64 {
1457 k := v.kind()
1458 p := v.ptr
1459 switch k {
1460 case Int:
1461 return int64(*(*int)(p))
1462 case Int8:
1463 return int64(*(*int8)(p))
1464 case Int16:
1465 return int64(*(*int16)(p))
1466 case Int32:
1467 return int64(*(*int32)(p))
1468 case Int64:
1469 return *(*int64)(p)
1470 }
1471 panic(&ValueError{"reflect.Value.Int", v.kind()})
1472 }
1473
1474
1475 func (v Value) CanInterface() bool {
1476 if v.flag == 0 {
1477 panic(&ValueError{"reflect.Value.CanInterface", Invalid})
1478 }
1479 return v.flag&flagRO == 0
1480 }
1481
1482
1483
1484
1485
1486
1487
1488
1489 func (v Value) Interface() (i any) {
1490 return valueInterface(v, true)
1491 }
1492
1493 func valueInterface(v Value, safe bool) any {
1494 if v.flag == 0 {
1495 panic(&ValueError{"reflect.Value.Interface", Invalid})
1496 }
1497 if safe && v.flag&flagRO != 0 {
1498
1499
1500
1501 panic("reflect.Value.Interface: cannot return value obtained from unexported field or method")
1502 }
1503 if v.flag&flagMethod != 0 {
1504 v = makeMethodValue("Interface", v)
1505 }
1506
1507 if v.kind() == Interface {
1508
1509
1510
1511 if v.NumMethod() == 0 {
1512 return *(*any)(v.ptr)
1513 }
1514 return *(*interface {
1515 M()
1516 })(v.ptr)
1517 }
1518
1519
1520 return packEface(v)
1521 }
1522
1523
1524
1525
1526
1527
1528
1529
1530
1531
1532 func (v Value) InterfaceData() [2]uintptr {
1533 v.mustBe(Interface)
1534
1535 escapes(v.ptr)
1536
1537
1538
1539
1540
1541 return *(*[2]uintptr)(v.ptr)
1542 }
1543
1544 // IsNil reports whether its argument v is nil. The argument must be
1545 // a chan, func, interface, map, pointer, or slice value; if it is
1546 // not, IsNil panics. Note that IsNil is not always equivalent to a
1547 // regular comparison with nil in Go. For example, if v was created
1548 // by calling ValueOf with an uninitialized interface variable i,
1549 // i==nil will be true but v.IsNil will panic as v will be the zero
1550 // Value.
1551 func (v Value) IsNil() bool {
1552 k := v.kind()
1553 switch k {
1554 case Chan, Func, Map, Pointer, UnsafePointer:
1555 if v.flag&flagMethod != 0 {
1556 return false
1557 }
1558 ptr := v.ptr
1559 if v.flag&flagIndir != 0 {
1560 ptr = *(*unsafe.Pointer)(ptr)
1561 }
1562 return ptr == nil
1563 case Interface, Slice:
1564
1565
1566 return *(*unsafe.Pointer)(v.ptr) == nil
1567 }
1568 panic(&ValueError{"reflect.Value.IsNil", v.kind()})
1569 }
1570
1571 // IsValid reports whether v represents a value.
1572 // It returns false if v is the zero Value.
1573 // If IsValid returns false, all other methods except String panic.
1574 // Most functions and methods never return an invalid Value.
1575 // If one does, its documentation states the conditions explicitly.
1576 func (v Value) IsValid() bool {
1577 return v.flag != 0
1578 }
1579
1580 // IsZero reports whether v is the zero value for its type.
1581 // It panics if the Value is invalid.
1582 func (v Value) IsZero() bool {
1583 switch v.kind() {
1584 case Bool:
1585 return !v.Bool()
1586 case Int, Int8, Int16, Int32, Int64:
1587 return v.Int() == 0
1588 case Uint, Uint8, Uint16, Uint32, Uint64, Uintptr:
1589 return v.Uint() == 0
1590 case Float32, Float64:
1591 return math.Float64bits(v.Float()) == 0
1592 case Complex64, Complex128:
1593 c := v.Complex()
1594 return math.Float64bits(real(c)) == 0 && math.Float64bits(imag(c)) == 0
1595 case Array:
1596
1597 if v.typ().Equal != nil && v.typ().Size() <= maxZero {
1598 if v.flag&flagIndir == 0 {
1599 return v.ptr == nil
1600 }
1601
1602
1603
1604 return v.typ().Equal(noescape(v.ptr), unsafe.Pointer(&zeroVal[0]))
1605 }
1606
1607 n := v.Len()
1608 for i := 0; i < n; i++ {
1609 if !v.Index(i).IsZero() {
1610 return false
1611 }
1612 }
1613 return true
1614 case Chan, Func, Interface, Map, Pointer, Slice, UnsafePointer:
1615 return v.IsNil()
1616 case String:
1617 return v.Len() == 0
1618 case Struct:
1619
1620 if v.typ().Equal != nil && v.typ().Size() <= maxZero {
1621 if v.flag&flagIndir == 0 {
1622 return v.ptr == nil
1623 }
1624
1625 return v.typ().Equal(noescape(v.ptr), unsafe.Pointer(&zeroVal[0]))
1626 }
1627
1628 n := v.NumField()
1629 for i := 0; i < n; i++ {
1630 if !v.Field(i).IsZero() {
1631 return false
1632 }
1633 }
1634 return true
1635 default:
1636
1637
1638 panic(&ValueError{"reflect.Value.IsZero", v.Kind()})
1639 }
1640 }
1641
1642 // SetZero sets v to be the zero value of v's type.
1643 // It panics if CanSet returns false.
1644 func (v Value) SetZero() {
1645 v.mustBeAssignable()
1646 switch v.kind() {
1647 case Bool:
1648 *(*bool)(v.ptr) = false
1649 case Int:
1650 *(*int)(v.ptr) = 0
1651 case Int8:
1652 *(*int8)(v.ptr) = 0
1653 case Int16:
1654 *(*int16)(v.ptr) = 0
1655 case Int32:
1656 *(*int32)(v.ptr) = 0
1657 case Int64:
1658 *(*int64)(v.ptr) = 0
1659 case Uint:
1660 *(*uint)(v.ptr) = 0
1661 case Uint8:
1662 *(*uint8)(v.ptr) = 0
1663 case Uint16:
1664 *(*uint16)(v.ptr) = 0
1665 case Uint32:
1666 *(*uint32)(v.ptr) = 0
1667 case Uint64:
1668 *(*uint64)(v.ptr) = 0
1669 case Uintptr:
1670 *(*uintptr)(v.ptr) = 0
1671 case Float32:
1672 *(*float32)(v.ptr) = 0
1673 case Float64:
1674 *(*float64)(v.ptr) = 0
1675 case Complex64:
1676 *(*complex64)(v.ptr) = 0
1677 case Complex128:
1678 *(*complex128)(v.ptr) = 0
1679 case String:
1680 *(*string)(v.ptr) = ""
1681 case Slice:
1682 *(*unsafeheader.Slice)(v.ptr) = unsafeheader.Slice{}
1683 case Interface:
1684 *(*[2]unsafe.Pointer)(v.ptr) = [2]unsafe.Pointer{}
1685 case Chan, Func, Map, Pointer, UnsafePointer:
1686 *(*unsafe.Pointer)(v.ptr) = nil
1687 case Array, Struct:
1688 typedmemclr(v.typ(), v.ptr)
1689 default:
1690
1691
1692 panic(&ValueError{"reflect.Value.SetZero", v.Kind()})
1693 }
1694 }
1695
1696 // Kind returns v's Kind.
1697 // If v is the zero Value (IsValid returns false), Kind returns Invalid.
1698 func (v Value) Kind() Kind {
1699 return v.kind()
1700 }
1701
1702 // Len returns v's length.
1703 // It panics if v's Kind is not Array, Chan, Map, Slice, String, or pointer to Array.
1704 func (v Value) Len() int {
1705
1706 if v.kind() == Slice {
1707 return (*unsafeheader.Slice)(v.ptr).Len
1708 }
1709 return v.lenNonSlice()
1710 }
1711
1712 func (v Value) lenNonSlice() int {
1713 switch k := v.kind(); k {
1714 case Array:
1715 tt := (*arrayType)(unsafe.Pointer(v.typ()))
1716 return int(tt.Len)
1717 case Chan:
1718 return chanlen(v.pointer())
1719 case Map:
1720 return maplen(v.pointer())
1721 case String:
1722
1723 return (*unsafeheader.String)(v.ptr).Len
1724 case Ptr:
1725 if v.typ().Elem().Kind() == abi.Array {
1726 return v.typ().Elem().Len()
1727 }
1728 panic("reflect: call of reflect.Value.Len on ptr to non-array Value")
1729 }
1730 panic(&ValueError{"reflect.Value.Len", v.kind()})
1731 }
1732
1733 var stringType = rtypeOf("")
1734
1735 // MapIndex returns the value associated with key in the map v.
1736 // It panics if v's Kind is not Map.
1737 // It returns the zero Value if key is not found in the map or if v represents a nil map.
1738 // As in Go, the key's value must be assignable to the map's key type.
1739 func (v Value) MapIndex(key Value) Value {
1740 v.mustBe(Map)
1741 tt := (*mapType)(unsafe.Pointer(v.typ()))
1742
1743
1744
1745
1746
1747
1748
1749
1750
1751 var e unsafe.Pointer
1752 if (tt.Key == stringType || key.kind() == String) && tt.Key == key.typ() && tt.Elem.Size() <= maxValSize {
1753 k := *(*string)(key.ptr)
1754 e = mapaccess_faststr(v.typ(), v.pointer(), k)
1755 } else {
1756 key = key.assignTo("reflect.Value.MapIndex", tt.Key, nil)
1757 var k unsafe.Pointer
1758 if key.flag&flagIndir != 0 {
1759 k = key.ptr
1760 } else {
1761 k = unsafe.Pointer(&key.ptr)
1762 }
1763 e = mapaccess(v.typ(), v.pointer(), k)
1764 }
1765 if e == nil {
1766 return Value{}
1767 }
1768 typ := tt.Elem
1769 fl := (v.flag | key.flag).ro()
1770 fl |= flag(typ.Kind())
1771 return copyVal(typ, fl, e)
1772 }
1773
1774
1775
1776
1777
1778 func (v Value) MapKeys() []Value {
1779 v.mustBe(Map)
1780 tt := (*mapType)(unsafe.Pointer(v.typ()))
1781 keyType := tt.Key
1782
1783 fl := v.flag.ro() | flag(keyType.Kind())
1784
1785 m := v.pointer()
1786 mlen := int(0)
1787 if m != nil {
1788 mlen = maplen(m)
1789 }
1790 var it hiter
1791 mapiterinit(v.typ(), m, &it)
1792 a := make([]Value, mlen)
1793 var i int
1794 for i = 0; i < len(a); i++ {
1795 key := mapiterkey(&it)
1796 if key == nil {
1797
1798
1799
1800 break
1801 }
1802 a[i] = copyVal(keyType, fl, key)
1803 mapiternext(&it)
1804 }
1805 return a[:i]
1806 }
1807
1808 // hiter's structure matches runtime.hiter's structure.
1809 // Having a clone here allows us to embed a map iterator
1810 // inside type MapIter so that MapIters can be re-used
1811 // without doing any allocations.
1812 type hiter struct {
1813 key unsafe.Pointer
1814 elem unsafe.Pointer
1815 t unsafe.Pointer
1816 h unsafe.Pointer
1817 buckets unsafe.Pointer
1818 bptr unsafe.Pointer
1819 overflow *[]unsafe.Pointer
1820 oldoverflow *[]unsafe.Pointer
1821 startBucket uintptr
1822 offset uint8
1823 wrapped bool
1824 B uint8
1825 i uint8
1826 bucket uintptr
1827 checkBucket uintptr
1828 }
1829
1830 func (h *hiter) initialized() bool {
1831 return h.t != nil
1832 }
1833
1834 // MapIter is an iterator for ranging over a map.
1835 // See Value.MapRange.
1836 type MapIter struct {
1837 m Value
1838 hiter hiter
1839 }
1840
1841
1842 func (iter *MapIter) Key() Value {
1843 if !iter.hiter.initialized() {
1844 panic("MapIter.Key called before Next")
1845 }
1846 iterkey := mapiterkey(&iter.hiter)
1847 if iterkey == nil {
1848 panic("MapIter.Key called on exhausted iterator")
1849 }
1850
1851 t := (*mapType)(unsafe.Pointer(iter.m.typ()))
1852 ktype := t.Key
1853 return copyVal(ktype, iter.m.flag.ro()|flag(ktype.Kind()), iterkey)
1854 }
1855
1856
1857
1858
1859
1860 func (v Value) SetIterKey(iter *MapIter) {
1861 if !iter.hiter.initialized() {
1862 panic("reflect: Value.SetIterKey called before Next")
1863 }
1864 iterkey := mapiterkey(&iter.hiter)
1865 if iterkey == nil {
1866 panic("reflect: Value.SetIterKey called on exhausted iterator")
1867 }
1868
1869 v.mustBeAssignable()
1870 var target unsafe.Pointer
1871 if v.kind() == Interface {
1872 target = v.ptr
1873 }
1874
1875 t := (*mapType)(unsafe.Pointer(iter.m.typ()))
1876 ktype := t.Key
1877
1878 iter.m.mustBeExported()
1879 key := Value{ktype, iterkey, iter.m.flag | flag(ktype.Kind()) | flagIndir}
1880 key = key.assignTo("reflect.MapIter.SetKey", v.typ(), target)
1881 typedmemmove(v.typ(), v.ptr, key.ptr)
1882 }
1883
1884
1885 func (iter *MapIter) Value() Value {
1886 if !iter.hiter.initialized() {
1887 panic("MapIter.Value called before Next")
1888 }
1889 iterelem := mapiterelem(&iter.hiter)
1890 if iterelem == nil {
1891 panic("MapIter.Value called on exhausted iterator")
1892 }
1893
1894 t := (*mapType)(unsafe.Pointer(iter.m.typ()))
1895 vtype := t.Elem
1896 return copyVal(vtype, iter.m.flag.ro()|flag(vtype.Kind()), iterelem)
1897 }
1898
1899
1900
1901
1902
1903 func (v Value) SetIterValue(iter *MapIter) {
1904 if !iter.hiter.initialized() {
1905 panic("reflect: Value.SetIterValue called before Next")
1906 }
1907 iterelem := mapiterelem(&iter.hiter)
1908 if iterelem == nil {
1909 panic("reflect: Value.SetIterValue called on exhausted iterator")
1910 }
1911
1912 v.mustBeAssignable()
1913 var target unsafe.Pointer
1914 if v.kind() == Interface {
1915 target = v.ptr
1916 }
1917
1918 t := (*mapType)(unsafe.Pointer(iter.m.typ()))
1919 vtype := t.Elem
1920
1921 iter.m.mustBeExported()
1922 elem := Value{vtype, iterelem, iter.m.flag | flag(vtype.Kind()) | flagIndir}
1923 elem = elem.assignTo("reflect.MapIter.SetValue", v.typ(), target)
1924 typedmemmove(v.typ(), v.ptr, elem.ptr)
1925 }
1926
1927
1928
1929
1930 func (iter *MapIter) Next() bool {
1931 if !iter.m.IsValid() {
1932 panic("MapIter.Next called on an iterator that does not have an associated map Value")
1933 }
1934 if !iter.hiter.initialized() {
1935 mapiterinit(iter.m.typ(), iter.m.pointer(), &iter.hiter)
1936 } else {
1937 if mapiterkey(&iter.hiter) == nil {
1938 panic("MapIter.Next called on exhausted iterator")
1939 }
1940 mapiternext(&iter.hiter)
1941 }
1942 return mapiterkey(&iter.hiter) != nil
1943 }
1944
1945
1946
1947
1948
1949 func (iter *MapIter) Reset(v Value) {
1950 if v.IsValid() {
1951 v.mustBe(Map)
1952 }
1953 iter.m = v
1954 iter.hiter = hiter{}
1955 }
1956
1957
1958
1959
1960
1961
1962
1963
1964
1965
1966
1967
1968
1969
1970
1971
1972 func (v Value) MapRange() *MapIter {
1973
1974
1975
1976
1977 if v.kind() != Map {
1978 v.panicNotMap()
1979 }
1980 return &MapIter{m: v}
1981 }
1982
1983
1984
1985
1986
1987
1988 func (f flag) panicNotMap() {
1989 f.mustBe(Map)
1990 }
1991
1992
1993
1994 func copyVal(typ *abi.Type, fl flag, ptr unsafe.Pointer) Value {
1995 if typ.IfaceIndir() {
1996
1997
1998 c := unsafe_New(typ)
1999 typedmemmove(typ, c, ptr)
2000 return Value{typ, c, fl | flagIndir}
2001 }
2002 return Value{typ, *(*unsafe.Pointer)(ptr), fl}
2003 }
2004
2005
2006
2007
2008
2009 func (v Value) Method(i int) Value {
2010 if v.typ() == nil {
2011 panic(&ValueError{"reflect.Value.Method", Invalid})
2012 }
2013 if v.flag&flagMethod != 0 || uint(i) >= uint(toRType(v.typ()).NumMethod()) {
2014 panic("reflect: Method index out of range")
2015 }
2016 if v.typ().Kind() == abi.Interface && v.IsNil() {
2017 panic("reflect: Method on nil interface value")
2018 }
2019 fl := v.flag.ro() | (v.flag & flagIndir)
2020 fl |= flag(Func)
2021 fl |= flag(i)<<flagMethodShift | flagMethod
2022 return Value{v.typ(), v.ptr, fl}
2023 }
2024
2025
2026
2027
2028
2029
2030 func (v Value) NumMethod() int {
2031 if v.typ() == nil {
2032 panic(&ValueError{"reflect.Value.NumMethod", Invalid})
2033 }
2034 if v.flag&flagMethod != 0 {
2035 return 0
2036 }
2037 return toRType(v.typ()).NumMethod()
2038 }
2039
2040
2041
2042
2043
2044
2045 func (v Value) MethodByName(name string) Value {
2046 if v.typ() == nil {
2047 panic(&ValueError{"reflect.Value.MethodByName", Invalid})
2048 }
2049 if v.flag&flagMethod != 0 {
2050 return Value{}
2051 }
2052 m, ok := toRType(v.typ()).MethodByName(name)
2053 if !ok {
2054 return Value{}
2055 }
2056 return v.Method(m.Index)
2057 }
2058
2059
2060
2061 func (v Value) NumField() int {
2062 v.mustBe(Struct)
2063 tt := (*structType)(unsafe.Pointer(v.typ()))
2064 return len(tt.Fields)
2065 }
2066
2067
2068
2069 func (v Value) OverflowComplex(x complex128) bool {
2070 k := v.kind()
2071 switch k {
2072 case Complex64:
2073 return overflowFloat32(real(x)) || overflowFloat32(imag(x))
2074 case Complex128:
2075 return false
2076 }
2077 panic(&ValueError{"reflect.Value.OverflowComplex", v.kind()})
2078 }
2079
2080
2081
2082 func (v Value) OverflowFloat(x float64) bool {
2083 k := v.kind()
2084 switch k {
2085 case Float32:
2086 return overflowFloat32(x)
2087 case Float64:
2088 return false
2089 }
2090 panic(&ValueError{"reflect.Value.OverflowFloat", v.kind()})
2091 }
2092
2093 func overflowFloat32(x float64) bool {
2094 if x < 0 {
2095 x = -x
2096 }
2097 return math.MaxFloat32 < x && x <= math.MaxFloat64
2098 }
2099
2100
2101
2102 func (v Value) OverflowInt(x int64) bool {
2103 k := v.kind()
2104 switch k {
2105 case Int, Int8, Int16, Int32, Int64:
2106 bitSize := v.typ().Size() * 8
2107 trunc := (x << (64 - bitSize)) >> (64 - bitSize)
2108 return x != trunc
2109 }
2110 panic(&ValueError{"reflect.Value.OverflowInt", v.kind()})
2111 }
2112
2113
2114
2115 func (v Value) OverflowUint(x uint64) bool {
2116 k := v.kind()
2117 switch k {
2118 case Uint, Uintptr, Uint8, Uint16, Uint32, Uint64:
2119 bitSize := v.typ_.Size() * 8
2120 trunc := (x << (64 - bitSize)) >> (64 - bitSize)
2121 return x != trunc
2122 }
2123 panic(&ValueError{"reflect.Value.OverflowUint", v.kind()})
2124 }
2125
2126
2127
2128
2129
2130
2131
2132
2133
2134
2135
2136
2137
2138
2139
2140
2141
2142
2143
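// Pointer returns v's value as a uintptr.
// It panics if v's Kind is not Chan, Func, Map, Pointer, Slice, or UnsafePointer.
// If v's Kind is Func, the returned pointer is an underlying code pointer, but not
// necessarily enough to identify a single function uniquely; it is zero if and only
// if v is a nil func Value.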
2144 func (v Value) Pointer() uintptr {
2145
2146 escapes(v.ptr)
2147
2148 k := v.kind()
2149 switch k {
2150 case Pointer:
2151 if v.typ().PtrBytes == 0 {
2152 val := *(*uintptr)(v.ptr)
2153
2154
2155 if !verifyNotInHeapPtr(val) {
2156 panic("reflect: reflect.Value.Pointer on an invalid notinheap pointer")
2157 }
2158 return val
2159 }
2160 fallthrough
2161 case Chan, Map, UnsafePointer:
2162 return uintptr(v.pointer())
2163 case Func:
2164 if v.flag&flagMethod != 0 {
2165
2166
2167
2168
2169
2170
2171 return methodValueCallCodePtr()
2172 }
2173 p := v.pointer()
2174
2175
2176 if p != nil {
2177 p = *(*unsafe.Pointer)(p)
2178 }
2179 return uintptr(p)
2180
2181 case Slice:
2182 return uintptr((*unsafeheader.Slice)(v.ptr).Data)
2183 }
2184 panic(&ValueError{"reflect.Value.Pointer", v.kind()})
2185 }
2186
2187
2188
2189
2190
2191
2192 func (v Value) Recv() (x Value, ok bool) {
2193 v.mustBe(Chan)
2194 v.mustBeExported()
2195 return v.recv(false)
2196 }
2197
2198
2199
2200 func (v Value) recv(nb bool) (val Value, ok bool) {
2201 tt := (*chanType)(unsafe.Pointer(v.typ()))
2202 if ChanDir(tt.Dir)&RecvDir == 0 {
2203 panic("reflect: recv on send-only channel")
2204 }
2205 t := tt.Elem
2206 val = Value{t, nil, flag(t.Kind())}
2207 var p unsafe.Pointer
2208 if ifaceIndir(t) {
2209 p = unsafe_New(t)
2210 val.ptr = p
2211 val.flag |= flagIndir
2212 } else {
2213 p = unsafe.Pointer(&val.ptr)
2214 }
2215 selected, ok := chanrecv(v.pointer(), nb, p)
2216 if !selected {
2217 val = Value{}
2218 }
2219 return
2220 }
2221
2222
2223
2224
2225 func (v Value) Send(x Value) {
2226 v.mustBe(Chan)
2227 v.mustBeExported()
2228 v.send(x, false)
2229 }
2230
2231
2232
2233 func (v Value) send(x Value, nb bool) (selected bool) {
2234 tt := (*chanType)(unsafe.Pointer(v.typ()))
2235 if ChanDir(tt.Dir)&SendDir == 0 {
2236 panic("reflect: send on recv-only channel")
2237 }
2238 x.mustBeExported()
2239 x = x.assignTo("reflect.Value.Send", tt.Elem, nil)
2240 var p unsafe.Pointer
2241 if x.flag&flagIndir != 0 {
2242 p = x.ptr
2243 } else {
2244 p = unsafe.Pointer(&x.ptr)
2245 }
2246 return chansend(v.pointer(), p, nb)
2247 }
2248
2249 // Set assigns x to the value v.
2250 // It panics if CanSet returns false.
2251 // As in Go, x's value must be assignable to v's type and
2252 // must not be derived from use of unexported struct fields.
2253 func (v Value) Set(x Value) {
2254 v.mustBeAssignable()
2255 x.mustBeExported()
2256 var target unsafe.Pointer
2257 if v.kind() == Interface {
2258 target = v.ptr
2259 }
2260 x = x.assignTo("reflect.Set", v.typ(), target)
2261 if x.flag&flagIndir != 0 {
2262 if x.ptr == unsafe.Pointer(&zeroVal[0]) {
2263 typedmemclr(v.typ(), v.ptr)
2264 } else {
2265 typedmemmove(v.typ(), v.ptr, x.ptr)
2266 }
2267 } else {
2268 *(*unsafe.Pointer)(v.ptr) = x.ptr
2269 }
2270 }
2271
2272
2273
2274 func (v Value) SetBool(x bool) {
2275 v.mustBeAssignable()
2276 v.mustBe(Bool)
2277 *(*bool)(v.ptr) = x
2278 }
2279
2280
2281
2282 func (v Value) SetBytes(x []byte) {
2283 v.mustBeAssignable()
2284 v.mustBe(Slice)
2285 if toRType(v.typ()).Elem().Kind() != Uint8 {
2286 panic("reflect.Value.SetBytes of non-byte slice")
2287 }
2288 *(*[]byte)(v.ptr) = x
2289 }
2290
2291
2292
2293 func (v Value) setRunes(x []rune) {
2294 v.mustBeAssignable()
2295 v.mustBe(Slice)
2296 if v.typ().Elem().Kind() != abi.Int32 {
2297 panic("reflect.Value.setRunes of non-rune slice")
2298 }
2299 *(*[]rune)(v.ptr) = x
2300 }
2301
2302
2303
2304 func (v Value) SetComplex(x complex128) {
2305 v.mustBeAssignable()
2306 switch k := v.kind(); k {
2307 default:
2308 panic(&ValueError{"reflect.Value.SetComplex", v.kind()})
2309 case Complex64:
2310 *(*complex64)(v.ptr) = complex64(x)
2311 case Complex128:
2312 *(*complex128)(v.ptr) = x
2313 }
2314 }
2315
2316
2317
2318 func (v Value) SetFloat(x float64) {
2319 v.mustBeAssignable()
2320 switch k := v.kind(); k {
2321 default:
2322 panic(&ValueError{"reflect.Value.SetFloat", v.kind()})
2323 case Float32:
2324 *(*float32)(v.ptr) = float32(x)
2325 case Float64:
2326 *(*float64)(v.ptr) = x
2327 }
2328 }
2329
2330
2331
2332 func (v Value) SetInt(x int64) {
2333 v.mustBeAssignable()
2334 switch k := v.kind(); k {
2335 default:
2336 panic(&ValueError{"reflect.Value.SetInt", v.kind()})
2337 case Int:
2338 *(*int)(v.ptr) = int(x)
2339 case Int8:
2340 *(*int8)(v.ptr) = int8(x)
2341 case Int16:
2342 *(*int16)(v.ptr) = int16(x)
2343 case Int32:
2344 *(*int32)(v.ptr) = int32(x)
2345 case Int64:
2346 *(*int64)(v.ptr) = x
2347 }
2348 }
2349
2350
2351
2352
2353 func (v Value) SetLen(n int) {
2354 v.mustBeAssignable()
2355 v.mustBe(Slice)
2356 s := (*unsafeheader.Slice)(v.ptr)
2357 if uint(n) > uint(s.Cap) {
2358 panic("reflect: slice length out of range in SetLen")
2359 }
2360 s.Len = n
2361 }
2362
2363
2364
2365
2366 func (v Value) SetCap(n int) {
2367 v.mustBeAssignable()
2368 v.mustBe(Slice)
2369 s := (*unsafeheader.Slice)(v.ptr)
2370 if n < s.Len || n > s.Cap {
2371 panic("reflect: slice capacity out of range in SetCap")
2372 }
2373 s.Cap = n
2374 }
2375
2376
2377
2378
2379
2380
2381
2382 func (v Value) SetMapIndex(key, elem Value) {
2383 v.mustBe(Map)
2384 v.mustBeExported()
2385 key.mustBeExported()
2386 tt := (*mapType)(unsafe.Pointer(v.typ()))
2387
2388 if (tt.Key == stringType || key.kind() == String) && tt.Key == key.typ() && tt.Elem.Size() <= maxValSize {
2389 k := *(*string)(key.ptr)
2390 if elem.typ() == nil {
2391 mapdelete_faststr(v.typ(), v.pointer(), k)
2392 return
2393 }
2394 elem.mustBeExported()
2395 elem = elem.assignTo("reflect.Value.SetMapIndex", tt.Elem, nil)
2396 var e unsafe.Pointer
2397 if elem.flag&flagIndir != 0 {
2398 e = elem.ptr
2399 } else {
2400 e = unsafe.Pointer(&elem.ptr)
2401 }
2402 mapassign_faststr(v.typ(), v.pointer(), k, e)
2403 return
2404 }
2405
2406 key = key.assignTo("reflect.Value.SetMapIndex", tt.Key, nil)
2407 var k unsafe.Pointer
2408 if key.flag&flagIndir != 0 {
2409 k = key.ptr
2410 } else {
2411 k = unsafe.Pointer(&key.ptr)
2412 }
2413 if elem.typ() == nil {
2414 mapdelete(v.typ(), v.pointer(), k)
2415 return
2416 }
2417 elem.mustBeExported()
2418 elem = elem.assignTo("reflect.Value.SetMapIndex", tt.Elem, nil)
2419 var e unsafe.Pointer
2420 if elem.flag&flagIndir != 0 {
2421 e = elem.ptr
2422 } else {
2423 e = unsafe.Pointer(&elem.ptr)
2424 }
2425 mapassign(v.typ(), v.pointer(), k, e)
2426 }
2427
2428
2429
2430 func (v Value) SetUint(x uint64) {
2431 v.mustBeAssignable()
2432 switch k := v.kind(); k {
2433 default:
2434 panic(&ValueError{"reflect.Value.SetUint", v.kind()})
2435 case Uint:
2436 *(*uint)(v.ptr) = uint(x)
2437 case Uint8:
2438 *(*uint8)(v.ptr) = uint8(x)
2439 case Uint16:
2440 *(*uint16)(v.ptr) = uint16(x)
2441 case Uint32:
2442 *(*uint32)(v.ptr) = uint32(x)
2443 case Uint64:
2444 *(*uint64)(v.ptr) = x
2445 case Uintptr:
2446 *(*uintptr)(v.ptr) = uintptr(x)
2447 }
2448 }
2449
2450
2451
2452 func (v Value) SetPointer(x unsafe.Pointer) {
2453 v.mustBeAssignable()
2454 v.mustBe(UnsafePointer)
2455 *(*unsafe.Pointer)(v.ptr) = x
2456 }
2457
2458
2459
2460 func (v Value) SetString(x string) {
2461 v.mustBeAssignable()
2462 v.mustBe(String)
2463 *(*string)(v.ptr) = x
2464 }
2465
2466
2467
2468
2469 func (v Value) Slice(i, j int) Value {
2470 var (
2471 cap int
2472 typ *sliceType
2473 base unsafe.Pointer
2474 )
2475 switch kind := v.kind(); kind {
2476 default:
2477 panic(&ValueError{"reflect.Value.Slice", v.kind()})
2478
2479 case Array:
2480 if v.flag&flagAddr == 0 {
2481 panic("reflect.Value.Slice: slice of unaddressable array")
2482 }
2483 tt := (*arrayType)(unsafe.Pointer(v.typ()))
2484 cap = int(tt.Len)
2485 typ = (*sliceType)(unsafe.Pointer(tt.Slice))
2486 base = v.ptr
2487
2488 case Slice:
2489 typ = (*sliceType)(unsafe.Pointer(v.typ()))
2490 s := (*unsafeheader.Slice)(v.ptr)
2491 base = s.Data
2492 cap = s.Cap
2493
2494 case String:
2495 s := (*unsafeheader.String)(v.ptr)
2496 if i < 0 || j < i || j > s.Len {
2497 panic("reflect.Value.Slice: string slice index out of bounds")
2498 }
2499 var t unsafeheader.String
2500 if i < s.Len {
2501 t = unsafeheader.String{Data: arrayAt(s.Data, i, 1, "i < s.Len"), Len: j - i}
2502 }
2503 return Value{v.typ(), unsafe.Pointer(&t), v.flag}
2504 }
2505
2506 if i < 0 || j < i || j > cap {
2507 panic("reflect.Value.Slice: slice index out of bounds")
2508 }
2509
2510
2511 var x []unsafe.Pointer
2512
2513
2514 s := (*unsafeheader.Slice)(unsafe.Pointer(&x))
2515 s.Len = j - i
2516 s.Cap = cap - i
2517 if cap-i > 0 {
2518 s.Data = arrayAt(base, i, typ.Elem.Size(), "i < cap")
2519 } else {
2520
2521 s.Data = base
2522 }
2523
2524 fl := v.flag.ro() | flagIndir | flag(Slice)
2525 return Value{typ.Common(), unsafe.Pointer(&x), fl}
2526 }
2527
2528
2529
2530
2531 func (v Value) Slice3(i, j, k int) Value {
2532 var (
2533 cap int
2534 typ *sliceType
2535 base unsafe.Pointer
2536 )
2537 switch kind := v.kind(); kind {
2538 default:
2539 panic(&ValueError{"reflect.Value.Slice3", v.kind()})
2540
2541 case Array:
2542 if v.flag&flagAddr == 0 {
2543 panic("reflect.Value.Slice3: slice of unaddressable array")
2544 }
2545 tt := (*arrayType)(unsafe.Pointer(v.typ()))
2546 cap = int(tt.Len)
2547 typ = (*sliceType)(unsafe.Pointer(tt.Slice))
2548 base = v.ptr
2549
2550 case Slice:
2551 typ = (*sliceType)(unsafe.Pointer(v.typ()))
2552 s := (*unsafeheader.Slice)(v.ptr)
2553 base = s.Data
2554 cap = s.Cap
2555 }
2556
2557 if i < 0 || j < i || k < j || k > cap {
2558 panic("reflect.Value.Slice3: slice index out of bounds")
2559 }
2560
2561
2562
2563 var x []unsafe.Pointer
2564
2565
2566 s := (*unsafeheader.Slice)(unsafe.Pointer(&x))
2567 s.Len = j - i
2568 s.Cap = k - i
2569 if k-i > 0 {
2570 s.Data = arrayAt(base, i, typ.Elem.Size(), "i < k <= cap")
2571 } else {
2572
2573 s.Data = base
2574 }
2575
2576 fl := v.flag.ro() | flagIndir | flag(Slice)
2577 return Value{typ.Common(), unsafe.Pointer(&x), fl}
2578 }
2579
2580
2581
2582
2583
2584
2585
2586 func (v Value) String() string {
2587
2588 if v.kind() == String {
2589 return *(*string)(v.ptr)
2590 }
2591 return v.stringNonString()
2592 }
2593
2594 func (v Value) stringNonString() string {
2595 if v.kind() == Invalid {
2596 return "<invalid Value>"
2597 }
2598
2599
2600 return "<" + v.Type().String() + " Value>"
2601 }
2602
2603
2604
2605
2606
2607
2608 func (v Value) TryRecv() (x Value, ok bool) {
2609 v.mustBe(Chan)
2610 v.mustBeExported()
2611 return v.recv(true)
2612 }
2613
2614
2615
2616
2617
2618 func (v Value) TrySend(x Value) bool {
2619 v.mustBe(Chan)
2620 v.mustBeExported()
2621 return v.send(x, true)
2622 }
2623
2624 // Type returns v's type.
2625 func (v Value) Type() Type {
2626 if v.flag != 0 && v.flag&flagMethod == 0 {
2627 return (*rtype)(noescape(unsafe.Pointer(v.typ_)))
2628 }
2629 return v.typeSlow()
2630 }
2631
2632 func (v Value) typeSlow() Type {
2633 if v.flag == 0 {
2634 panic(&ValueError{"reflect.Value.Type", Invalid})
2635 }
2636
2637 typ := v.typ()
2638 if v.flag&flagMethod == 0 {
2639 return toRType(v.typ())
2640 }
2641
2642
2643
2644 i := int(v.flag) >> flagMethodShift
2645 if v.typ().Kind() == abi.Interface {
2646
2647 tt := (*interfaceType)(unsafe.Pointer(typ))
2648 if uint(i) >= uint(len(tt.Methods)) {
2649 panic("reflect: internal error: invalid method index")
2650 }
2651 m := &tt.Methods[i]
2652 return toRType(typeOffFor(typ, m.Typ))
2653 }
2654
2655 ms := typ.ExportedMethods()
2656 if uint(i) >= uint(len(ms)) {
2657 panic("reflect: internal error: invalid method index")
2658 }
2659 m := ms[i]
2660 return toRType(typeOffFor(typ, m.Mtyp))
2661 }
2662
2663
2664 func (v Value) CanUint() bool {
2665 switch v.kind() {
2666 case Uint, Uint8, Uint16, Uint32, Uint64, Uintptr:
2667 return true
2668 default:
2669 return false
2670 }
2671 }
2672
2673
2674
2675 func (v Value) Uint() uint64 {
2676 k := v.kind()
2677 p := v.ptr
2678 switch k {
2679 case Uint:
2680 return uint64(*(*uint)(p))
2681 case Uint8:
2682 return uint64(*(*uint8)(p))
2683 case Uint16:
2684 return uint64(*(*uint16)(p))
2685 case Uint32:
2686 return uint64(*(*uint32)(p))
2687 case Uint64:
2688 return *(*uint64)(p)
2689 case Uintptr:
2690 return uint64(*(*uintptr)(p))
2691 }
2692 panic(&ValueError{"reflect.Value.Uint", v.kind()})
2693 }
2694
2695
2696
2697
2698
2699
2700
2701
2702
2703
2704 func (v Value) UnsafeAddr() uintptr {
2705 if v.typ() == nil {
2706 panic(&ValueError{"reflect.Value.UnsafeAddr", Invalid})
2707 }
2708 if v.flag&flagAddr == 0 {
2709 panic("reflect.Value.UnsafeAddr of unaddressable value")
2710 }
2711
2712 escapes(v.ptr)
2713 return uintptr(v.ptr)
2714 }
2715
2716
2717
2718
2719
2720
2721
2722
2723
2724
2725
2726
2727 func (v Value) UnsafePointer() unsafe.Pointer {
2728 k := v.kind()
2729 switch k {
2730 case Pointer:
2731 if v.typ().PtrBytes == 0 {
2732
2733
2734 if !verifyNotInHeapPtr(*(*uintptr)(v.ptr)) {
2735 panic("reflect: reflect.Value.UnsafePointer on an invalid notinheap pointer")
2736 }
2737 return *(*unsafe.Pointer)(v.ptr)
2738 }
2739 fallthrough
2740 case Chan, Map, UnsafePointer:
2741 return v.pointer()
2742 case Func:
2743 if v.flag&flagMethod != 0 {
2744
2745
2746
2747
2748
2749
2750 code := methodValueCallCodePtr()
2751 return *(*unsafe.Pointer)(unsafe.Pointer(&code))
2752 }
2753 p := v.pointer()
2754
2755
2756 if p != nil {
2757 p = *(*unsafe.Pointer)(p)
2758 }
2759 return p
2760
2761 case Slice:
2762 return (*unsafeheader.Slice)(v.ptr).Data
2763 }
2764 panic(&ValueError{"reflect.Value.UnsafePointer", v.kind()})
2765 }
2766
2767
2768
2769
2770
2771
2772
2773
2774
2775 type StringHeader struct {
2776 Data uintptr
2777 Len int
2778 }
2779
2780
2781
2782
2783
2784
2785
2786
2787
2788 type SliceHeader struct {
2789 Data uintptr
2790 Len int
2791 Cap int
2792 }
2793
2794 func typesMustMatch(what string, t1, t2 Type) {
2795 if t1 != t2 {
2796 panic(what + ": " + t1.String() + " != " + t2.String())
2797 }
2798 }
2799
2800
2801
2802
2803
2804
2805
2806
2807 func arrayAt(p unsafe.Pointer, i int, eltSize uintptr, whySafe string) unsafe.Pointer {
2808 return add(p, uintptr(i)*eltSize, "i < len")
2809 }
2810
2811 // Grow increases the slice's capacity, if necessary, to guarantee space for
2812 // another n elements. After Grow(n), at least n elements can be appended
2813 // to the slice without another allocation.
2814 //
2815 // It panics if v's Kind is not a Slice or if n is negative or too large to
2816 // allocate the memory.
2817 func (v Value) Grow(n int) {
2818 v.mustBeAssignable()
2819 v.mustBe(Slice)
2820 v.grow(n)
2821 }
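// Illustrative usage sketch (not part of the original source), assuming a
// caller that imports "reflect":
//
//	var buf []int
//	v := reflect.ValueOf(&buf).Elem() // must be addressable, or Grow panics
//	v.Grow(8)                         // cap(buf) is now at least 8; len(buf) is still 0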
2822
2823 // grow implements Grow, assuming the kind and assignability checks have already been done.
2824 func (v Value) grow(n int) {
2825 p := (*unsafeheader.Slice)(v.ptr)
2826 switch {
2827 case n < 0:
2828 panic("reflect.Value.Grow: negative len")
2829 case p.Len+n < 0:
2830 panic("reflect.Value.Grow: slice overflow")
2831 case p.Len+n > p.Cap:
2832 t := v.typ().Elem()
2833 *p = growslice(t, *p, n)
2834 }
2835 }
2836
2837 // extendSlice extends a slice by n elements.
2838 //
2839 // Unlike Value.grow, which only increases capacity and leaves the
2840 // length unchanged, extendSlice returns a new slice value with the
2841 // length incremented by n.
2842
2843 func (v Value) extendSlice(n int) Value {
2844 v.mustBeExported()
2845 v.mustBe(Slice)
2846 // Work on a shallow copy of the slice header so the caller's slice
2847 // value is not modified when we grow and extend it.
2848 sh := *(*unsafeheader.Slice)(v.ptr)
2849 s := &sh
2850 v.ptr = unsafe.Pointer(s)
2851 v.flag = flagIndir | flag(Slice)
2852
2853 v.grow(n)
2854 s.Len += n
2855 return v
2856 }
2857
2858 // Clear clears the contents of a map or zeroes the contents of a slice.
2859 //
2860 // It panics if v's Kind is not Map or Slice.
2861 func (v Value) Clear() {
2862 switch v.Kind() {
2863 case Slice:
2864 sh := *(*unsafeheader.Slice)(v.ptr)
2865 st := (*sliceType)(unsafe.Pointer(v.typ()))
2866 typedarrayclear(st.Elem, sh.Data, sh.Len)
2867 case Map:
2868 mapclear(v.typ(), v.pointer())
2869 default:
2870 panic(&ValueError{"reflect.Value.Clear", v.Kind()})
2871 }
2872 }
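// Illustrative usage sketch (not part of the original source), assuming a
// caller that imports "reflect":
//
//	m := map[string]int{"a": 1}
//	reflect.ValueOf(m).Clear() // deletes every entry; len(m) == 0
//
//	s := []int{1, 2, 3}
//	reflect.ValueOf(s).Clear() // zeroes the elements in place; s is now [0 0 0]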
2873
2874 // Append appends the values x to a slice s and returns the resulting slice.
2875 // As in Go, each x's value must be assignable to the slice's element type.
2876 func Append(s Value, x ...Value) Value {
2877 s.mustBe(Slice)
2878 n := s.Len()
2879 s = s.extendSlice(len(x))
2880 for i, v := range x {
2881 s.Index(n + i).Set(v)
2882 }
2883 return s
2884 }
2885
2886 // AppendSlice appends a slice t to a slice s and returns the resulting slice.
2887 // The slices s and t must have the same element type.
2888 func AppendSlice(s, t Value) Value {
2889 s.mustBe(Slice)
2890 t.mustBe(Slice)
2891 typesMustMatch("reflect.AppendSlice", s.Type().Elem(), t.Type().Elem())
2892 ns := s.Len()
2893 nt := t.Len()
2894 s = s.extendSlice(nt)
2895 Copy(s.Slice(ns, ns+nt), t)
2896 return s
2897 }
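// Illustrative usage sketch (not part of the original source), assuming a
// caller that imports "reflect":
//
//	s := reflect.ValueOf([]int{1, 2})
//	s = reflect.Append(s, reflect.ValueOf(3))             // [1 2 3]
//	s = reflect.AppendSlice(s, reflect.ValueOf([]int{4})) // [1 2 3 4]
//	out := s.Interface().([]int)
//	_ = out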
2898
2899 // Copy copies the contents of src into dst until either
2900 // dst has been filled or src has been exhausted.
2901 // It returns the number of elements copied.
2902 // Dst and src each must have kind Slice or Array, and
2903 // dst and src must have the same element type.
2904 //
2905 // As a special case, src can have kind String if the element type of dst is kind Uint8.
2906 func Copy(dst, src Value) int {
2907 dk := dst.kind()
2908 if dk != Array && dk != Slice {
2909 panic(&ValueError{"reflect.Copy", dk})
2910 }
2911 if dk == Array {
2912 dst.mustBeAssignable()
2913 }
2914 dst.mustBeExported()
2915
2916 sk := src.kind()
2917 var stringCopy bool
2918 if sk != Array && sk != Slice {
2919 stringCopy = sk == String && dst.typ().Elem().Kind() == abi.Uint8
2920 if !stringCopy {
2921 panic(&ValueError{"reflect.Copy", sk})
2922 }
2923 }
2924 src.mustBeExported()
2925
2926 de := dst.typ().Elem()
2927 if !stringCopy {
2928 se := src.typ().Elem()
2929 typesMustMatch("reflect.Copy", toType(de), toType(se))
2930 }
2931
2932 var ds, ss unsafeheader.Slice
2933 if dk == Array {
2934 ds.Data = dst.ptr
2935 ds.Len = dst.Len()
2936 ds.Cap = ds.Len
2937 } else {
2938 ds = *(*unsafeheader.Slice)(dst.ptr)
2939 }
2940 if sk == Array {
2941 ss.Data = src.ptr
2942 ss.Len = src.Len()
2943 ss.Cap = ss.Len
2944 } else if sk == Slice {
2945 ss = *(*unsafeheader.Slice)(src.ptr)
2946 } else {
2947 sh := *(*unsafeheader.String)(src.ptr)
2948 ss.Data = sh.Data
2949 ss.Len = sh.Len
2950 ss.Cap = sh.Len
2951 }
2952
2953 return typedslicecopy(de.Common(), ds, ss)
2954 }
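// Illustrative usage sketch (not part of the original source), assuming a
// caller that imports "reflect"; it exercises the string-to-[]byte special
// case documented above:
//
//	dst := make([]byte, 3)
//	n := reflect.Copy(reflect.ValueOf(dst), reflect.ValueOf("hello"))
//	_ = n // 3; dst now holds "hel"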
2955
2956 // A runtimeSelect is a single case passed to rselect.
2957 // This must match ../runtime/select.go:/runtimeSelect
2958 type runtimeSelect struct {
2959 dir SelectDir
2960 typ *rtype
2961 ch unsafe.Pointer
2962 val unsafe.Pointer
2963 }
2964
2965 // rselect runs a select. It returns the index of the chosen case.
2966 // If the case was a receive, val is filled in with the received value.
2967 // The conventional OK bool indicates whether the receive corresponds
2968 // to a sent value.
2969 //
2970 // rselect generally doesn't escape the runtimeSelect slice, except
2971 // that for Recv cases the elem in each runtimeSelect needs to be
2972 // out of stack.
2973
2974
2975
2976 func rselect([]runtimeSelect) (chosen int, recvOK bool)
2977
2978 // A SelectDir describes the communication direction of a select case.
2979 type SelectDir int
2980
2981
2982
2983 const (
2984 _ SelectDir = iota
2985 SelectSend    // case Chan <- Send
2986 SelectRecv    // case <-Chan:
2987 SelectDefault // default
2988 )
2989
2990 // A SelectCase describes a single case in a select operation.
2991 // The kind of case depends on Dir, the communication direction.
2992 //
2993 // If Dir is SelectDefault, the case represents a default case.
2994 // Chan and Send must be zero Values.
2995 //
2996 // If Dir is SelectSend, the case represents a send operation.
2997 // Normally Chan's underlying value must be a channel, and Send's underlying value must be
2998 // assignable to the channel's element type. As a special case, if Chan is a zero Value,
2999 // then the case is ignored, and the field Send will also be ignored and may be either zero
3000 // or non-zero.
3001 //
3002 // If Dir is SelectRecv, the case represents a receive operation.
3003 // Normally Chan's underlying value must be a channel and Send must be a zero Value.
3004 // If Chan is a zero Value, then the case is ignored, but Send must still be a zero Value.
3005 // When a receive operation is selected, the received Value is returned by Select.
3006 type SelectCase struct {
3007 Dir  SelectDir // direction of case
3008 Chan Value     // channel to use (for send or receive)
3009 Send Value     // value to send (for send)
3010 }
3011
3012 // Select executes a select operation described by the list of cases.
3013 // Like the Go select statement, it blocks until at least one of the cases
3014 // can proceed, makes a uniform pseudo-random choice,
3015 // and then executes that case. It returns the index of the chosen case
3016 // and, if that case was a receive operation, the value received and a
3017 // boolean indicating whether the value corresponds to a send on the channel
3018 // (as opposed to a zero value received because the channel is closed).
3019 // Select supports a maximum of 65536 cases.
3020 func Select(cases []SelectCase) (chosen int, recv Value, recvOK bool) {
3021 if len(cases) > 65536 {
3022 panic("reflect.Select: too many cases (max 65536)")
3023 }
3024 // NOTE: Do not trust that caller is not modifying cases data underfoot.
3025 // The range is safe because the caller cannot modify our copy of the len
3026 // and each iteration makes its own copy of the value c.
3027 var runcases []runtimeSelect
3028 if len(cases) > 4 {
3029 // Slice is heap allocated due to runtime dependent capacity.
3030 runcases = make([]runtimeSelect, len(cases))
3031 } else {
3032 // Slice can be stack allocated due to constant capacity.
3033 runcases = make([]runtimeSelect, len(cases), 4)
3034 }
3035
3036 haveDefault := false
3037 for i, c := range cases {
3038 rc := &runcases[i]
3039 rc.dir = c.Dir
3040 switch c.Dir {
3041 default:
3042 panic("reflect.Select: invalid Dir")
3043
3044 case SelectDefault:
3045 if haveDefault {
3046 panic("reflect.Select: multiple default cases")
3047 }
3048 haveDefault = true
3049 if c.Chan.IsValid() {
3050 panic("reflect.Select: default case has Chan value")
3051 }
3052 if c.Send.IsValid() {
3053 panic("reflect.Select: default case has Send value")
3054 }
3055
3056 case SelectSend:
3057 ch := c.Chan
3058 if !ch.IsValid() {
3059 break
3060 }
3061 ch.mustBe(Chan)
3062 ch.mustBeExported()
3063 tt := (*chanType)(unsafe.Pointer(ch.typ()))
3064 if ChanDir(tt.Dir)&SendDir == 0 {
3065 panic("reflect.Select: SendDir case using recv-only channel")
3066 }
3067 rc.ch = ch.pointer()
3068 rc.typ = toRType(&tt.Type)
3069 v := c.Send
3070 if !v.IsValid() {
3071 panic("reflect.Select: SendDir case missing Send value")
3072 }
3073 v.mustBeExported()
3074 v = v.assignTo("reflect.Select", tt.Elem, nil)
3075 if v.flag&flagIndir != 0 {
3076 rc.val = v.ptr
3077 } else {
3078 rc.val = unsafe.Pointer(&v.ptr)
3079 }
3080 // The value to send may be handed to another goroutine by the
3081 // runtime, so it must be forced to escape to the heap.
3082 escapes(rc.val)
3083
3084 case SelectRecv:
3085 if c.Send.IsValid() {
3086 panic("reflect.Select: RecvDir case has Send value")
3087 }
3088 ch := c.Chan
3089 if !ch.IsValid() {
3090 break
3091 }
3092 ch.mustBe(Chan)
3093 ch.mustBeExported()
3094 tt := (*chanType)(unsafe.Pointer(ch.typ()))
3095 if ChanDir(tt.Dir)&RecvDir == 0 {
3096 panic("reflect.Select: RecvDir case using send-only channel")
3097 }
3098 rc.ch = ch.pointer()
3099 rc.typ = toRType(&tt.Type)
3100 rc.val = unsafe_New(tt.Elem)
3101 }
3102 }
3103
3104 chosen, recvOK = rselect(runcases)
3105 if runcases[chosen].dir == SelectRecv {
3106 tt := (*chanType)(unsafe.Pointer(runcases[chosen].typ))
3107 t := tt.Elem
3108 p := runcases[chosen].val
3109 fl := flag(t.Kind())
3110 if t.IfaceIndir() {
3111 recv = Value{t, p, fl | flagIndir}
3112 } else {
3113 recv = Value{t, *(*unsafe.Pointer)(p), fl}
3114 }
3115 }
3116 return chosen, recv, recvOK
3117 }
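// Illustrative usage sketch (not part of the original source), assuming a
// caller that imports "reflect":
//
//	ch := make(chan int, 1)
//	cases := []reflect.SelectCase{
//		{Dir: reflect.SelectSend, Chan: reflect.ValueOf(ch), Send: reflect.ValueOf(42)},
//		{Dir: reflect.SelectDefault},
//	}
//	chosen, _, _ := reflect.Select(cases)
//	_ = chosen // 0: the buffered send proceeded, so the default case was not taken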
3118
3119
3122
3123
3124
3125 // unsafe_New and unsafe_NewArray are implemented in package runtime.
3126 func unsafe_New(*abi.Type) unsafe.Pointer
3127
3128
3129 func unsafe_NewArray(*abi.Type, int) unsafe.Pointer
3130
3131 // MakeSlice creates a new zero-initialized slice value
3132 // for the specified slice type, length, and capacity.
3133 func MakeSlice(typ Type, len, cap int) Value {
3134 if typ.Kind() != Slice {
3135 panic("reflect.MakeSlice of non-slice type")
3136 }
3137 if len < 0 {
3138 panic("reflect.MakeSlice: negative len")
3139 }
3140 if cap < 0 {
3141 panic("reflect.MakeSlice: negative cap")
3142 }
3143 if len > cap {
3144 panic("reflect.MakeSlice: len > cap")
3145 }
3146
3147 s := unsafeheader.Slice{Data: unsafe_NewArray(&(typ.Elem().(*rtype).t), cap), Len: len, Cap: cap}
3148 return Value{&typ.(*rtype).t, unsafe.Pointer(&s), flagIndir | flag(Slice)}
3149 }
3150
3151 // MakeChan creates a new channel with the specified type and buffer size.
3152 func MakeChan(typ Type, buffer int) Value {
3153 if typ.Kind() != Chan {
3154 panic("reflect.MakeChan of non-chan type")
3155 }
3156 if buffer < 0 {
3157 panic("reflect.MakeChan: negative buffer size")
3158 }
3159 if typ.ChanDir() != BothDir {
3160 panic("reflect.MakeChan: unidirectional channel type")
3161 }
3162 t := typ.common()
3163 ch := makechan(t, buffer)
3164 return Value{t, ch, flag(Chan)}
3165 }
3166
3167 // MakeMap creates a new map with the specified type.
3168 func MakeMap(typ Type) Value {
3169 return MakeMapWithSize(typ, 0)
3170 }
3171
3172 // MakeMapWithSize creates a new map with the specified type
3173 // and initial space for approximately n elements.
3174 func MakeMapWithSize(typ Type, n int) Value {
3175 if typ.Kind() != Map {
3176 panic("reflect.MakeMapWithSize of non-map type")
3177 }
3178 t := typ.common()
3179 m := makemap(t, n)
3180 return Value{t, m, flag(Map)}
3181 }
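// Illustrative usage sketch (not part of the original source) for the
// constructors above, assuming a caller that imports "reflect":
//
//	s := reflect.MakeSlice(reflect.TypeOf([]int(nil)), 2, 4)    // len 2, cap 4
//	c := reflect.MakeChan(reflect.TypeOf(make(chan string)), 1) // buffered chan string
//	m := reflect.MakeMapWithSize(reflect.TypeOf(map[string]int(nil)), 16)
//	_, _, _ = s, c, m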
3182
3183 // Indirect returns the value that v points to.
3184 // If v is a nil pointer, Indirect returns a zero Value.
3185 // If v is not a pointer, Indirect returns v.
3186 func Indirect(v Value) Value {
3187 if v.Kind() != Pointer {
3188 return v
3189 }
3190 return v.Elem()
3191 }
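// Illustrative usage sketch (not part of the original source), assuming a
// caller that imports "reflect":
//
//	x := 7
//	v := reflect.Indirect(reflect.ValueOf(&x)) // equivalent to .Elem(); Kind Int, value 7
//	w := reflect.Indirect(reflect.ValueOf(x))  // not a pointer, so returned unchanged
//	_, _ = v, w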
3192
3193 // Before Go 1.21, ValueOf always escaped its argument, so a Value's content
3194 // was always heap allocated.
3195 // Set go121noForceValueEscape to false to restore the old behavior.
3196 //
3197 // TODO: remove go121noForceValueEscape once the new behavior is
3198 // permanently enabled.
3199 const go121noForceValueEscape = true
3200
3201 // ValueOf returns a new Value initialized to the concrete value
3202 // stored in the interface i. ValueOf(nil) returns the zero Value.
3203 func ValueOf(i any) Value {
3204 if i == nil {
3205 return Value{}
3206 }
3207
3208 if !go121noForceValueEscape {
3209 escapes(i)
3210 }
3211
3212 return unpackEface(i)
3213 }
3214
3215 // Zero returns a Value representing the zero value for the specified type.
3216 // The result is different from the zero value of the Value struct,
3217 // which represents no value at all.
3218 // For example, Zero(TypeOf(42)) returns a Value with Kind Int and value 0.
3219 // The returned value is neither addressable nor settable.
3220 func Zero(typ Type) Value {
3221 if typ == nil {
3222 panic("reflect: Zero(nil)")
3223 }
3224 t := &typ.(*rtype).t
3225 fl := flag(t.Kind())
3226 if t.IfaceIndir() {
3227 var p unsafe.Pointer
3228 if t.Size() <= maxZero {
3229 p = unsafe.Pointer(&zeroVal[0])
3230 } else {
3231 p = unsafe_New(t)
3232 }
3233 return Value{t, p, fl | flagIndir}
3234 }
3235 return Value{t, nil, fl}
3236 }
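// Illustrative usage sketch (not part of the original source), assuming a
// caller that imports "reflect":
//
//	z := reflect.Zero(reflect.TypeOf(42))
//	_ = z.Int()    // 0
//	_ = z.CanSet() // false: small zero values may share the read-only zeroVal buffer below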
3237
3238 // must match declarations in runtime/map.go.
3239 const maxZero = 1024
3240
3241
3242 var zeroVal [maxZero]byte
3243
3244 // New returns a Value representing a pointer to a new zero value
3245 // for the specified type. That is, the returned Value's Type is PointerTo(typ).
3246 func New(typ Type) Value {
3247 if typ == nil {
3248 panic("reflect: New(nil)")
3249 }
3250 t := &typ.(*rtype).t
3251 pt := ptrTo(t)
3252 if ifaceIndir(pt) {
3253
3254 panic("reflect: New of type that may not be allocated in heap (possibly undefined cgo C type)")
3255 }
3256 ptr := unsafe_New(t)
3257 fl := flag(Pointer)
3258 return Value{pt, ptr, fl}
3259 }
3260
3261 // NewAt returns a Value representing a pointer to a value of the
3262 // specified type, using p as that pointer.
3263 func NewAt(typ Type, p unsafe.Pointer) Value {
3264 fl := flag(Pointer)
3265 t := typ.(*rtype)
3266 return Value{t.ptrTo(), p, fl}
3267 }
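// Illustrative usage sketch (not part of the original source), assuming a
// caller that imports "reflect" and "unsafe":
//
//	p := reflect.New(reflect.TypeOf(0)) // *int pointing at a fresh zero int
//	p.Elem().SetInt(7)
//
//	var x int64
//	q := reflect.NewAt(reflect.TypeOf(x), unsafe.Pointer(&x)) // *int64 aliasing x
//	q.Elem().SetInt(9) // x is now 9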
3268
3269 // assignTo returns a value v that can be assigned directly to dst.
3270 // It panics if v is not assignable to dst.
3271 // For a conversion to an interface type, target, if not nil,
3272 // is a suggested scratch space to use.
3273 // target must be initialized memory (or nil).
3274 func (v Value) assignTo(context string, dst *abi.Type, target unsafe.Pointer) Value {
3275 if v.flag&flagMethod != 0 {
3276 v = makeMethodValue(context, v)
3277 }
3278
3279 switch {
3280 case directlyAssignable(dst, v.typ()):
3281
3282
3283 fl := v.flag&(flagAddr|flagIndir) | v.flag.ro()
3284 fl |= flag(dst.Kind())
3285 return Value{dst, v.ptr, fl}
3286
3287 case implements(dst, v.typ()):
3288 if v.Kind() == Interface && v.IsNil() {
3289
3290
3291
3292 return Value{dst, nil, flag(Interface)}
3293 }
3294 x := valueInterface(v, false)
3295 if target == nil {
3296 target = unsafe_New(dst)
3297 }
3298 if dst.NumMethod() == 0 {
3299 *(*any)(target) = x
3300 } else {
3301 ifaceE2I(dst, x, target)
3302 }
3303 return Value{dst, target, flagIndir | flag(Interface)}
3304 }
3305
3306
3307 panic(context + ": value of type " + stringFor(v.typ()) + " is not assignable to type " + stringFor(dst))
3308 }
3309
3310 // Convert returns the value v converted to type t.
3311 // If the usual Go conversion rules do not allow conversion of the value v
3312 // to type t, or if converting v to type t panics, Convert panics.
3313 func (v Value) Convert(t Type) Value {
3314 if v.flag&flagMethod != 0 {
3315 v = makeMethodValue("Convert", v)
3316 }
3317 op := convertOp(t.common(), v.typ())
3318 if op == nil {
3319 panic("reflect.Value.Convert: value of type " + stringFor(v.typ()) + " cannot be converted to type " + t.String())
3320 }
3321 return op(v, t)
3322 }
3323
3324 // CanConvert reports whether the value v can be converted to type t.
3325 // If v.CanConvert(t) returns true then v.Convert(t) will not panic.
3326 func (v Value) CanConvert(t Type) bool {
3327 vt := v.Type()
3328 if !vt.ConvertibleTo(t) {
3329 return false
3330 }
3331 // Converting from slice to array or to pointer-to-array can panic
3332 // depending on the length of the slice, so check the length here.
3333 switch {
3334 case vt.Kind() == Slice && t.Kind() == Array:
3335 if t.Len() > v.Len() {
3336 return false
3337 }
3338 case vt.Kind() == Slice && t.Kind() == Pointer && t.Elem().Kind() == Array:
3339 n := t.Elem().Len()
3340 if n > v.Len() {
3341 return false
3342 }
3343 }
3344 return true
3345 }
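// Illustrative usage sketch (not part of the original source), assuming a
// caller that imports "reflect":
//
//	f := reflect.ValueOf(3).Convert(reflect.TypeOf(float64(0)))
//	_ = f.Float() // 3.0
//
//	v := reflect.ValueOf([]byte{1, 2, 3})
//	_ = v.CanConvert(reflect.TypeOf([4]byte{})) // false: the slice is shorter than the array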
3346
3347 // Comparable reports whether the value v is comparable.
3348 // If the type of v is an interface, this checks the dynamic type.
3349 // If this reports true then v.Equal(u) is guaranteed to not panic
3350 // for any Value u of the same type as v.
3351 func (v Value) Comparable() bool {
3352 k := v.Kind()
3353 switch k {
3354 case Invalid:
3355 return false
3356
3357 case Array:
3358 switch v.Type().Elem().Kind() {
3359 case Interface, Array, Struct:
3360 for i := 0; i < v.Type().Len(); i++ {
3361 if !v.Index(i).Comparable() {
3362 return false
3363 }
3364 }
3365 return true
3366 }
3367 return v.Type().Comparable()
3368
3369 case Interface:
3370 return v.Elem().Comparable()
3371
3372 case Struct:
3373 for i := 0; i < v.NumField(); i++ {
3374 if !v.Field(i).Comparable() {
3375 return false
3376 }
3377 }
3378 return true
3379
3380 default:
3381 return v.Type().Comparable()
3382 }
3383 }
3384
3385 // Equal reports true if v is equal to u.
3386 // For two invalid values, Equal will report true.
3387 // For an interface value, Equal will compare the value within the interface.
3388 // Otherwise, if the values have different types, Equal will report false.
3389 // Otherwise, for arrays and structs Equal will compare each element in order,
3390 // and report false if it finds non-equal elements.
3391 // During all comparisons, if values of the same type are compared,
3392 // and the type is not comparable, Equal will panic.
3393 func (v Value) Equal(u Value) bool {
3394 if v.Kind() == Interface {
3395 v = v.Elem()
3396 }
3397 if u.Kind() == Interface {
3398 u = u.Elem()
3399 }
3400
3401 if !v.IsValid() || !u.IsValid() {
3402 return v.IsValid() == u.IsValid()
3403 }
3404
3405 if v.Kind() != u.Kind() || v.Type() != u.Type() {
3406 return false
3407 }
3408
3409 // Handle each Kind directly rather than calling valueInterface
3410 // to avoid allocating.
3411 switch v.Kind() {
3412 default:
3413 panic("reflect.Value.Equal: invalid Kind")
3414 case Bool:
3415 return v.Bool() == u.Bool()
3416 case Int, Int8, Int16, Int32, Int64:
3417 return v.Int() == u.Int()
3418 case Uint, Uint8, Uint16, Uint32, Uint64, Uintptr:
3419 return v.Uint() == u.Uint()
3420 case Float32, Float64:
3421 return v.Float() == u.Float()
3422 case Complex64, Complex128:
3423 return v.Complex() == u.Complex()
3424 case String:
3425 return v.String() == u.String()
3426 case Chan, Pointer, UnsafePointer:
3427 return v.Pointer() == u.Pointer()
3428 case Array:
3429 // u and v have the same type so they have the same length
3430 vl := v.Len()
3431 if vl == 0 {
3432 // panic on [0]func()
3433 if !v.Type().Elem().Comparable() {
3434 break
3435 }
3436 return true
3437 }
3438 for i := 0; i < vl; i++ {
3439 if !v.Index(i).Equal(u.Index(i)) {
3440 return false
3441 }
3442 }
3443 return true
3444 case Struct:
3445 // u and v have the same type so they have the same fields
3446 nf := v.NumField()
3447 for i := 0; i < nf; i++ {
3448 if !v.Field(i).Equal(u.Field(i)) {
3449 return false
3450 }
3451 }
3452 return true
3453 case Func, Map, Slice:
3454 break
3455 }
3456 panic("reflect.Value.Equal: values of type " + v.Type().String() + " are not comparable")
3457 }
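// Illustrative usage sketch (not part of the original source), assuming a
// caller that imports "reflect":
//
//	a := reflect.ValueOf([2]int{1, 2})
//	b := reflect.ValueOf([2]int{1, 2})
//	_ = a.Equal(b)                             // true: arrays are compared element by element
//	_ = reflect.ValueOf([]int{1}).Comparable() // false: slices are not comparable
//	// Equal on two slice values of the same type panics instead of comparing them.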
3458
3459 // convertOp returns the function to convert a value of type src
3460 // to a value of type dst. If the conversion is illegal, convertOp returns nil.
3461 func convertOp(dst, src *abi.Type) func(Value, Type) Value {
3462 switch Kind(src.Kind()) {
3463 case Int, Int8, Int16, Int32, Int64:
3464 switch Kind(dst.Kind()) {
3465 case Int, Int8, Int16, Int32, Int64, Uint, Uint8, Uint16, Uint32, Uint64, Uintptr:
3466 return cvtInt
3467 case Float32, Float64:
3468 return cvtIntFloat
3469 case String:
3470 return cvtIntString
3471 }
3472
3473 case Uint, Uint8, Uint16, Uint32, Uint64, Uintptr:
3474 switch Kind(dst.Kind()) {
3475 case Int, Int8, Int16, Int32, Int64, Uint, Uint8, Uint16, Uint32, Uint64, Uintptr:
3476 return cvtUint
3477 case Float32, Float64:
3478 return cvtUintFloat
3479 case String:
3480 return cvtUintString
3481 }
3482
3483 case Float32, Float64:
3484 switch Kind(dst.Kind()) {
3485 case Int, Int8, Int16, Int32, Int64:
3486 return cvtFloatInt
3487 case Uint, Uint8, Uint16, Uint32, Uint64, Uintptr:
3488 return cvtFloatUint
3489 case Float32, Float64:
3490 return cvtFloat
3491 }
3492
3493 case Complex64, Complex128:
3494 switch Kind(dst.Kind()) {
3495 case Complex64, Complex128:
3496 return cvtComplex
3497 }
3498
3499 case String:
3500 if dst.Kind() == abi.Slice && pkgPathFor(dst.Elem()) == "" {
3501 switch Kind(dst.Elem().Kind()) {
3502 case Uint8:
3503 return cvtStringBytes
3504 case Int32:
3505 return cvtStringRunes
3506 }
3507 }
3508
3509 case Slice:
3510 if dst.Kind() == abi.String && pkgPathFor(src.Elem()) == "" {
3511 switch Kind(src.Elem().Kind()) {
3512 case Uint8:
3513 return cvtBytesString
3514 case Int32:
3515 return cvtRunesString
3516 }
3517 }
3518
3519
3520 if dst.Kind() == abi.Pointer && dst.Elem().Kind() == abi.Array && src.Elem() == dst.Elem().Elem() {
3521 return cvtSliceArrayPtr
3522 }
3523
3524
3525 if dst.Kind() == abi.Array && src.Elem() == dst.Elem() {
3526 return cvtSliceArray
3527 }
3528
3529 case Chan:
3530 if dst.Kind() == abi.Chan && specialChannelAssignability(dst, src) {
3531 return cvtDirect
3532 }
3533 }
3534
3535 // dst and src have same underlying type.
3536 if haveIdenticalUnderlyingType(dst, src, false) {
3537 return cvtDirect
3538 }
3539
3540 // dst and src are unnamed pointer types with same underlying base type.
3541 if dst.Kind() == abi.Pointer && nameFor(dst) == "" &&
3542 src.Kind() == abi.Pointer && nameFor(src) == "" &&
3543 haveIdenticalUnderlyingType(elem(dst), elem(src), false) {
3544 return cvtDirect
3545 }
3546
3547 if implements(dst, src) {
3548 if src.Kind() == abi.Interface {
3549 return cvtI2I
3550 }
3551 return cvtT2I
3552 }
3553
3554 return nil
3555 }
3556
3557 // makeInt returns a Value of type t equal to bits (possibly truncated),
3558 // where t is a signed or unsigned int type.
3559 func makeInt(f flag, bits uint64, t Type) Value {
3560 typ := t.common()
3561 ptr := unsafe_New(typ)
3562 switch typ.Size() {
3563 case 1:
3564 *(*uint8)(ptr) = uint8(bits)
3565 case 2:
3566 *(*uint16)(ptr) = uint16(bits)
3567 case 4:
3568 *(*uint32)(ptr) = uint32(bits)
3569 case 8:
3570 *(*uint64)(ptr) = bits
3571 }
3572 return Value{typ, ptr, f | flagIndir | flag(typ.Kind())}
3573 }
3574
3575 // makeFloat returns a Value of type t equal to v (possibly truncated to float32),
3576 // where t is a float32 or float64 type.
3577 func makeFloat(f flag, v float64, t Type) Value {
3578 typ := t.common()
3579 ptr := unsafe_New(typ)
3580 switch typ.Size() {
3581 case 4:
3582 *(*float32)(ptr) = float32(v)
3583 case 8:
3584 *(*float64)(ptr) = v
3585 }
3586 return Value{typ, ptr, f | flagIndir | flag(typ.Kind())}
3587 }
3588
3589 // makeFloat32 returns a Value of type t equal to v, where t is a float32 type.
3590 func makeFloat32(f flag, v float32, t Type) Value {
3591 typ := t.common()
3592 ptr := unsafe_New(typ)
3593 *(*float32)(ptr) = v
3594 return Value{typ, ptr, f | flagIndir | flag(typ.Kind())}
3595 }
3596
3597 // makeComplex returns a Value of type t equal to v (possibly truncated to complex64),
3598 // where t is a complex64 or complex128 type.
3599 func makeComplex(f flag, v complex128, t Type) Value {
3600 typ := t.common()
3601 ptr := unsafe_New(typ)
3602 switch typ.Size() {
3603 case 8:
3604 *(*complex64)(ptr) = complex64(v)
3605 case 16:
3606 *(*complex128)(ptr) = v
3607 }
3608 return Value{typ, ptr, f | flagIndir | flag(typ.Kind())}
3609 }
3610
3611 func makeString(f flag, v string, t Type) Value {
3612 ret := New(t).Elem()
3613 ret.SetString(v)
3614 ret.flag = ret.flag&^flagAddr | f
3615 return ret
3616 }
3617
3618 func makeBytes(f flag, v []byte, t Type) Value {
3619 ret := New(t).Elem()
3620 ret.SetBytes(v)
3621 ret.flag = ret.flag&^flagAddr | f
3622 return ret
3623 }
3624
3625 func makeRunes(f flag, v []rune, t Type) Value {
3626 ret := New(t).Elem()
3627 ret.setRunes(v)
3628 ret.flag = ret.flag&^flagAddr | f
3629 return ret
3630 }
3631
3632 // These conversion functions are returned by convertOp
3633 // for classes of conversions. For example, the first function, cvtInt,
3634 // takes any value v of signed int type and returns the value converted
3635 // to type t, where t is any signed or unsigned int type.
3636
3637 // convertOp: intXX -> [u]intXX
3638 func cvtInt(v Value, t Type) Value {
3639 return makeInt(v.flag.ro(), uint64(v.Int()), t)
3640 }
3641
3642
3643 func cvtUint(v Value, t Type) Value {
3644 return makeInt(v.flag.ro(), v.Uint(), t)
3645 }
3646
3647
3648 func cvtFloatInt(v Value, t Type) Value {
3649 return makeInt(v.flag.ro(), uint64(int64(v.Float())), t)
3650 }
3651
3652
3653 func cvtFloatUint(v Value, t Type) Value {
3654 return makeInt(v.flag.ro(), uint64(v.Float()), t)
3655 }
3656
3657
3658 func cvtIntFloat(v Value, t Type) Value {
3659 return makeFloat(v.flag.ro(), float64(v.Int()), t)
3660 }
3661
3662
3663 func cvtUintFloat(v Value, t Type) Value {
3664 return makeFloat(v.flag.ro(), float64(v.Uint()), t)
3665 }
3666
3667
3668 func cvtFloat(v Value, t Type) Value {
3669 if v.Type().Kind() == Float32 && t.Kind() == Float32 {
3670 // Don't do any conversion if both types have underlying type float32.
3671 // This avoids converting to float64 and back to float32, which would
3672 // not preserve the exact bit pattern of values such as signaling NaNs.
3673 return makeFloat32(v.flag.ro(), *(*float32)(v.ptr), t)
3674 }
3675 return makeFloat(v.flag.ro(), v.Float(), t)
3676 }
3677
3678
3679 func cvtComplex(v Value, t Type) Value {
3680 return makeComplex(v.flag.ro(), v.Complex(), t)
3681 }
3682
3683
3684 func cvtIntString(v Value, t Type) Value {
3685 s := "\uFFFD"
3686 if x := v.Int(); int64(rune(x)) == x {
3687 s = string(rune(x))
3688 }
3689 return makeString(v.flag.ro(), s, t)
3690 }
3691
3692
3693 func cvtUintString(v Value, t Type) Value {
3694 s := "\uFFFD"
3695 if x := v.Uint(); uint64(rune(x)) == x {
3696 s = string(rune(x))
3697 }
3698 return makeString(v.flag.ro(), s, t)
3699 }
3700
3701
3702 func cvtBytesString(v Value, t Type) Value {
3703 return makeString(v.flag.ro(), string(v.Bytes()), t)
3704 }
3705
3706
3707 func cvtStringBytes(v Value, t Type) Value {
3708 return makeBytes(v.flag.ro(), []byte(v.String()), t)
3709 }
3710
3711
3712 func cvtRunesString(v Value, t Type) Value {
3713 return makeString(v.flag.ro(), string(v.runes()), t)
3714 }
3715
3716
3717 func cvtStringRunes(v Value, t Type) Value {
3718 return makeRunes(v.flag.ro(), []rune(v.String()), t)
3719 }
3720
3721
3722 func cvtSliceArrayPtr(v Value, t Type) Value {
3723 n := t.Elem().Len()
3724 if n > v.Len() {
3725 panic("reflect: cannot convert slice with length " + itoa.Itoa(v.Len()) + " to pointer to array with length " + itoa.Itoa(n))
3726 }
3727 h := (*unsafeheader.Slice)(v.ptr)
3728 return Value{t.common(), h.Data, v.flag&^(flagIndir|flagAddr|flagKindMask) | flag(Pointer)}
3729 }
3730
3731
3732 func cvtSliceArray(v Value, t Type) Value {
3733 n := t.Len()
3734 if n > v.Len() {
3735 panic("reflect: cannot convert slice with length " + itoa.Itoa(v.Len()) + " to array with length " + itoa.Itoa(n))
3736 }
3737 h := (*unsafeheader.Slice)(v.ptr)
3738 typ := t.common()
3739 ptr := h.Data
3740 c := unsafe_New(typ)
3741 typedmemmove(typ, c, ptr)
3742 ptr = c
3743
3744 return Value{typ, ptr, v.flag&^(flagAddr|flagKindMask) | flag(Array)}
3745 }
3746
3747
3748 func cvtDirect(v Value, typ Type) Value {
3749 f := v.flag
3750 t := typ.common()
3751 ptr := v.ptr
3752 if f&flagAddr != 0 {
3753
3754 c := unsafe_New(t)
3755 typedmemmove(t, c, ptr)
3756 ptr = c
3757 f &^= flagAddr
3758 }
3759 return Value{t, ptr, v.flag.ro() | f}
3760 }
3761
3762
3763 func cvtT2I(v Value, typ Type) Value {
3764 target := unsafe_New(typ.common())
3765 x := valueInterface(v, false)
3766 if typ.NumMethod() == 0 {
3767 *(*any)(target) = x
3768 } else {
3769 ifaceE2I(typ.common(), x, target)
3770 }
3771 return Value{typ.common(), target, v.flag.ro() | flagIndir | flag(Interface)}
3772 }
3773
3774
3775 func cvtI2I(v Value, typ Type) Value {
3776 if v.IsNil() {
3777 ret := Zero(typ)
3778 ret.flag |= v.flag.ro()
3779 return ret
3780 }
3781 return cvtT2I(v.Elem(), typ)
3782 }
3783
3784
3785
3786
3787 func chancap(ch unsafe.Pointer) int
3788
3789
3790 func chanclose(ch unsafe.Pointer)
3791
3792
3793 func chanlen(ch unsafe.Pointer) int
3794
3795
3796 // Note: chansend0 and mapassign0 (and the other *0 stubs below) do not
3797 // escape the referent itself, but may escape anything the referent points
3798 // to, since they make shallow copies of the referent. The wrappers without
3799 // the 0 suffix force the proper escape behavior via contentEscapes before
3800 // calling the runtime stub.
3801
3802
3803 func chanrecv(ch unsafe.Pointer, nb bool, val unsafe.Pointer) (selected, received bool)
3804
3805
3806 func chansend0(ch unsafe.Pointer, val unsafe.Pointer, nb bool) bool
3807
3808 func chansend(ch unsafe.Pointer, val unsafe.Pointer, nb bool) bool {
3809 contentEscapes(val)
3810 return chansend0(ch, val, nb)
3811 }
3812
3813 func makechan(typ *abi.Type, size int) (ch unsafe.Pointer)
3814 func makemap(t *abi.Type, cap int) (m unsafe.Pointer)
3815
3816
3817 func mapaccess(t *abi.Type, m unsafe.Pointer, key unsafe.Pointer) (val unsafe.Pointer)
3818
3819
3820 func mapaccess_faststr(t *abi.Type, m unsafe.Pointer, key string) (val unsafe.Pointer)
3821
3822
3823 func mapassign0(t *abi.Type, m unsafe.Pointer, key, val unsafe.Pointer)
3824
3825 func mapassign(t *abi.Type, m unsafe.Pointer, key, val unsafe.Pointer) {
3826 contentEscapes(key)
3827 contentEscapes(val)
3828 mapassign0(t, m, key, val)
3829 }
3830
3831
3832 func mapassign_faststr0(t *abi.Type, m unsafe.Pointer, key string, val unsafe.Pointer)
3833
3834 func mapassign_faststr(t *abi.Type, m unsafe.Pointer, key string, val unsafe.Pointer) {
3835 contentEscapes((*unsafeheader.String)(unsafe.Pointer(&key)).Data)
3836 contentEscapes(val)
3837 mapassign_faststr0(t, m, key, val)
3838 }
3839
3840
3841 func mapdelete(t *abi.Type, m unsafe.Pointer, key unsafe.Pointer)
3842
3843
3844 func mapdelete_faststr(t *abi.Type, m unsafe.Pointer, key string)
3845
3846
3847 func mapiterinit(t *abi.Type, m unsafe.Pointer, it *hiter)
3848
3849
3850 func mapiterkey(it *hiter) (key unsafe.Pointer)
3851
3852
3853 func mapiterelem(it *hiter) (elem unsafe.Pointer)
3854
3855
3856 func mapiternext(it *hiter)
3857
3858
3859 func maplen(m unsafe.Pointer) int
3860
3861 func mapclear(t *abi.Type, m unsafe.Pointer)
3862
3863
3864
3865
3866
3867
3868
3869
3870
3871
3872
3873
3874
3875
3876
3877
3878
3879
3880
3881
3882
3883 // call calls fn with stackArgsSize bytes of stack arguments laid out at
3884 // stackArgs and register arguments laid out in regArgs. frameSize is the
3885 // total amount of stack space reserved for the call frame. After fn
3886 // returns, call copies stackArgsSize-stackRetOffset result bytes back
3887 // into stackArgs+stackRetOffset, and register results are left in regArgs.
3888
3889 func call(stackArgsType *abi.Type, f, stackArgs unsafe.Pointer, stackArgsSize, stackRetOffset, frameSize uint32, regArgs *abi.RegArgs)
3890
3891 func ifaceE2I(t *abi.Type, src any, dst unsafe.Pointer)
3892
3893
3894 // memmove copies size bytes to dst from src. No write barriers are used.
3895
3896 func memmove(dst, src unsafe.Pointer, size uintptr)
3897
3898
3899
3900 // typedmemmove copies a value of type t to dst from src.
3901 func typedmemmove(t *abi.Type, dst, src unsafe.Pointer)
3902
3903
3904
3905 // typedmemclr zeros the value at ptr of type t.
3906 func typedmemclr(t *abi.Type, ptr unsafe.Pointer)
3907
3908
3909
3910
3911 // typedmemclrpartial is like typedmemclr but assumes that ptr points off bytes into the value and only clears size bytes.
3912 func typedmemclrpartial(t *abi.Type, ptr unsafe.Pointer, off, size uintptr)
3913
3914
3915
3916
3917 // typedslicecopy copies a slice of elemType values from src to dst, returning the number of elements copied.
3918 func typedslicecopy(t *abi.Type, dst, src unsafeheader.Slice) int
3919
3920
3921
3922
3923 // typedarrayclear zeroes the first len elements of an array of elemType at ptr.
3924 func typedarrayclear(elemType *abi.Type, ptr unsafe.Pointer, len int)
3925
3926
3927 func typehash(t *abi.Type, p unsafe.Pointer, h uintptr) uintptr
3928
3929 func verifyNotInHeapPtr(p uintptr) bool
3930
3931
3932 func growslice(t *abi.Type, old unsafeheader.Slice, num int) unsafeheader.Slice
3933
3934 // Dummy annotation marking that the value x escapes,
3935 // for use in cases where the reflect code is so clever that
3936 // the compiler cannot follow.
3937 func escapes(x any) {
3938 if dummy.b {
3939 dummy.x = x
3940 }
3941 }
3942
3943 var dummy struct {
3944 b bool
3945 x any
3946 }
3947
3948 // Dummy annotation marking that the content of value x
3949 // escapes (i.e. modeling roughly heap=*x),
3950 // for use in cases where the reflect code is so clever that
3951 // the compiler cannot follow.
3952 func contentEscapes(x unsafe.Pointer) {
3953 if dummy.b {
3954 escapes(*(*any)(x))
3955 }
3956 }
3957
3958 // noescape hides a pointer from escape analysis; it is the identity function. Copied from runtime/stubs.go. USE CAREFULLY!
3959 func noescape(p unsafe.Pointer) unsafe.Pointer {
3960 x := uintptr(p)
3961 return unsafe.Pointer(x ^ 0)
3962 }
3963