D: [iurt_root_command] chroot
Building target platforms: noarch
Building for target noarch
Installing /home/pterjan/rpmbuild/SRPMS/golang-github-huandu-xstrings-1.3.2-2.mga10.src.rpm
Executing(%mkbuilddir): /bin/sh -e /home/pterjan/rpmbuild/tmp/rpm-tmp.6lrepp
Executing(%prep): /bin/sh -e /home/pterjan/rpmbuild/tmp/rpm-tmp.PvisFz
+ umask 022
+ cd /home/pterjan/rpmbuild/BUILD/golang-github-huandu-xstrings-1.3.2-build
+ '[' 1 -eq 1 ']'
+ '[' 1 -eq 1 ']'
+ '[' 1 -eq 1 ']'
+ cd /home/pterjan/rpmbuild/BUILD/golang-github-huandu-xstrings-1.3.2-build
+ rm -rf xstrings-1.3.2
+ /usr/lib/rpm/rpmuncompress -x /home/pterjan/rpmbuild/SOURCES/xstrings-1.3.2.tar.gz
+ STATUS=0
+ '[' 0 -ne 0 ']'
+ cd xstrings-1.3.2
+ /usr/bin/chmod -Rf a+rX,u+w,g-w,o-w .
+ rm -fr /home/pterjan/rpmbuild/BUILD/golang-github-huandu-xstrings-1.3.2-build/xstrings-1.3.2/vendor
+ [[ ! -e /home/pterjan/rpmbuild/BUILD/golang-github-huandu-xstrings-1.3.2-build/xstrings-1.3.2/_build/bin ]]
+ install -m 0755 -vd /home/pterjan/rpmbuild/BUILD/golang-github-huandu-xstrings-1.3.2-build/xstrings-1.3.2/_build/bin
install: creating directory '/home/pterjan/rpmbuild/BUILD/golang-github-huandu-xstrings-1.3.2-build/xstrings-1.3.2/_build'
install: creating directory '/home/pterjan/rpmbuild/BUILD/golang-github-huandu-xstrings-1.3.2-build/xstrings-1.3.2/_build/bin'
+ export GOPATH=/home/pterjan/rpmbuild/BUILD/golang-github-huandu-xstrings-1.3.2-build/xstrings-1.3.2/_build:/usr/share/gocode
+ GOPATH=/home/pterjan/rpmbuild/BUILD/golang-github-huandu-xstrings-1.3.2-build/xstrings-1.3.2/_build:/usr/share/gocode
+ [[ ! -e /home/pterjan/rpmbuild/BUILD/golang-github-huandu-xstrings-1.3.2-build/xstrings-1.3.2/_build/src/github.com/huandu/xstrings ]]
++ dirname /home/pterjan/rpmbuild/BUILD/golang-github-huandu-xstrings-1.3.2-build/xstrings-1.3.2/_build/src/github.com/huandu/xstrings
+ install -m 0755 -vd /home/pterjan/rpmbuild/BUILD/golang-github-huandu-xstrings-1.3.2-build/xstrings-1.3.2/_build/src/github.com/huandu
install: creating directory '/home/pterjan/rpmbuild/BUILD/golang-github-huandu-xstrings-1.3.2-build/xstrings-1.3.2/_build/src'
install: creating directory '/home/pterjan/rpmbuild/BUILD/golang-github-huandu-xstrings-1.3.2-build/xstrings-1.3.2/_build/src/github.com'
install: creating directory '/home/pterjan/rpmbuild/BUILD/golang-github-huandu-xstrings-1.3.2-build/xstrings-1.3.2/_build/src/github.com/huandu'
+ ln -fs /home/pterjan/rpmbuild/BUILD/golang-github-huandu-xstrings-1.3.2-build/xstrings-1.3.2 /home/pterjan/rpmbuild/BUILD/golang-github-huandu-xstrings-1.3.2-build/xstrings-1.3.2/_build/src/github.com/huandu/xstrings
+ cd /home/pterjan/rpmbuild/BUILD/golang-github-huandu-xstrings-1.3.2-build/xstrings-1.3.2/_build/src/github.com/huandu/xstrings
+ RPM_EC=0
++ jobs -p
+ exit 0
Executing(%install): /bin/sh -e /home/pterjan/rpmbuild/tmp/rpm-tmp.uClbN4
+ umask 022
+ cd /home/pterjan/rpmbuild/BUILD/golang-github-huandu-xstrings-1.3.2-build
+ '[' 1 -eq 1 ']'
+ '[' /home/pterjan/rpmbuild/BUILD/golang-github-huandu-xstrings-1.3.2-build/BUILDROOT '!=' / ']'
+ rm -rf /home/pterjan/rpmbuild/BUILD/golang-github-huandu-xstrings-1.3.2-build/BUILDROOT
++ dirname /home/pterjan/rpmbuild/BUILD/golang-github-huandu-xstrings-1.3.2-build/BUILDROOT
+ mkdir -p /home/pterjan/rpmbuild/BUILD/golang-github-huandu-xstrings-1.3.2-build
+ mkdir /home/pterjan/rpmbuild/BUILD/golang-github-huandu-xstrings-1.3.2-build/BUILDROOT
+ cd xstrings-1.3.2
+ '[' 1 -eq 1 ']'
+ cd /home/pterjan/rpmbuild/BUILD/golang-github-huandu-xstrings-1.3.2-build/xstrings-1.3.2/_build/src/github.com/huandu/xstrings
+ IFS=
+ gosupfiles=
+ mapfile -t gosupfilesA
+ go-rpm-integration install -i github.com/huandu/xstrings -b /home/pterjan/rpmbuild/BUILD/golang-github-huandu-xstrings-1.3.2-build/xstrings-1.3.2/_build/bin -s /home/pterjan/rpmbuild/BUILD/golang-github-huandu-xstrings-1.3.2-build/xstrings-1.3.2/_build -o golang-github-huandu-xstrings-devel.file-list -O /home/pterjan/rpmbuild/BUILD/golang-github-huandu-xstrings-1.3.2-build/xstrings-1.3.2 -V 1.3.2-2.mga10 -p /home/pterjan/rpmbuild/BUILD/golang-github-huandu-xstrings-1.3.2-build/BUILDROOT -g /usr/share/gocode -r '.*example.*'
/usr/bin/go-rpm-integration: fork: retry: Resource temporarily unavailable
Installing: github.com/huandu/xstrings
+ IFS=
+++ realpath -e --relative-base=. CONTRIBUTING.md README.md
+++ sort -u
++ listfiles_include=$'CONTRIBUTING.md\nREADME.md'
++ echo $'CONTRIBUTING.md\nREADME.md'
+ godocs=$'CONTRIBUTING.md\nREADME.md'
+ [[ -n CONTRIBUTING.md README.md ]]
+ IFS=
+ read -r f
+ echo %doc '"CONTRIBUTING.md"'
+ IFS=
+ read -r f
+ echo %doc '"README.md"'
+ IFS=
+ read -r f
+ IFS=
+++ realpath -e --relative-base=. LICENSE
+++ sort -u
++ listfiles_include=LICENSE
++ echo LICENSE
+ golicenses=LICENSE
+ [[ -n LICENSE ]]
+ IFS=
+ read -r f
+ echo %license '"LICENSE"'
+ IFS=
+ read -r f
+ /usr/lib/rpm/check-buildroot
+ '[' -n '' ']'
+ /usr/share/spec-helper/clean_files
+ '[' -n '' ']'
+ /usr/share/spec-helper/compress_files .xz
+ '[' -n '' ']'
+ /usr/share/spec-helper/relink_symlinks
+ '[' -n '' ']'
+ /usr/share/spec-helper/clean_perl
+ '[' -n '' ']'
+ /usr/share/spec-helper/lib_symlinks
+ '[' -n '' ']'
+ /usr/share/spec-helper/gprintify
+ '[' -n '' ']'
+ /usr/share/spec-helper/fix_mo
+ '[' -n '' ']'
+ /usr/share/spec-helper/fix_pamd
+ '[' -n '' ']'
+ /usr/share/spec-helper/remove_info_dir
+ '[' -n '' ']'
+ /usr/share/spec-helper/fix_eol
+ '[' -n '' ']'
+ /usr/share/spec-helper/check_desktop_files
+ '[' -n '' ']'
+ /usr/share/spec-helper/check_elf_files
+ /usr/lib/rpm/brp-strip /usr/bin/strip
+ /usr/lib/rpm/brp-strip-comment-note /usr/bin/strip /usr/bin/objdump
+ /usr/lib/rpm/brp-strip-static-archive /usr/bin/strip
+ /usr/lib/rpm/check-rpaths
+ /usr/lib/rpm/brp-remove-la-files
+ /usr/lib/rpm/redhat/brp-mangle-shebangs
+ env -u SOURCE_DATE_EPOCH /usr/lib/rpm/redhat/brp-python-bytecompile '' 1 0 -j16
+ /usr/lib/rpm/redhat/brp-python-hardlink
Executing(%check): /bin/sh -e /home/pterjan/rpmbuild/tmp/rpm-tmp.hjBSsy
+ umask 022
+ cd /home/pterjan/rpmbuild/BUILD/golang-github-huandu-xstrings-1.3.2-build
+ cd xstrings-1.3.2
+ '[' 1 -eq 1 ']'
+ GO_LDFLAGS=' -X github.com/huandu/xstrings/version=1.3.2'
+ GO_TEST_FLAGS='-buildmode pie -compiler gc'
+ GO_TEST_EXT_LD_FLAGS='-Wl,--as-needed -Wl,--no-undefined -Wl,-z,relro -Wl,-z,now -Wl,-O1 -Wl,--build-id=sha1 -Wl,--enable-new-dtags -specs=/usr/lib/rpm/redhat/redhat-hardened-ld '
+ go-rpm-integration check -i github.com/huandu/xstrings -b /home/pterjan/rpmbuild/BUILD/golang-github-huandu-xstrings-1.3.2-build/xstrings-1.3.2/_build/bin -s /home/pterjan/rpmbuild/BUILD/golang-github-huandu-xstrings-1.3.2-build/xstrings-1.3.2/_build -V 1.3.2-2.mga10 -p /home/pterjan/rpmbuild/BUILD/golang-github-huandu-xstrings-1.3.2-build/BUILDROOT -g /usr/share/gocode -r '.*example.*'
Testing in: /home/pterjan/rpmbuild/BUILD/golang-github-huandu-xstrings-1.3.2-build/xstrings-1.3.2/_build/src
PATH: /home/pterjan/rpmbuild/BUILD/golang-github-huandu-xstrings-1.3.2-build/xstrings-1.3.2/_build/bin:/usr/local/bin:/usr/bin:/usr/local/sbin:/usr/sbin:/home/pterjan/.local/bin:/home/pterjan/bin
GOPATH: /home/pterjan/rpmbuild/BUILD/golang-github-huandu-xstrings-1.3.2-build/xstrings-1.3.2/_build:/usr/share/gocode
GO111MODULE: off
command: go test -buildmode pie -compiler gc -ldflags " -X github.com/huandu/xstrings/version=1.3.2 -extldflags '-Wl,--as-needed -Wl,--no-undefined -Wl,-z,relro -Wl,-z,now -Wl,-O1 -Wl,--build-id=sha1 -Wl,--enable-new-dtags -specs=/usr/lib/rpm/redhat/redhat-hardened-ld '"
testing: github.com/huandu/xstrings
github.com/huandu/xstrings
go: error obtaining buildID for go tool compile: fork/exec /usr/lib/golang/pkg/tool/linux_amd64/compile: resource temporarily unavailable
go: error obtaining buildID for go tool compile: fork/exec /usr/lib/golang/pkg/tool/linux_amd64/compile: resource temporarily unavailable
go: error obtaining buildID for go tool compile: fork/exec /usr/lib/golang/pkg/tool/linux_amd64/compile: resource temporarily unavailable
go: error obtaining buildID for go tool compile: fork/exec /usr/lib/golang/pkg/tool/linux_amd64/compile: resource temporarily unavailable
go: error obtaining buildID for go tool compile: fork/exec /usr/lib/golang/pkg/tool/linux_amd64/compile: resource temporarily unavailable
go: error obtaining buildID for go tool compile: fork/exec /usr/lib/golang/pkg/tool/linux_amd64/compile: resource temporarily unavailable
go: error obtaining buildID for go tool compile: fork/exec /usr/lib/golang/pkg/tool/linux_amd64/compile: resource temporarily unavailable
go: error obtaining buildID for go tool compile: fork/exec /usr/lib/golang/pkg/tool/linux_amd64/compile: resource temporarily unavailable
go: error obtaining buildID for go tool asm: fork/exec /usr/lib/golang/pkg/tool/linux_amd64/asm: resource temporarily unavailable
go: error obtaining buildID for go tool asm: fork/exec /usr/lib/golang/pkg/tool/linux_amd64/asm: resource temporarily unavailable
runtime: failed to create new OS thread (have 5 already; errno=11)
runtime: may need to increase max user processes (ulimit -u)
fatal error: newosproc
runtime: failed to create new OS thread (have 5 already; errno=11)
runtime: may need to increase max user processes (ulimit -u)
fatal error: newosproc

runtime stack:
runtime.throw({0xebc45d?, 0xc0000dfe10?})
        runtime/panic.go:1101 +0x48 fp=0xc0000dfde8 sp=0xc0000dfdb8 pc=0x4852e8
runtime.newosproc(0xc000100008)
        runtime/os_linux.go:201 +0x165 fp=0xc0000dfe58 sp=0xc0000dfde8 pc=0x445625
runtime.newm1(0xc000100008)
        runtime/proc.go:2829 +0xbf fp=0xc0000dfe98 sp=0xc0000dfe58 pc=0x450eff
runtime.newm(0x2272395108e35?, 0xc000050a08, 0x0?)
runtime/proc.go:2804 +0x125 fp=0xc0000dfec8 sp=0xc0000dfe98 pc=0x450dc5 runtime.startm(0xc000050a08?, 0x1, 0x0) runtime/proc.go:2998 +0x159 fp=0xc0000dff18 sp=0xc0000dfec8 pc=0x4513b9 runtime.wakep() runtime/proc.go:3145 +0xec fp=0xc0000dff48 sp=0xc0000dff18 pc=0x4855ac runtime.resetspinning() runtime/proc.go:3885 +0x3e fp=0xc0000dff68 sp=0xc0000dff48 pc=0x45413e runtime.schedule() runtime/proc.go:4038 +0x10f fp=0xc0000dffa0 sp=0xc0000dff68 pc=0x4545cf runtime.mstart1() runtime/proc.go:1862 +0xcd fp=0xc0000dffc8 sp=0xc0000dffa0 pc=0x44f98d runtime.mstart0() runtime/proc.go:1808 +0x75 fp=0xc0000dfff8 sp=0xc0000dffc8 pc=0x44f895 runtime.mstart() runtime/asm_amd64.s:395 +0x5 fp=0xc0000e0000 sp=0xc0000dfff8 pc=0x48aca5 goroutine 1 gp=0xc000002380 m=nil [chan receive, locked to thread]: runtime.gopark(0x7fd06ab12108?, 0x0?, 0x0?, 0x0?, 0x10?) runtime/proc.go:435 +0xce fp=0xc0000c0688 sp=0xc0000c0668 pc=0x48540e runtime.chanrecv(0xc000026100, 0x0, 0x1) runtime/chan.go:664 +0x445 fp=0xc0000c0700 sp=0xc0000c0688 pc=0x413f25 runtime.chanrecv1(0x0?, 0x2?) runtime/chan.go:506 +0x12 fp=0xc0000c0728 sp=0xc0000c0700 pc=0x413ab2 runtime.gcenable() runtime/mgc.go:206 +0xb1 fp=0xc0000c0750 sp=0xc0000c0728 pc=0x423611 runtime.main() runtime/proc.go:211 +0x165 fp=0xc0000c07e0 sp=0xc0000c0750 pc=0x44bd05 runtime.goexit({}) runtime/asm_amd64.s:1700 +0x1 fp=0xc0000c07e8 sp=0xc0000c07e0 pc=0x48cbe1 goroutine 2 gp=0xc0000028c0 m=nil [runnable]: runtime.forcegchelper() runtime/proc.go:339 fp=0xc0000c0fe0 sp=0xc0000c0fd8 pc=0x44c0c0 runtime.goexit({}) runtime/asm_amd64.s:1700 +0x1 fp=0xc0000c0fe8 sp=0xc0000c0fe0 pc=0x48cbe1 created by runtime.init.7 in goroutine 1 runtime/proc.go:336 +0x1a goroutine 3 gp=0xc000002e00 m=nil [runnable]: runtime.gcenable.gowrap1() runtime/mgc.go:204 fp=0xc0000c17e0 sp=0xc0000c17d8 pc=0x4236a0 runtime.goexit({}) runtime/asm_amd64.s:1700 +0x1 fp=0xc0000c17e8 sp=0xc0000c17e0 pc=0x48cbe1 created by runtime.gcenable in goroutine 1 runtime/mgc.go:204 +0x66 goroutine 4 gp=0xc000002fc0 m=nil [runnable]: runtime.gcenable.gowrap2() runtime/mgc.go:205 fp=0xc0000c1fe0 sp=0xc0000c1fd8 pc=0x423640 runtime.goexit({}) runtime/asm_amd64.s:1700 +0x1 fp=0xc0000c1fe8 sp=0xc0000c1fe0 pc=0x48cbe1 created by runtime.gcenable in goroutine 1 runtime/mgc.go:205 +0xa5 runtime stack: runtime.throw({0xebc45d?, 0x7ffda5e0f820?}) runtime/panic.go:1101 +0x48 fp=0x7ffda5e0f7f8 sp=0x7ffda5e0f7c8 pc=0x4852e8 runtime.newosproc(0xc0000c7008) runtime/os_linux.go:201 +0x165 fp=0x7ffda5e0f868 sp=0x7ffda5e0f7f8 pc=0x445625 runtime.newm1(0xc0000c7008) runtime/proc.go:2829 +0xbf fp=0x7ffda5e0f8a8 sp=0x7ffda5e0f868 pc=0x450eff runtime.newm(0x7fd06ab12108?, 0xc00004c008, 0x7ffda5e0f9f0?) runtime/proc.go:2804 +0x125 fp=0x7ffda5e0f8d8 sp=0x7ffda5e0f8a8 pc=0x450dc5 runtime.startm(0xc00004c008?, 0x0, 0x0) runtime/proc.go:2998 +0x159 fp=0x7ffda5e0f928 sp=0x7ffda5e0f8d8 pc=0x4513b9 runtime.handoffp(0x2000?) 
runtime/proc.go:3039 +0x358 fp=0x7ffda5e0f950 sp=0x7ffda5e0f928 pc=0x451898 runtime.stoplockedm() runtime/proc.go:3161 +0x52 fp=0x7ffda5e0f9a8 sp=0x7ffda5e0f950 pc=0x451912 runtime.schedule() runtime/proc.go:3999 +0x3a fp=0x7ffda5e0f9e0 sp=0x7ffda5e0f9a8 pc=0x4544fa runtime.park_m(0xc000002380) runtime/proc.go:4144 +0x285 fp=0x7ffda5e0fa40 sp=0x7ffda5e0f9e0 pc=0x454a25 runtime.mcall() runtime/asm_amd64.s:459 +0x4e fp=0x7ffda5e0fa58 sp=0x7ffda5e0fa40 pc=0x48ad2e runtime: failed to create new OS thread (have 5 already; errno=11) runtime: may need to increase max user processes (ulimit -u) fatal error: newosproc runtime: failed to create new OS thread (have 5 already; errno=11) runtime: may need to increase max user processes (ulimit -u) fatal error: newosproc runtime stack: runtime.throw({0xebc45d?, 0xc0000dfe10?}) runtime/panic.go:1101 +0x48 fp=0xc0000dfde8 sp=0xc0000dfdb8 pc=0x4852e8 runtime.newosproc(0xc000100008) runtime/os_linux.go:201 +0x165 fp=0xc0000dfe58 sp=0xc0000dfde8 pc=0x445625 runtime.newm1(0xc000100008) runtime/proc.go:2829 +0xbf fp=0xc0000dfe98 sp=0xc0000dfe58 pc=0x450eff runtime.newm(0x227239510d4c1?, 0xc000050a08, 0x0?) runtime/proc.go:2804 +0x125 fp=0xc0000dfec8 sp=0xc0000dfe98 pc=0x450dc5 runtime.startm(0xc000050a08?, 0x1, 0x0) runtime/proc.go:2998 +0x159 fp=0xc0000dff18 sp=0xc0000dfec8 pc=0x4513b9 runtime.wakep() runtime/proc.go:3145 +0xec fp=0xc0000dff48 sp=0xc0000dff18 pc=0x4855ac runtime.resetspinning() runtime/proc.go:3885 +0x3e fp=0xc0000dff68 sp=0xc0000dff48 pc=0x45413e runtime.schedule() runtime/proc.go:4038 +0x10f fp=0xc0000dffa0 sp=0xc0000dff68 pc=0x4545cf runtime.mstart1() runtime/proc.go:1862 +0xcd fp=0xc0000dffc8 sp=0xc0000dffa0 pc=0x44f98d runtime.mstart0() runtime/proc.go:1808 +0x75 fp=0xc0000dfff8 sp=0xc0000dffc8 pc=0x44f895 runtime.mstart() runtime/asm_amd64.s:395 +0x5 fp=0xc0000e0000 sp=0xc0000dfff8 pc=0x48aca5 goroutine 1 gp=0xc000002380 m=nil [chan receive, locked to thread]: runtime.gopark(0x7faa0ecc9108?, 0x0?, 0x0?, 0x0?, 0x10?) runtime/proc.go:435 +0xce fp=0xc0000c0688 sp=0xc0000c0668 pc=0x48540e runtime.chanrecv(0xc000026100, 0x0, 0x1) runtime/chan.go:664 +0x445 fp=0xc0000c0700 sp=0xc0000c0688 pc=0x413f25 runtime.chanrecv1(0x0?, 0x2?) 
runtime/chan.go:506 +0x12 fp=0xc0000c0728 sp=0xc0000c0700 pc=0x413ab2 runtime.gcenable() runtime/mgc.go:206 +0xb1 fp=0xc0000c0750 sp=0xc0000c0728 pc=0x423611 runtime.main() runtime/proc.go:211 +0x165 fp=0xc0000c07e0 sp=0xc0000c0750 pc=0x44bd05 runtime.goexit({}) runtime/asm_amd64.s:1700 +0x1 fp=0xc0000c07e8 sp=0xc0000c07e0 pc=0x48cbe1 goroutine 2 gp=0xc0000028c0 m=nil [runnable]: runtime.forcegchelper() runtime/proc.go:339 fp=0xc0000c0fe0 sp=0xc0000c0fd8 pc=0x44c0c0 runtime.goexit({}) runtime/asm_amd64.s:1700 +0x1 fp=0xc0000c0fe8 sp=0xc0000c0fe0 pc=0x48cbe1 created by runtime.init.7 in goroutine 1 runtime/proc.go:336 +0x1a goroutine 3 gp=0xc000002e00 m=nil [runnable]: runtime.gcenable.gowrap1() runtime/mgc.go:204 fp=0xc0000c17e0 sp=0xc0000c17d8 pc=0x4236a0 runtime.goexit({}) runtime/asm_amd64.s:1700 +0x1 fp=0xc0000c17e8 sp=0xc0000c17e0 pc=0x48cbe1 created by runtime.gcenable in goroutine 1 runtime/mgc.go:204 +0x66 goroutine 4 gp=0xc000002fc0 m=nil [runnable]: runtime.gcenable.gowrap2() runtime/mgc.go:205 fp=0xc0000c1fe0 sp=0xc0000c1fd8 pc=0x423640 runtime.goexit({}) runtime/asm_amd64.s:1700 +0x1 fp=0xc0000c1fe8 sp=0xc0000c1fe0 pc=0x48cbe1 created by runtime.gcenable in goroutine 1 runtime/mgc.go:205 +0xa5 runtime stack: runtime.throw({0xebc45d?, 0x7ffd5721de30?}) runtime/panic.go:1101 +0x48 fp=0x7ffd5721de08 sp=0x7ffd5721ddd8 pc=0x4852e8 runtime.newosproc(0xc0000c7008) runtime/os_linux.go:201 +0x165 fp=0x7ffd5721de78 sp=0x7ffd5721de08 pc=0x445625 runtime.newm1(0xc0000c7008) runtime/proc.go:2829 +0xbf fp=0x7ffd5721deb8 sp=0x7ffd5721de78 pc=0x450eff runtime.newm(0x7faa0ecc9108?, 0xc00004c008, 0x7ffd5721e000?) runtime/proc.go:2804 +0x125 fp=0x7ffd5721dee8 sp=0x7ffd5721deb8 pc=0x450dc5 runtime.startm(0xc00004c008?, 0x0, 0x0) runtime/proc.go:2998 +0x159 fp=0x7ffd5721df38 sp=0x7ffd5721dee8 pc=0x4513b9 runtime.handoffp(0x2000?) runtime/proc.go:3039 +0x358 fp=0x7ffd5721df60 sp=0x7ffd5721df38 pc=0x451898 runtime.stoplockedm() runtime/proc.go:3161 +0x52 fp=0x7ffd5721dfb8 sp=0x7ffd5721df60 pc=0x451912 runtime.schedule() runtime/proc.go:3999 +0x3a fp=0x7ffd5721dff0 sp=0x7ffd5721dfb8 pc=0x4544fa runtime.park_m(0xc000002380) runtime/proc.go:4144 +0x285 fp=0x7ffd5721e050 sp=0x7ffd5721dff0 pc=0x454a25 runtime.mcall() runtime/asm_amd64.s:459 +0x4e fp=0x7ffd5721e068 sp=0x7ffd5721e050 pc=0x48ad2e runtime: failed to create new OS thread (have 3 already; errno=11) runtime: may need to increase max user processes (ulimit -u) fatal error: newosproc runtime stack: runtime.throw({0xebc45d?, 0x7fffd103b5f0?}) runtime/panic.go:1101 +0x48 fp=0x7fffd103b5c8 sp=0x7fffd103b598 pc=0x4852e8 runtime.newosproc(0xc0000c6808) runtime/os_linux.go:201 +0x165 fp=0x7fffd103b638 sp=0x7fffd103b5c8 pc=0x445625 runtime.newm1(0xc0000c6808) runtime/proc.go:2829 +0xbf fp=0x7fffd103b678 sp=0x7fffd103b638 pc=0x450eff runtime.newm(0x22723950f98b4?, 0xc00004e508, 0x7fffd103b718?) 
runtime/proc.go:2804 +0x125 fp=0x7fffd103b6a8 sp=0x7fffd103b678 pc=0x450dc5 runtime.startm(0xc00004e508?, 0x1, 0x0) runtime/proc.go:2998 +0x159 fp=0x7fffd103b6f8 sp=0x7fffd103b6a8 pc=0x4513b9 runtime.wakep() runtime/proc.go:3145 +0xec fp=0x7fffd103b728 sp=0x7fffd103b6f8 pc=0x4855ac runtime.newproc.func1() runtime/proc.go:5030 +0x53 fp=0x7fffd103b758 sp=0x7fffd103b728 pc=0x456d13 runtime.systemstack(0x48f59f) runtime/asm_amd64.s:514 +0x4a fp=0x7fffd103b768 sp=0x7fffd103b758 pc=0x48adaa goroutine 1 gp=0xc000002380 m=0 mp=0x168a860 [running, locked to thread]: runtime.systemstack_switch() runtime/asm_amd64.s:479 +0x8 fp=0xc0000c05d8 sp=0xc0000c05c8 pc=0x48ad48 runtime.newproc(0x180ffffffff?) runtime/proc.go:5023 +0x3f fp=0xc0000c0610 sp=0xc0000c05d8 pc=0x456c7f runtime.init.7() runtime/proc.go:336 +0x1a fp=0xc0000c0628 sp=0xc0000c0610 pc=0x44c09a runtime.doInit1(0x15ac860) runtime/proc.go:7353 +0xd8 fp=0xc0000c0750 sp=0xc0000c0628 pc=0x45b978 runtime.doInit(...) runtime/proc.go:7320 runtime.main() runtime/proc.go:201 +0x11c fp=0xc0000c07e0 sp=0xc0000c0750 pc=0x44bcbc runtime.goexit({}) runtime/asm_amd64.s:1700 +0x1 fp=0xc0000c07e8 sp=0xc0000c07e0 pc=0x48cbe1 goroutine 2 gp=0xc0000028c0 m=nil [runnable]: runtime.forcegchelper() runtime/proc.go:339 fp=0xc0000c0fe0 sp=0xc0000c0fd8 pc=0x44c0c0 runtime.goexit({}) runtime/asm_amd64.s:1700 +0x1 fp=0xc0000c0fe8 sp=0xc0000c0fe0 pc=0x48cbe1 created by runtime.init.7 in goroutine 1 runtime/proc.go:336 +0x1a runtime: failed to create new OS thread (have 3 already; errno=11) runtime: may need to increase max user processes (ulimit -u) fatal error: newosproc runtime stack: runtime.throw({0xebc45d?, 0x7ffdbe88f3c0?}) runtime/panic.go:1101 +0x48 fp=0x7ffdbe88f398 sp=0x7ffdbe88f368 pc=0x4852e8 runtime.newosproc(0xc0000c6808) runtime/os_linux.go:201 +0x165 fp=0x7ffdbe88f408 sp=0x7ffdbe88f398 pc=0x445625 runtime.newm1(0xc0000c6808) runtime/proc.go:2829 +0xbf fp=0x7ffdbe88f448 sp=0x7ffdbe88f408 pc=0x450eff runtime.newm(0x22723950fbe3d?, 0xc00004e508, 0x7ffdbe88f4e8?) runtime/proc.go:2804 +0x125 fp=0x7ffdbe88f478 sp=0x7ffdbe88f448 pc=0x450dc5 runtime.startm(0xc00004e508?, 0x1, 0x0) runtime/proc.go:2998 +0x159 fp=0x7ffdbe88f4c8 sp=0x7ffdbe88f478 pc=0x4513b9 runtime.wakep() runtime/proc.go:3145 +0xec fp=0x7ffdbe88f4f8 sp=0x7ffdbe88f4c8 pc=0x4855ac runtime.newproc.func1() runtime/proc.go:5030 +0x53 fp=0x7ffdbe88f528 sp=0x7ffdbe88f4f8 pc=0x456d13 runtime.systemstack(0x48f59f) runtime/asm_amd64.s:514 +0x4a fp=0x7ffdbe88f538 sp=0x7ffdbe88f528 pc=0x48adaa goroutine 1 gp=0xc000002380 m=0 mp=0x168a860 [running, locked to thread]: runtime.systemstack_switch() runtime/asm_amd64.s:479 +0x8 fp=0xc0000c05d8 sp=0xc0000c05c8 pc=0x48ad48 runtime.newproc(0x180ffffffff?) runtime/proc.go:5023 +0x3f fp=0xc0000c0610 sp=0xc0000c05d8 pc=0x456c7f runtime.init.7() runtime/proc.go:336 +0x1a fp=0xc0000c0628 sp=0xc0000c0610 pc=0x44c09a runtime.doInit1(0x15ac860) runtime/proc.go:7353 +0xd8 fp=0xc0000c0750 sp=0xc0000c0628 pc=0x45b978 runtime.doInit(...) 
runtime/proc.go:7320 runtime.main() runtime/proc.go:201 +0x11c fp=0xc0000c07e0 sp=0xc0000c0750 pc=0x44bcbc runtime.goexit({}) runtime/asm_amd64.s:1700 +0x1 fp=0xc0000c07e8 sp=0xc0000c07e0 pc=0x48cbe1 goroutine 2 gp=0xc0000028c0 m=nil [runnable]: runtime.forcegchelper() runtime/proc.go:339 fp=0xc0000c0fe0 sp=0xc0000c0fd8 pc=0x44c0c0 runtime.goexit({}) runtime/asm_amd64.s:1700 +0x1 fp=0xc0000c0fe8 sp=0xc0000c0fe0 pc=0x48cbe1 created by runtime.init.7 in goroutine 1 runtime/proc.go:336 +0x1a runtime: failed to create new OS thread (have 5 already; errno=11) runtime: may need to increase max user processes (ulimit -u) fatal error: newosproc runtime: failed to create new OS thread (have 5 already; errno=11) runtime: may need to increase max user processes (ulimit -u) fatal error: newosproc runtime stack: runtime.throw({0xebc45d?, 0xc0000dfe10?}) runtime/panic.go:1101 +0x48 fp=0xc0000dfde8 sp=0xc0000dfdb8 pc=0x4852e8 runtime.newosproc(0xc000100008) runtime/os_linux.go:201 +0x165 fp=0xc0000dfe58 sp=0xc0000dfde8 pc=0x445625 runtime.newm1(0xc000100008) runtime/proc.go:2829 +0xbf fp=0xc0000dfe98 sp=0xc0000dfe58 pc=0x450eff runtime.newm(0x22723951043a4?, 0xc000050a08, 0x0?) runtime/proc.go:2804 +0x125 fp=0xc0000dfec8 sp=0xc0000dfe98 pc=0x450dc5 runtime.startm(0xc000050a08?, 0x1, 0x0) runtime/proc.go:2998 +0x159 fp=0xc0000dff18 sp=0xc0000dfec8 pc=0x4513b9 runtime.wakep() runtime/proc.go:3145 +0xec fp=0xc0000dff48 sp=0xc0000dff18 pc=0x4855ac runtime.resetspinning() runtime/proc.go:3885 +0x3e fp=0xc0000dff68 sp=0xc0000dff48 pc=0x45413e runtime.schedule() runtime/proc.go:4038 +0x10f fp=0xc0000dffa0 sp=0xc0000dff68 pc=0x4545cf runtime.mstart1() runtime/proc.go:1862 +0xcd fp=0xc0000dffc8 sp=0xc0000dffa0 pc=0x44f98d runtime.mstart0() runtime/proc.go:1808 +0x75 fp=0xc0000dfff8 sp=0xc0000dffc8 pc=0x44f895 runtime.mstart() runtime/asm_amd64.s:395 +0x5 fp=0xc0000e0000 sp=0xc0000dfff8 pc=0x48aca5 goroutine 1 gp=0xc000002380 m=nil [chan receive, locked to thread]: runtime.gopark(0x7ff32d608108?, 0x0?, 0x0?, 0x0?, 0x10?) runtime/proc.go:435 +0xce fp=0xc0000c0688 sp=0xc0000c0668 pc=0x48540e runtime.chanrecv(0xc000026100, 0x0, 0x1) runtime/chan.go:664 +0x445 fp=0xc0000c0700 sp=0xc0000c0688 pc=0x413f25 runtime.chanrecv1(0x0?, 0x2?) 
runtime/chan.go:506 +0x12 fp=0xc0000c0728 sp=0xc0000c0700 pc=0x413ab2 runtime.gcenable() runtime/mgc.go:206 +0xb1 fp=0xc0000c0750 sp=0xc0000c0728 pc=0x423611 runtime.main() runtime/proc.go:211 +0x165 fp=0xc0000c07e0 sp=0xc0000c0750 pc=0x44bd05 runtime.goexit({}) runtime/asm_amd64.s:1700 +0x1 fp=0xc0000c07e8 sp=0xc0000c07e0 pc=0x48cbe1 goroutine 2 gp=0xc0000028c0 m=nil [runnable]: runtime.forcegchelper() runtime/proc.go:339 fp=0xc0000c0fe0 sp=0xc0000c0fd8 pc=0x44c0c0 runtime.goexit({}) runtime/asm_amd64.s:1700 +0x1 fp=0xc0000c0fe8 sp=0xc0000c0fe0 pc=0x48cbe1 created by runtime.init.7 in goroutine 1 runtime/proc.go:336 +0x1a goroutine 3 gp=0xc000002e00 m=nil [runnable]: runtime.gcenable.gowrap1() runtime/mgc.go:204 fp=0xc0000c17e0 sp=0xc0000c17d8 pc=0x4236a0 runtime.goexit({}) runtime/asm_amd64.s:1700 +0x1 fp=0xc0000c17e8 sp=0xc0000c17e0 pc=0x48cbe1 created by runtime.gcenable in goroutine 1 runtime/mgc.go:204 +0x66 goroutine 4 gp=0xc000002fc0 m=nil [runnable]: runtime.gcenable.gowrap2() runtime/mgc.go:205 fp=0xc0000c1fe0 sp=0xc0000c1fd8 pc=0x423640 runtime.goexit({}) runtime/asm_amd64.s:1700 +0x1 fp=0xc0000c1fe8 sp=0xc0000c1fe0 pc=0x48cbe1 created by runtime.gcenable in goroutine 1 runtime/mgc.go:205 +0xa5 runtime stack: runtime.throw({0xebc45d?, 0x7ffcc2678a90?}) runtime/panic.go:1101 +0x48 fp=0x7ffcc2678a68 sp=0x7ffcc2678a38 pc=0x4852e8 runtime.newosproc(0xc0000c7008) runtime/os_linux.go:201 +0x165 fp=0x7ffcc2678ad8 sp=0x7ffcc2678a68 pc=0x445625 runtime.newm1(0xc0000c7008) runtime/proc.go:2829 +0xbf fp=0x7ffcc2678b18 sp=0x7ffcc2678ad8 pc=0x450eff runtime.newm(0x7ff32d608108?, 0xc00004c008, 0x7ffcc2678c60?) runtime/proc.go:2804 +0x125 fp=0x7ffcc2678b48 sp=0x7ffcc2678b18 pc=0x450dc5 runtime.startm(0xc00004c008?, 0x0, 0x0) runtime/proc.go:2998 +0x159 fp=0x7ffcc2678b98 sp=0x7ffcc2678b48 pc=0x4513b9 runtime.handoffp(0x2000?) runtime/proc.go:3039 +0x358 fp=0x7ffcc2678bc0 sp=0x7ffcc2678b98 pc=0x451898 runtime.stoplockedm() runtime/proc.go:3161 +0x52 fp=0x7ffcc2678c18 sp=0x7ffcc2678bc0 pc=0x451912 runtime.schedule() runtime/proc.go:3999 +0x3a fp=0x7ffcc2678c50 sp=0x7ffcc2678c18 pc=0x4544fa runtime.park_m(0xc000002380) runtime/proc.go:4144 +0x285 fp=0x7ffcc2678cb0 sp=0x7ffcc2678c50 pc=0x454a25 runtime.mcall() runtime/asm_amd64.s:459 +0x4e fp=0x7ffcc2678cc8 sp=0x7ffcc2678cb0 pc=0x48ad2e runtime: failed to create new OS thread (have 3 already; errno=11) runtime: may need to increase max user processes (ulimit -u) fatal error: newosproc runtime stack: runtime.throw({0xebc45d?, 0x7ffcd0849b80?}) runtime/panic.go:1101 +0x48 fp=0x7ffcd0849b58 sp=0x7ffcd0849b28 pc=0x4852e8 runtime.newosproc(0xc0000c6808) runtime/os_linux.go:201 +0x165 fp=0x7ffcd0849bc8 sp=0x7ffcd0849b58 pc=0x445625 runtime.newm1(0xc0000c6808) runtime/proc.go:2829 +0xbf fp=0x7ffcd0849c08 sp=0x7ffcd0849bc8 pc=0x450eff runtime.newm(0x22723950ffee4?, 0xc00004e508, 0x7ffcd0849ca8?) 
runtime/proc.go:2804 +0x125 fp=0x7ffcd0849c38 sp=0x7ffcd0849c08 pc=0x450dc5 runtime.startm(0xc00004e508?, 0x1, 0x0) runtime/proc.go:2998 +0x159 fp=0x7ffcd0849c88 sp=0x7ffcd0849c38 pc=0x4513b9 runtime.wakep() runtime/proc.go:3145 +0xec fp=0x7ffcd0849cb8 sp=0x7ffcd0849c88 pc=0x4855ac runtime.newproc.func1() runtime/proc.go:5030 +0x53 fp=0x7ffcd0849ce8 sp=0x7ffcd0849cb8 pc=0x456d13 runtime.systemstack(0x48f59f) runtime/asm_amd64.s:514 +0x4a fp=0x7ffcd0849cf8 sp=0x7ffcd0849ce8 pc=0x48adaa goroutine 1 gp=0xc000002380 m=0 mp=0x168a860 [running, locked to thread]: runtime.systemstack_switch() runtime/asm_amd64.s:479 +0x8 fp=0xc0000c05d8 sp=0xc0000c05c8 pc=0x48ad48 runtime.newproc(0x180ffffffff?) runtime/proc.go:5023 +0x3f fp=0xc0000c0610 sp=0xc0000c05d8 pc=0x456c7f runtime.init.7() runtime/proc.go:336 +0x1a fp=0xc0000c0628 sp=0xc0000c0610 pc=0x44c09a runtime.doInit1(0x15ac860) runtime/proc.go:7353 +0xd8 fp=0xc0000c0750 sp=0xc0000c0628 pc=0x45b978 runtime.doInit(...) runtime/proc.go:7320 runtime.main() runtime/proc.go:201 +0x11c fp=0xc0000c07e0 sp=0xc0000c0750 pc=0x44bcbc runtime.goexit({}) runtime/asm_amd64.s:1700 +0x1 fp=0xc0000c07e8 sp=0xc0000c07e0 pc=0x48cbe1 goroutine 2 gp=0xc0000028c0 m=nil [runnable]: runtime.forcegchelper() runtime/proc.go:339 fp=0xc0000c0fe0 sp=0xc0000c0fd8 pc=0x44c0c0 runtime.goexit({}) runtime/asm_amd64.s:1700 +0x1 fp=0xc0000c0fe8 sp=0xc0000c0fe0 pc=0x48cbe1 created by runtime.init.7 in goroutine 1 runtime/proc.go:336 +0x1a go: error obtaining buildID for go tool compile: exit status 2 runtime: failed to create new OS thread (have 3 already; errno=11) runtime: may need to increase max user processes (ulimit -u) fatal error: newosproc runtime stack: runtime.throw({0xebc45d?, 0x7ffcecbea3f0?}) runtime/panic.go:1101 +0x48 fp=0x7ffcecbea3c8 sp=0x7ffcecbea398 pc=0x4852e8 runtime.newosproc(0xc0000c6808) runtime/os_linux.go:201 +0x165 fp=0x7ffcecbea438 sp=0x7ffcecbea3c8 pc=0x445625 runtime.newm1(0xc0000c6808) runtime/proc.go:2829 +0xbf fp=0x7ffcecbea478 sp=0x7ffcecbea438 pc=0x450eff runtime.newm(0x22723950f8040?, 0xc00004e508, 0x7ffcecbea518?) runtime/proc.go:2804 +0x125 fp=0x7ffcecbea4a8 sp=0x7ffcecbea478 pc=0x450dc5 runtime.startm(0xc00004e508?, 0x1, 0x0) runtime/proc.go:2998 +0x159 fp=0x7ffcecbea4f8 sp=0x7ffcecbea4a8 pc=0x4513b9 runtime.wakep() runtime/proc.go:3145 +0xec fp=0x7ffcecbea528 sp=0x7ffcecbea4f8 pc=0x4855ac runtime.newproc.func1() runtime/proc.go:5030 +0x53 fp=0x7ffcecbea558 sp=0x7ffcecbea528 pc=0x456d13 runtime.systemstack(0x48f59f) runtime/asm_amd64.s:514 +0x4a fp=0x7ffcecbea568 sp=0x7ffcecbea558 pc=0x48adaa goroutine 1 gp=0xc000002380 m=0 mp=0x168a860 [running, locked to thread]: runtime.systemstack_switch() runtime/asm_amd64.s:479 +0x8 fp=0xc0000c05d8 sp=0xc0000c05c8 pc=0x48ad48 runtime.newproc(0x180ffffffff?) runtime/proc.go:5023 +0x3f fp=0xc0000c0610 sp=0xc0000c05d8 pc=0x456c7f runtime.init.7() runtime/proc.go:336 +0x1a fp=0xc0000c0628 sp=0xc0000c0610 pc=0x44c09a runtime.doInit1(0x15ac860) runtime/proc.go:7353 +0xd8 fp=0xc0000c0750 sp=0xc0000c0628 pc=0x45b978 runtime.doInit(...) 
runtime/proc.go:7320 runtime.main() runtime/proc.go:201 +0x11c fp=0xc0000c07e0 sp=0xc0000c0750 pc=0x44bcbc runtime.goexit({}) runtime/asm_amd64.s:1700 +0x1 fp=0xc0000c07e8 sp=0xc0000c07e0 pc=0x48cbe1 goroutine 2 gp=0xc0000028c0 m=nil [runnable]: runtime.forcegchelper() runtime/proc.go:339 fp=0xc0000c0fe0 sp=0xc0000c0fd8 pc=0x44c0c0 runtime.goexit({}) runtime/asm_amd64.s:1700 +0x1 fp=0xc0000c0fe8 sp=0xc0000c0fe0 pc=0x48cbe1 created by runtime.init.7 in goroutine 1 runtime/proc.go:336 +0x1a go: error obtaining buildID for go tool compile: exit status 2 go: error obtaining buildID for go tool compile: exit status 2 runtime: failed to create new OS thread (have 5 already; errno=11) runtime: may need to increase max user processes (ulimit -u) fatal error: newosproc runtime: failed to create new OS thread (have 5 already; errno=11) runtime: may need to increase max user processes (ulimit -u) fatal error: newosproc runtime stack: runtime.throw({0xebc45d?, 0x7ffdd6c08580?}) runtime/panic.go:1101 +0x48 fp=0x7ffdd6c08558 sp=0x7ffdd6c08528 pc=0x4852e8 runtime.newosproc(0xc0000c7008) runtime/os_linux.go:201 +0x165 fp=0x7ffdd6c085c8 sp=0x7ffdd6c08558 pc=0x445625 runtime.newm1(0xc0000c7008) runtime/proc.go:2829 +0xbf fp=0x7ffdd6c08608 sp=0x7ffdd6c085c8 pc=0x450eff runtime.newm(0x7fcc61925108?, 0xc00004c008, 0x7ffdd6c08750?) runtime/proc.go:2804 +0x125 fp=0x7ffdd6c08638 sp=0x7ffdd6c08608 pc=0x450dc5 runtime.startm(0xc00004c008?, 0x0, 0x0) runtime/proc.go:2998 +0x159 fp=0x7ffdd6c08688 sp=0x7ffdd6c08638 pc=0x4513b9 runtime.handoffp(0x2000?) runtime/proc.go:3039 +0x358 fp=0x7ffdd6c086b0 sp=0x7ffdd6c08688 pc=0x451898 runtime.stoplockedm() runtime/proc.go:3161 +0x52 fp=0x7ffdd6c08708 sp=0x7ffdd6c086b0 pc=0x451912 runtime.schedule() runtime/proc.go:3999 +0x3a fp=0x7ffdd6c08740 sp=0x7ffdd6c08708 pc=0x4544fa runtime.park_m(0xc000002380) runtime/proc.go:4144 +0x285 fp=0x7ffdd6c087a0 sp=0x7ffdd6c08740 pc=0x454a25 runtime.mcall() runtime/asm_amd64.s:459 +0x4e fp=0x7ffdd6c087b8 sp=0x7ffdd6c087a0 pc=0x48ad2e goroutine 1 gp=0xc000002380 m=nil [chan receive, locked to thread]: runtime.gopark(0x7fcc61925108?, 0x0?, 0x0?, 0x0?, 0x10?) runtime/proc.go:435 +0xce fp=0xc0000c0688 sp=0xc0000c0668 pc=0x48540e runtime.chanrecv(0xc000026100, 0x0, 0x1) runtime/chan.go:664 +0x445 fp=0xc0000c0700 sp=0xc0000c0688 pc=0x413f25 runtime.chanrecv1(0x0?, 0x2?) 
        runtime/chan.go:506 +0x12 fp=0xc0000c0728 sp=0xc0000c0700 pc=0x413ab2
runtime.gcenable()
        runtime/mgc.go:206 +0xb1 fp=0xc0000c0750 sp=0xc0000c0728 pc=0x423611
runtime.main()
        runtime/proc.go:211 +0x165 fp=0xc0000c07e0 sp=0xc0000c0750 pc=0x44bd05
runtime.goexit({})
        runtime/asm_amd64.s:1700 +0x1 fp=0xc0000c07e8 sp=0xc0000c07e0 pc=0x48cbe1

goroutine 2 gp=0xc0000028c0 m=nil [runnable]:
runtime.forcegchelper()
        runtime/proc.go:339 fp=0xc0000c0fe0 sp=0xc0000c0fd8 pc=0x44c0c0
runtime.goexit({})
        runtime/asm_amd64.s:1700 +0x1 fp=0xc0000c0fe8 sp=0xc0000c0fe0 pc=0x48cbe1
created by runtime.init.7 in goroutine 1
        runtime/proc.go:336 +0x1a

goroutine 3 gp=0xc000002e00 m=nil [runnable]:
runtime.gcenable.gowrap1()
        runtime/mgc.go:204 fp=0xc0000c17e0 sp=0xc0000c17d8 pc=0x4236a0
runtime.goexit({})
        runtime/asm_amd64.s:1700 +0x1 fp=0xc0000c17e8 sp=0xc0000c17e0 pc=0x48cbe1
created by runtime.gcenable in goroutine 1
        runtime/mgc.go:204 +0x66

goroutine 4 gp=0xc000002fc0 m=nil [runnable]:
runtime.gcenable.gowrap2()
        runtime/mgc.go:205 fp=0xc0000c1fe0 sp=0xc0000c1fd8 pc=0x423640
runtime.goexit({})
        runtime/asm_amd64.s:1700 +0x1 fp=0xc0000c1fe8 sp=0xc0000c1fe0 pc=0x48cbe1
created by runtime.gcenable in goroutine 1
        runtime/mgc.go:205 +0xa5

runtime stack:
runtime.throw({0xebc45d?, 0xc0000dfe10?})
        runtime/panic.go:1101 +0x48 fp=0xc0000dfde8 sp=0xc0000dfdb8 pc=0x4852e8
runtime.newosproc(0xc000100008)
        runtime/os_linux.go:201 +0x165 fp=0xc0000dfe58 sp=0xc0000dfde8 pc=0x445625
runtime.newm1(0xc000100008)
        runtime/proc.go:2829 +0xbf fp=0xc0000dfe98 sp=0xc0000dfe58 pc=0x450eff
runtime.newm(0x22723951070d5?, 0xc000050a08, 0x0?)
        runtime/proc.go:2804 +0x125 fp=0xc0000dfec8 sp=0xc0000dfe98 pc=0x450dc5
runtime.startm(0xc000050a08?, 0x1, 0x0)
        runtime/proc.go:2998 +0x159 fp=0xc0000dff18 sp=0xc0000dfec8 pc=0x4513b9
runtime.wakep()
        runtime/proc.go:3145 +0xec fp=0xc0000dff48 sp=0xc0000dff18 pc=0x4855ac
runtime.resetspinning()
        runtime/proc.go:3885 +0x3e fp=0xc0000dff68 sp=0xc0000dff48 pc=0x45413e
runtime.schedule()
        runtime/proc.go:4038 +0x10f fp=0xc0000dffa0 sp=0xc0000dff68 pc=0x4545cf
runtime.mstart1()
        runtime/proc.go:1862 +0xcd fp=0xc0000dffc8 sp=0xc0000dffa0 pc=0x44f98d
runtime.mstart0()
        runtime/proc.go:1808 +0x75 fp=0xc0000dfff8 sp=0xc0000dffc8 pc=0x44f895
runtime.mstart()
        runtime/asm_amd64.s:395 +0x5 fp=0xc0000e0000 sp=0xc0000dfff8 pc=0x48aca5
go: error obtaining buildID for go tool compile: exit status 2
go: error obtaining buildID for go tool compile: exit status 2
go: error obtaining buildID for go tool compile: exit status 2
go: error obtaining buildID for go tool compile: exit status 2
go: error obtaining buildID for go tool compile: exit status 2
go: unlinkat /tmp/go-build3803278121: directory not empty
error: Bad exit status from /home/pterjan/rpmbuild/tmp/rpm-tmp.hjBSsy (%check)

RPM build errors:
    Bad exit status from /home/pterjan/rpmbuild/tmp/rpm-tmp.hjBSsy (%check)
I: [iurt_root_command] ERROR: chroot
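
None of the crashes above point at a defect in xstrings itself: every failure is fork/exec or OS-thread creation returning EAGAIN ("Resource temporarily unavailable", errno=11), which the Go runtime reports as "may need to increase max user processes (ulimit -u)". In other words, the build user hit its per-user process/thread limit (RLIMIT_NPROC) inside the chroot while go test tried to launch the compile and asm tools in parallel. A minimal sketch of how one might confirm and work around this before retrying the build follows; the value 4096 is an arbitrary example, the limits.conf entry is only illustrative, and the correct place to set the limit for iurt-managed chroots may be the builder's own configuration rather than an interactive shell:

    # show the current soft limit on user processes/threads (assumption: run as the build user inside the chroot)
    ulimit -u
    # raise the soft limit for this shell session before invoking rpmbuild (example value)
    ulimit -u 4096
    # or make it persistent for the build account via pam_limits (example entry, example value)
    echo 'pterjan soft nproc 4096' >> /etc/security/limits.conf
    # if the limit cannot be raised, reducing the Go toolchain's parallelism lowers the number of concurrent processes
    GOMAXPROCS=2 go test -p 1 ./...

Whether the limit is best raised in the chroot setup, via limits.conf, or worked around by lowering build parallelism depends on how the build host is administered; the commands above only illustrate the knobs involved.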