Home / Go / go code snippets edit
Try Documentalist, my app that offers fast, offline access to 190+ programmer API docs.

must
// must panics with the error's message if err is non-nil.
// Use for errors that are programming mistakes or unrecoverable.
func must(err error) {
	if err == nil {
		return
	}
	panic(err.Error())
}
panicIf
// panicIf panics when cond is true. The optional args customize
// the panic message: arg[0] is the message (or a format string
// when more args follow), arg[1:] are its format arguments.
func panicIf(cond bool, arg ...interface{}) {
	if !cond {
		return
	}
	msg := "condition failed"
	switch {
	case len(arg) > 1:
		msg = fmt.Sprintf(fmt.Sprintf("%s", arg[0]), arg[1:]...)
	case len(arg) == 1:
		msg = fmt.Sprintf("%s", arg[0])
	}
	panic(msg)
}
logf
// logf prints s to stdout, formatting it with arg when format
// arguments are supplied; with no args s is printed verbatim
// (so a bare message containing '%' is safe).
func logf(s string, arg ...interface{}) {
	if len(arg) == 0 {
		fmt.Print(s)
		return
	}
	fmt.Printf(s, arg...)
}
logIfErr
// logIfErr logs the error's message via logf; no-op when err is nil.
// Safe even if the message contains '%' because logf with no extra
// args does not treat it as a format string.
func logIfErr(err error) {
	if err == nil {
		return
	}
	logf(err.Error())
}
isWindows
func isWindows() bool {
	return strings.Contains(runtime.GOOS, "windows")
}
userHomeDirMust
// userHomeDirMust returns the current user's home directory,
// panicking (via must) when it cannot be determined.
func userHomeDirMust() string {
	dir, err := os.UserHomeDir()
	must(err)
	return dir
}
humanizeSize
// humanizeSize formats a byte count using human-friendly units
// (GB, MB, kB, or raw bytes). A ".00" fraction is dropped, so
// 2048 renders as "2 kB" while 1536 renders as "1.50 kB".
// Values at exactly a unit boundary stay in the smaller unit.
func humanizeSize(i int64) string {
	const (
		kb int64 = 1 << 10
		mb       = kb << 10
		gb       = mb << 10
	)
	format := func(div int64, unit string) string {
		v := fmt.Sprintf("%.2f", float64(i)/float64(div))
		return strings.TrimSuffix(v, ".00") + " " + unit
	}
	switch {
	case i > gb:
		return format(gb, "GB")
	case i > mb:
		return format(mb, "MB")
	case i > kb:
		return format(kb, "kB")
	}
	return fmt.Sprintf("%d bytes", i)
}
non-blocking channel send
// non-blocking send: if ch is full (or has no ready receiver),
// the default case runs immediately instead of blocking
select {
case ch <- value:
default:
}
openBrowser
// from https://gist.github.com/hyg/9c4afcd91fe24316cbf0
// openBrowser launches url in the OS default browser. On failure,
// or on an unsupported platform, the process exits via log.Fatal.
func openBrowser(url string) {
	var cmd *exec.Cmd
	switch runtime.GOOS {
	case "linux":
		cmd = exec.Command("xdg-open", url)
	case "windows":
		cmd = exec.Command("rundll32", "url.dll,FileProtocolHandler", url)
	case "darwin":
		cmd = exec.Command("open", url)
	}
	if cmd == nil {
		log.Fatal(fmt.Errorf("unsupported platform"))
	}
	if err := cmd.Start(); err != nil {
		log.Fatal(err)
	}
}
normalizeNewlines
// normalizeNewlines converts all newline conventions in d to
// Unix LF. Uses bytes.ReplaceAll (idiomatic form of Replace with
// n = -1); also fixes the "CF (mac)" comment typo — classic Mac
// line endings are CR.
func normalizeNewlines(d []byte) []byte {
	// replace CR LF (windows) with LF (unix)
	d = bytes.ReplaceAll(d, []byte{'\r', '\n'}, []byte{'\n'})
	// replace CR (mac) with LF (unix)
	d = bytes.ReplaceAll(d, []byte{'\r'}, []byte{'\n'})
	return d
}
bytesRemoveFirstLine
// bytesRemoveFirstLine returns the first line of d (without the
// trailing newline) and the remaining bytes. When d contains no
// newline the whole input is the first line and the rest is nil.
func bytesRemoveFirstLine(d []byte) (string, []byte) {
	idx := bytes.IndexByte(d, '\n')
	if idx < 0 {
		return string(d), nil
	}
	return string(d[:idx]), d[idx+1:]
}
sliceRemoveDuplicateStrings
// sliceRemoveDuplicateStrings removes duplicate strings from a.
// It sorts a and then compacts it in place, returning the
// (possibly shorter) slice. Optimized for the common case of no
// duplicates, where no element is ever copied.
func sliceRemoveDuplicateStrings(a []string) []string {
	if len(a) < 2 {
		return a
	}
	sort.Strings(a)
	out := 1
	for i, s := range a[1:] {
		// a[i] is the element just before s in sorted order
		if s == a[i] {
			continue
		}
		if out != i+1 {
			a[out] = s
		}
		out++
	}
	return a[:out]
}
stringInSlice
// stringInSlice reports whether toCheck is an element of a.
func stringInSlice(a []string, toCheck string) bool {
	for i := range a {
		if a[i] == toCheck {
			return true
		}
	}
	return false
}
ProgressEstimator
package util

import (
	"sync"
	"time"
)

// ProgressEstimatorData contains fields readable after Next()
type ProgressEstimatorData struct {
	Total             int           // total number of items expected
	Curr              int           // items processed so far, including skipped ones
	Left              int           // Total - Curr
	PercDone          float64       // 0...1
	Skipped           int           // items marked via Skip(); excluded from the estimate
	TimeSoFar         time.Duration // elapsed time since the estimator was created
	EstimatedTimeLeft time.Duration // extrapolated remaining time
}

// ProgressEstimator is for estimating progress of processing a
// known number of items. next() locks the embedded mutex while
// updating; note that reading the embedded data fields directly
// is not synchronized — use the snapshot returned by Next()/Skip().
type ProgressEstimator struct {
	timeStart time.Time // when the estimator was created; basis for TimeSoFar
	sync.Mutex
	ProgressEstimatorData
}

// NewProgressEstimator creates a ProgressEstimator for total
// items, starting its clock now.
func NewProgressEstimator(total int) *ProgressEstimator {
	pe := &ProgressEstimator{
		timeStart: time.Now(),
	}
	pe.Total = total
	return pe
}

// next advances the estimator by one item (optionally marked as
// skipped) and returns a snapshot of the updated progress data.
func (pe *ProgressEstimator) next(isSkipped bool) ProgressEstimatorData {
	pe.Lock()
	defer pe.Unlock()

	if isSkipped {
		pe.Skipped++
	}
	pe.Curr++
	pe.Left = pe.Total - pe.Curr
	pe.TimeSoFar = time.Since(pe.timeStart)

	// base the extrapolation only on items that took real time
	effTotal := pe.Total - pe.Skipped
	effCurr := pe.Curr - pe.Skipped
	if effTotal == 0 || effCurr == 0 {
		// nothing to extrapolate from yet
		pe.EstimatedTimeLeft = pe.TimeSoFar
	} else {
		pe.PercDone = float64(effCurr) / float64(effTotal) // 0..1 range
		scale := float64(effTotal) / float64(effCurr)
		estimatedTotal := time.Duration(float64(pe.TimeSoFar) * scale)
		pe.EstimatedTimeLeft = estimatedTotal - pe.TimeSoFar
	}
	return pe.ProgressEstimatorData
}

// Next advances estimator by one completed item and returns a
// snapshot of the updated progress data.
func (pe *ProgressEstimator) Next() ProgressEstimatorData {
	return pe.next(false)
}

// Skip advances estimator but allows to mark this file as taking no time,
// to allow better estimates: skipped items are excluded from the
// time-per-item extrapolation done in next().
func (pe *ProgressEstimator) Skip() ProgressEstimatorData {
	return pe.next(true)
}
Debouncer
// Debouncer runs a given function, debounced: scheduling a new
// run cancels a previously scheduled, not-yet-executed one.
type Debouncer struct {
    // generation counter for the currently scheduled run; nil until
    // the first debounce() call.
    // NOTE(review): read/written without synchronization — this
    // assumes all debounce() calls happen on one goroutine; confirm.
    currDebounceID *int32
}

// NewDebouncer creates a new Debouncer with nothing scheduled.
func NewDebouncer() *Debouncer {
    return new(Debouncer)
}

// debounce schedules f to run after timeout, cancelling any
// previously scheduled and not-yet-executed run.
//
// Fixes two defects in the earlier version:
//   - the timer goroutine looped forever after firing, leaking a
//     goroutine that woke up once per timeout interval;
//   - when the previous run had already executed, debounce returned
//     early and never scheduled the new run at all.
//
// NOTE(review): currDebounceID is still accessed without locking,
// so debounce must be called from a single goroutine — confirm.
func (d *Debouncer) debounce(f func(), timeout time.Duration) {
    if d.currDebounceID != nil {
        // cancel the currently scheduled run; if it already fired
        // this is a harmless extra increment
        atomic.AddInt32(d.currDebounceID, 1)
    }

    d.currDebounceID = new(int32)
    go func(f func(), timeout time.Duration, debounceID *int32) {
        <-time.After(timeout)
        v := atomic.AddInt32(debounceID, 1)
        // if v != 1, this run was cancelled in the meantime
        if v == 1 {
            f()
        }
    }(f, timeout, d.currDebounceID)
}

// articleLoadDebouncer is created lazily on the first call to
// reloadArticlesDelayed.
var articleLoadDebouncer *Debouncer

// reloadArticlesDelayed schedules loadArticles to run one second
// from now, replacing any reload scheduled earlier.
func reloadArticlesDelayed() {
    if articleLoadDebouncer == nil {
        articleLoadDebouncer = NewDebouncer()
    }
    articleLoadDebouncer.debounce(loadArticles, time.Second)
}
mimeTypeFromFileName
// mimeTypes maps a lower-case file extension (with leading dot)
// to its MIME type. Hoisted to package level so the map is built
// once instead of on every call.
var mimeTypes = map[string]string{
	// this is a list from go's mime package
	".css":  "text/css; charset=utf-8",
	".gif":  "image/gif",
	".htm":  "text/html; charset=utf-8",
	".html": "text/html; charset=utf-8",
	".jpg":  "image/jpeg",
	".js":   "application/javascript",
	".wasm": "application/wasm",
	".pdf":  "application/pdf",
	".png":  "image/png",
	".svg":  "image/svg+xml",
	".xml":  "text/xml; charset=utf-8",

	// those are my additions
	".txt":  "text/plain",
	".exe":  "application/octet-stream",
	".json": "application/json",
}

// mimeTypeFromFileName returns the MIME type for path based on
// its extension (case-insensitive), defaulting to
// "application/octet-stream" for unknown extensions.
func mimeTypeFromFileName(path string) string {
	ext := strings.ToLower(filepath.Ext(path))
	if mt, ok := mimeTypes[ext]; ok {
		return mt
	}
	// if not given, default to this
	return "application/octet-stream"
}
formatDuration / FormattedDuration
// time.Duration with a better string representation
// (see formatDuration for the exact formatting rules).
type FormattedDuration time.Duration

// String implements fmt.Stringer by delegating to formatDuration.
func (d FormattedDuration) String() string {
	return formatDuration(time.Duration(d))
}

// formatDuration renders d more compactly than
// time.Duration.String(): µs values drop their fractional part
// entirely, ms values keep at most two fractional digits, and
// both get a space inserted before the unit. Other durations are
// returned unchanged.
func formatDuration(d time.Duration) string {
	s := d.String()
	switch {
	case strings.HasSuffix(s, "µs"):
		// for µs we don't want fractions
		if dot := strings.IndexByte(s, '.'); dot >= 0 {
			return s[:dot] + " µs"
		}
		return strings.ReplaceAll(s, "µs", " µs")
	case strings.HasSuffix(s, "ms"):
		// for ms we only want 2 digit fractions
		if dot := strings.IndexByte(s, '.'); dot >= 0 {
			frac := s[dot+1:]
			// frac includes the trailing "ms"; 2 digits + "ms" = 4
			if len(frac) > 4 {
				return s[:dot] + "." + frac[:2] + " ms"
			}
		}
		return strings.ReplaceAll(s, "ms", " ms")
	}
	return s
}
pathExists
// pathExists reports whether path exists. Uses Lstat, so a
// dangling symlink still counts as existing.
func pathExists(path string) bool {
	if _, err := os.Lstat(path); err != nil {
		return false
	}
	return true
}
dirExists
// dirExists reports whether path exists and is a directory.
// Uses Lstat, so a symlink pointing at a directory reports false.
func dirExists(path string) bool {
	fi, err := os.Lstat(path)
	if err != nil {
		return false
	}
	return fi.IsDir()
}
getFileSize
// getFileSize returns the size of path in bytes, or -1 when it
// cannot be stat-ed. Uses Lstat, so for a symlink this is the
// link's own size, not the target's.
func getFileSize(path string) int64 {
	fi, err := os.Lstat(path)
	if err != nil {
		return -1
	}
	return fi.Size()
}
expandTildeInPath
// expandTildeInPath replaces a leading "~" with the user's home
// directory. Only a bare "~" or a "~/" prefix is expanded; names
// like "~otheruser/x" (another user's home in shell syntax) are
// returned unchanged — the previous HasPrefix(s, "~") check
// wrongly mangled those into <home>+"otheruser/x".
func expandTildeInPath(s string) string {
	if s != "~" && !strings.HasPrefix(s, "~/") {
		return s
	}
	dir, err := os.UserHomeDir()
	must(err)
	return dir + s[1:]
}
copyFile
// copyFile copies srcPath to dstPath, creating dstPath's parent
// directories as needed. The whole file is buffered in memory.
func copyFile(dstPath, srcPath string) error {
	data, err := os.ReadFile(srcPath)
	if err != nil {
		return err
	}
	if err = os.MkdirAll(filepath.Dir(dstPath), 0755); err != nil {
		return err
	}
	return os.WriteFile(dstPath, data, 0644)
}
copyFileMoreEfficient
// mkdirForFile creates the parent directory of filePath (and any
// missing ancestors).
func mkdirForFile(filePath string) error {
	dir := filepath.Dir(filePath)
	return os.MkdirAll(dir, 0755)
}

// copyFileMoreEfficient copies src to dst by streaming with
// io.Copy, so memory use is constant regardless of file size.
// On any copy or close failure the partially-written dst is
// removed and the error returned.
func copyFileMoreEfficient(dst string, src string) error {
	if err := mkdirForFile(dst); err != nil {
		return err
	}
	fin, err := os.Open(src)
	if err != nil {
		return err
	}
	defer fin.Close()
	fout, err := os.Create(dst)
	if err != nil {
		return err
	}

	_, err = io.Copy(fout, fin)
	errClose := fout.Close()
	if err != nil || errClose != nil {
		os.Remove(dst)
	}
	if err != nil {
		return err
	}
	// bug fix: the close error was previously dropped, so a failed
	// flush deleted dst yet reported success to the caller
	return errClose
}
createDirForFile
// createDirForFile ensures the parent directory of path exists,
// creating it (and any missing ancestors) if needed.
func createDirForFile(path string) error {
	return os.MkdirAll(filepath.Dir(path), 0755)
}
runCmdMust
// fmtCmdShort formats cmd for logging with its executable path
// shortened to the base name. It operates on a copy, so the
// caller's Cmd is untouched.
func fmtCmdShort(cmd exec.Cmd) string {
	short := cmd
	short.Path = filepath.Base(short.Path)
	return short.String()
}

// runCmdMust runs cmd and panics (via must) if it fails. When the
// caller has not redirected stdout/stderr, the combined output is
// captured, logged and returned; otherwise output flows to the
// caller's writers and "" is returned.
func runCmdMust(cmd *exec.Cmd) string {
	fmt.Printf("> %s\n", fmtCmdShort(*cmd))
	if cmd.Stdout != nil || cmd.Stderr != nil {
		// output goes wherever the caller pointed it
		err := cmd.Run()
		if err == nil {
			return ""
		}
		logf("cmd '%s' failed with '%s'\n", cmd, err)
		must(err)
		return ""
	}
	out, err := cmd.CombinedOutput()
	res := string(out)
	if err != nil {
		logf("cmd '%s' failed with '%s'. Output:\n%s\n", cmd, err, res)
		must(err)
		return res
	}
	if len(out) > 0 {
		logf("Output:\n%s\n", res)
	}
	return res
}
runCmdLogged
// runCmdLogged runs cmd with its stdin/stdout/stderr wired to the
// current process's, so the command's output is visible live.
func runCmdLogged(cmd *exec.Cmd) error {
	cmd.Stderr = os.Stderr
	cmd.Stdout = os.Stdout
	cmd.Stdin = os.Stdin
	return cmd.Run()
}
readGzippedFile
// readGzippedFile reads the gzip-compressed file at path and
// returns its decompressed contents.
func readGzippedFile(path string) ([]byte, error) {
	f, err := os.Open(path)
	if err != nil {
		return nil, err
	}
	defer f.Close()
	zr, err := gzip.NewReader(f)
	if err != nil {
		return nil, err
	}
	defer zr.Close()
	return ioutil.ReadAll(zr)
}
GzippedReadCloser
// GzippedReadCloser is a io.ReadCloser for a gzip file: reads are
// served from the decompressing reader r, Close closes the
// underlying file. Note: only the file is closed; the gzip
// reader's own Close is never called.
type GzippedReadCloser struct {
	f *os.File  // the underlying compressed file
	r io.Reader // gzip reader layered over f
}

// Close closes a reader (i.e. the underlying file).
func (rc *GzippedReadCloser) Close() error {
	return rc.f.Close()
}

// Read reads data from a reader (decompressed bytes from the
// gzip stream).
func (rc *GzippedReadCloser) Read(d []byte) (int, error) {
	return rc.r.Read(d)
}

// openGzipped opens the gzip file at path and returns an
// io.ReadCloser yielding the decompressed bytes; closing it
// closes the underlying file.
func openGzipped(path string) (io.ReadCloser, error) {
	f, err := os.Open(path)
	if err != nil {
		return nil, err
	}
	r, err := gzip.NewReader(f)
	if err != nil {
		// bug fix: don't leak the file handle when the gzip
		// header is invalid
		f.Close()
		return nil, err
	}
	rc := &GzippedReadCloser{
		f: f,
		r: r,
	}
	return rc, nil
}

// readGzipped reads the whole gzip file at path and returns the
// decompressed contents (nil on any error).
func readGzipped(path string) ([]byte, error) {
	rc, err := openGzipped(path)
	if err != nil {
		return nil, err
	}
	defer rc.Close()
	data, err := ioutil.ReadAll(rc)
	if err != nil {
		return nil, err
	}
	return data, nil
}
readLinesUsingScanner
// readLinesUsingScanner returns the lines of the file at filePath
// (newlines stripped).
// NOTE(review): bufio.Scanner errors out on lines longer than its
// default buffer (~64KB) — confirm inputs stay below that.
func readLinesUsingScanner(filePath string) ([]string, error) {
	f, err := os.OpenFile(filePath, os.O_RDONLY, 0666)
	if err != nil {
		return nil, err
	}
	defer f.Close()
	lines := make([]string, 0)
	sc := bufio.NewScanner(f)
	for sc.Scan() {
		lines = append(lines, sc.Text())
	}
	if err := sc.Err(); err != nil {
		return nil, err
	}
	return lines, nil
}
readLinesWithSplit
// readLinesWithSplit returns the file's content split on "\n".
// Note: a trailing newline yields a final empty element, and
// Windows CRLF endings keep their "\r".
func readLinesWithSplit(path string) ([]string, error) {
	data, err := ioutil.ReadFile(path)
	if err != nil {
		return nil, err
	}
	return strings.Split(string(data), "\n"), nil
}
unzip .zip file to a directory
// recreateDir deletes dir (if present) and creates it afresh.
func recreateDir(dir string) error {
	err := os.RemoveAll(dir)
	if err != nil {
		return err
	}
	return os.MkdirAll(dir, 0755)
}

// createDirForFile ensures the parent directory of path exists.
func createDirForFile(path string) error {
	dir := filepath.Dir(path)
	return os.MkdirAll(dir, 0755)
}

// unzipFile extracts a single zip entry f to dstPath, creating
// parent directories as needed. A partially-written file is
// removed on error.
func unzipFile(f *zip.File, dstPath string) error {
	r, err := f.Open()
	if err != nil {
		return err
	}
	defer r.Close()

	err = createDirForFile(dstPath)
	if err != nil {
		return err
	}

	w, err := os.Create(dstPath)
	if err != nil {
		return err
	}
	_, err = io.Copy(w, r)
	if err != nil {
		w.Close()
		os.Remove(dstPath)
		return err
	}
	err = w.Close()
	if err != nil {
		os.Remove(dstPath)
		return err
	}
	return nil
}

// unzip extracts the archive at zipPath into destDir. destDir is
// recreated from scratch; on any error it is removed entirely.
// Security fix: entry names are validated so a malicious archive
// with ".." components cannot write outside destDir ("zip slip").
func unzip(zipPath string, destDir string) error {
	st, err := os.Stat(zipPath)
	if err != nil {
		return err
	}
	f, err := os.Open(zipPath)
	if err != nil {
		return err
	}
	defer f.Close()

	zr, err := zip.NewReader(f, st.Size())
	if err != nil {
		return err
	}
	err = recreateDir(destDir)
	if err != nil {
		return err
	}

	cleanDest := filepath.Clean(destDir)
	for _, fi := range zr.File {
		if fi.FileInfo().IsDir() {
			continue
		}
		destPath := filepath.Join(destDir, fi.Name)
		// reject entries whose cleaned path escapes destDir
		if destPath != cleanDest && !strings.HasPrefix(destPath, cleanDest+string(os.PathSeparator)) {
			os.RemoveAll(destDir)
			return fmt.Errorf("zip entry '%s' escapes destination directory", fi.Name)
		}
		err = unzipFile(fi, destPath)
		if err != nil {
			os.RemoveAll(destDir)
			return err
		}
	}
	return nil
}
httpDownload
// can be used for http.Get() requests with better timeouts. New one must be created
// for each Get() request, because the read/write deadline set on
// the connection here is absolute.
// NOTE(review): Transport.Dial is deprecated in favor of
// DialContext; kept as-is for compatibility with older Go.
func newTimeoutClient(connectTimeout time.Duration, readWriteTimeout time.Duration) *http.Client {
	dial := func(netw, addr string) (net.Conn, error) {
		conn, err := net.DialTimeout(netw, addr, connectTimeout)
		if err != nil {
			return nil, err
		}
		conn.SetDeadline(time.Now().Add(readWriteTimeout))
		return conn, nil
	}
	return &http.Client{
		Transport: &http.Transport{
			Dial:  dial,
			Proxy: http.ProxyFromEnvironment,
		},
	}
}

// httpDownload fetches url and returns the response body.
// A non-200 status code is reported as an error.
func httpDownload(url string) ([]byte, error) {
	// default timeout for http.Get() is really long, so dial it down
	// for both connection and read/write timeouts
	timeoutClient := newTimeoutClient(time.Second*120, time.Second*120)
	resp, err := timeoutClient.Get(url)
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()
	if resp.StatusCode != 200 {
		// fmt.Errorf replaces the non-idiomatic
		// errors.New(fmt.Sprintf(...)); message unchanged
		return nil, fmt.Errorf("'%s': status code not 200 (%d)", url, resp.StatusCode)
	}
	return ioutil.ReadAll(resp.Body)
}
set/get firestore document
// panicIfErr panics with the error's message when err is non-nil.
func panicIfErr(err error) {
	if err == nil {
		return
	}
	panic(err.Error())
}

// Foo is used by dbTestSetGet to exercise json/firestore tag
// handling; the "str-2"/"str-arr" fields carry explicit firestore
// tags so their stored names match the map keys written in Set.
type Foo struct {
	Str     string   `json:"str"`
	Str2    string   `json:"str-2" firestore:"str-2"`
	StrArr  []string `json:"strarr"`
	StrArr2 []string `json:"str-arr" firestore:"str-arr"`
}

// dbTestSetGet writes a document with both plain and dashed field
// names to the "foo" collection, reads it back, and prints it both
// as the raw map and decoded into Foo (exercising the firestore
// struct tags). Panics on any firestore error.
func dbTestSetGet() {
	db, ctx := getDB()
	// keys chosen to match Foo's json/firestore field names
	d := map[string]interface{}{
		"str":     "string value str",
		"str-2":   "string value str-2",
		"strarr":  []string{"string", "array", "for", "strarr"},
		"str-arr": []string{"string", "array", "for", "str-arr"},
	}
	_, err := db.Collection("foo").Doc("bar").Set(ctx, d)
	panicIfErr(err)
	doc, err := db.Collection("foo").Doc("bar").Get(ctx)
	panicIfErr(err)
	data := doc.Data()
	fmt.Printf("Data: %#v\n", data)
	var foo Foo
	err = doc.DataTo(&foo)
	panicIfErr(err)
	fmt.Printf("foo: %#v\n", foo)
}
sha1OfFile, sha1HexOfFile
// sha1OfFile returns the SHA-1 digest of the file at path,
// streaming the file through the hash so memory use is constant.
func sha1OfFile(path string) ([]byte, error) {
	f, err := os.Open(path)
	if err != nil {
		return nil, err
	}
	defer f.Close()
	h := sha1.New()
	if _, err := io.Copy(h, f); err != nil {
		return nil, err
	}
	return h.Sum(nil), nil
}

// sha1HexOfFile returns the SHA-1 digest of the file at path as a
// lower-case hex string. (Local renamed from "sha1", which
// shadowed the crypto/sha1 package name.)
func sha1HexOfFile(path string) (string, error) {
	sum, err := sha1OfFile(path)
	if err != nil {
		return "", err
	}
	return fmt.Sprintf("%x", sum), nil
}
cdUpDir
// currDirAbsMust returns the absolute path of the current working
// directory, panicking (via must) on failure.
func currDirAbsMust() string {
	dir, err := filepath.Abs(".")
	must(err)
	return dir
}

// we are executed for do/ directory so top dir is parent dir.
// cdUpDir walks up from the current directory until it finds a
// directory named dirName and chdirs into it. Panics (via
// panicIf) when the filesystem root is reached without a match.
func cdUpDir(dirName string) {
	startDir := currDirAbsMust()
	for dir := startDir; ; {
		// found it (possibly already the starting directory)
		if filepath.Base(dir) == dirName && dirExists(dir) {
			must(os.Chdir(dir))
			return
		}
		parent := filepath.Dir(dir)
		// at the root, Dir returns its argument unchanged
		panicIf(dir == parent, "invalid startDir: '%s', dir: '%s'", startDir, dir)
		dir = parent
	}
}

Feedback about page:

Feedback:
Optional: your email if you want me to get back to you:

Need fast, offline access to 190+ programmer API docs? Try my app Documentalist for Windows