Skip to content

leetcode-golang-classroom/golang-with-concurrency-with-error-group

Folders and files

NameName
Last commit message
Last commit date

Latest commit

 

History

1 Commit
 
 
 
 
 
 
 
 
 
 
 
 

Repository files navigation

golang-with-concurrency-with-error-group

This repository demonstrates how to handle errors from concurrent goroutines using Go's errgroup package.

example

  1. Fetch 4 URLs with at most 2 goroutines running concurrently
package main

import (
	"fmt"
	"io"
	"net/http"
	"sync"

	"golang.org/x/sync/errgroup"
)

// urls lists the pages fetched by both examples below. The final entry is an
// intentionally unresolvable host, included to demonstrate how errgroup
// surfaces the first error from a failed goroutine.
var urls = []string{
	"https://www.codeheim.io",
	"https://golang.org",
	"https://pkg.go.dev/golang.org/x/sync/errgroup",
	"https://invalid-url",
}

// fetchPage downloads the content of url and stores the response body in
// responses, keyed by url. mu guards the shared map against concurrent writes.
// It returns a wrapped error if the request or the body read fails.
//
// NOTE(review): http.Get uses http.DefaultClient, which has no timeout; a
// client with an explicit Timeout would be safer for production use.
func fetchPage(url string, mu *sync.Mutex, responses *map[string]string) error {
	// Fetch the url content.
	resp, err := http.Get(url)
	if err != nil {
		// Wrap and return only; the caller (g.Wait in main) is the single
		// place errors are reported, so we do not also log here.
		return fmt.Errorf("failed to fetch %s: %w", url, err)
	}
	defer resp.Body.Close()
	fmt.Printf("Successfully fetched %s\n", url)

	// Read the full response body.
	body, err := io.ReadAll(resp.Body)
	if err != nil {
		return fmt.Errorf("failed to read response from %s: %w", url, err)
	}

	// Store the result; concurrent map writes are a data race, hence the lock.
	mu.Lock()
	(*responses)[url] = string(body)
	mu.Unlock()
	fmt.Printf("Successfully fetched response body of %s\n", url)
	return nil
}
// main fetches every URL in urls with at most two goroutines running at a
// time, then either reports the first error or prints a preview of each body.
func main() {
	var g errgroup.Group
	g.SetLimit(2) // bound concurrency: at most 2 fetches in flight

	// responses maps each URL to its body; mu guards concurrent writes.
	responses := make(map[string]string)
	var mu sync.Mutex

	for _, url := range urls {
		url := url // shadow for pre-Go-1.22 loop-variable capture semantics
		// Start a goroutine for each URL (blocks when the limit is reached).
		g.Go(func() error {
			return fetchPage(url, &mu, &responses)
		})
	}

	// Wait blocks until all goroutines finish and returns the first error.
	if err := g.Wait(); err != nil {
		fmt.Println("Error occurred:", err)
		return
	}
	fmt.Println("All URLs fetched successfully!")
	for url, content := range responses {
		// Clamp the preview: content[:100] would panic on bodies < 100 bytes.
		preview := content
		if len(preview) > 100 {
			preview = preview[:100]
		}
		fmt.Printf("Response from %s: %s\n", url, preview)
	}
}
  1. errgroup with context (cancel remaining fetches on first error)
// fetchPage downloads url and records its body in responses (guarded by mu).
// The HTTP request is bound to ctx, so an errgroup cancellation (triggered by
// a sibling goroutine failing) aborts an in-flight fetch rather than only
// being observed before the request starts. On pre-request cancellation it
// returns nil: the goroutine that caused the cancellation already carries the
// error reported by g.Wait.
func fetchPage(ctx context.Context, url string, mu *sync.Mutex, responses *map[string]string) error {
	// Exit early if the group context is already canceled.
	select {
	case <-ctx.Done():
		fmt.Println("Context canceled:", ctx.Err())
		return nil
	default:
	}

	// Bind the request to ctx so cancellation propagates mid-request; plain
	// http.Get would ignore ctx once the request is started.
	req, err := http.NewRequestWithContext(ctx, http.MethodGet, url, nil)
	if err != nil {
		return fmt.Errorf("failed to build request for %s: %w", url, err)
	}
	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		// Wrap and return only; errors are reported once, at g.Wait.
		return fmt.Errorf("failed to fetch %s: %w", url, err)
	}
	defer resp.Body.Close()
	fmt.Printf("Successfully fetched %s\n", url)

	// Read the full response body.
	body, err := io.ReadAll(resp.Body)
	if err != nil {
		return fmt.Errorf("failed to read response from %s: %w", url, err)
	}

	// Store the result; the lock prevents concurrent map writes.
	mu.Lock()
	(*responses)[url] = string(body)
	mu.Unlock()
	fmt.Printf("Successfully fetched response body of %s\n", url)
	return nil
}
// main fetches every URL in urls with at most two concurrent goroutines,
// using errgroup.WithContext so the first failure cancels the remaining
// fetches. On success it prints a preview of each response body.
func main() {
	// ctx is canceled as soon as any goroutine returns a non-nil error.
	g, ctx := errgroup.WithContext(context.Background())
	g.SetLimit(2) // bound concurrency: at most 2 fetches in flight

	// responses maps each URL to its body; mu guards concurrent writes.
	responses := make(map[string]string)
	var mu sync.Mutex

	for _, url := range urls {
		url := url // shadow for pre-Go-1.22 loop-variable capture semantics
		// Start a goroutine for each URL (blocks when the limit is reached).
		g.Go(func() error {
			return fetchPage(ctx, url, &mu, &responses)
		})
	}

	// Wait blocks until all goroutines finish and returns the first error.
	if err := g.Wait(); err != nil {
		fmt.Println("Error occurred:", err)
		return
	}
	fmt.Println("All URLs fetched successfully!")
	for url, content := range responses {
		// Clamp the preview: content[:100] would panic on bodies < 100 bytes.
		preview := content
		if len(preview) > 100 {
			preview = preview[:100]
		}
		fmt.Printf("Response from %s: %s\n", url, preview)
	}
}

About

No description, website, or topics provided.

Resources

Stars

Watchers

Forks

Releases

No releases published

Packages

No packages published

Languages