@zyxar
Last active January 15, 2025 06:09
tour.golang exercise solutions
/* Exercise: Loops and Functions #43 */
package main

import (
	"fmt"
	"math"
)

// Sqrt computes the square root of x with Newton's method,
// iterating until successive guesses differ by less than 1e-15.
func Sqrt(x float64) float64 {
	z := 2.0
	s := 0.0
	for {
		z = z - (z*z-x)/(2*z)
		if math.Abs(s-z) < 1e-15 {
			break
		}
		s = z
	}
	return s
}

func main() {
	fmt.Println(Sqrt(2))
	fmt.Println(math.Sqrt(2))
}
/******************************************************************************************************/
/* Exercise: Maps #44 */
package main

import (
	"strings"
	"tour/wc"
)

// WordCount maps each word in s to the number of times it appears.
func WordCount(s string) map[string]int {
	ret := make(map[string]int)
	for _, word := range strings.Fields(s) {
		ret[word]++
	}
	return ret
}

func main() {
	wc.Test(WordCount)
}
/******************************************************************************************************/
/* Exercise: Slices #45 */
package main

import "tour/pic"

// Pic returns a dy-by-dx image whose pixel values are
// computed from each pixel's coordinates.
func Pic(dx, dy int) [][]uint8 {
	ret := make([][]uint8, dy)
	for i := 0; i < dy; i++ {
		ret[i] = make([]uint8, dx)
		for j := 0; j < dx; j++ {
			ret[i][j] = uint8(i^j + (i+j)/2)
		}
	}
	return ret
}

func main() {
	pic.Show(Pic)
}
/******************************************************************************************************/
/* Exercise: Fibonacci closure #46 */
package main

import "fmt"

// fibonacci is a function that returns
// a function that returns an int.
func fibonacci() func() int {
	var x, y = 0, 1
	return func() (z int) {
		z, x, y = x, y, x+y
		return
	}
}

func main() {
	f := fibonacci()
	for i := 0; i < 10; i++ {
		fmt.Println(f())
	}
}
/******************************************************************************************************/
/* Advanced Exercise: Complex cube roots #47 */
package main

import (
	"fmt"
	"math/cmplx"
)

// Cbrt computes the cube root of x with Newton's method,
// iterating until successive guesses differ by less than 1e-17.
func Cbrt(x complex128) complex128 {
	z := complex128(2)
	s := complex128(0)
	for {
		z = z - (cmplx.Pow(z, 3)-x)/(3*(z*z))
		if cmplx.Abs(s-z) < 1e-17 {
			break
		}
		s = z
	}
	return z
}

func main() {
	fmt.Println(Cbrt(2))
}
/******************************************************************************************************/
/* Exercise: Errors #57 */
package main

import (
	"fmt"
	"math"
)

type ErrNegativeSqrt float64

func (e ErrNegativeSqrt) Error() string {
	return fmt.Sprintf("cannot Sqrt negative number: %g", float64(e))
}

// Sqrt returns an ErrNegativeSqrt for negative input and otherwise
// computes the square root with Newton's method.
func Sqrt(f float64) (float64, error) {
	if f < 0 {
		return 0, ErrNegativeSqrt(f)
	}
	z, s := 2.0, 0.0
	for math.Abs(s-z) >= 1e-15 {
		s = z
		z = z - (z*z-f)/(2*z)
	}
	return z, nil
}

func main() {
	fmt.Println(Sqrt(2))
	fmt.Println(Sqrt(-2))
}
/******************************************************************************************************/
/* Exercise: Images #58 */
package main

import (
	"image"
	"image/color"
	"tour/pic"
)

// Image implements the image.Image interface.
type Image struct {
	Width, Height int
	colr          uint8
}

func (r *Image) Bounds() image.Rectangle {
	return image.Rect(0, 0, r.Width, r.Height)
}

func (r *Image) ColorModel() color.Model {
	return color.RGBAModel
}

func (r *Image) At(x, y int) color.Color {
	return color.RGBA{r.colr + uint8(x), r.colr + uint8(y), 255, 255}
}

func main() {
	m := Image{100, 100, 128}
	pic.ShowImage(&m)
}
/******************************************************************************************************/
/* Exercise: Rot13 Reader #59: 'You cracked the code!' */
package main

import (
	"io"
	"os"
	"strings"
)

type rot13Reader struct {
	r io.Reader
}

// Read fills p from the underlying reader, then applies the ROT13
// substitution to the n bytes that were actually read (not all of p).
func (rot *rot13Reader) Read(p []byte) (n int, err error) {
	n, err = rot.r.Read(p)
	for i := 0; i < n; i++ {
		if (p[i] >= 'A' && p[i] < 'N') || (p[i] >= 'a' && p[i] < 'n') {
			p[i] += 13
		} else if (p[i] > 'M' && p[i] <= 'Z') || (p[i] > 'm' && p[i] <= 'z') {
			p[i] -= 13
		}
	}
	return
}

func main() {
	s := strings.NewReader("Lbh penpxrq gur pbqr!")
	r := rot13Reader{s}
	io.Copy(os.Stdout, &r)
}
/******************************************************************************************************/
/* Exercise: Equivalent Binary Trees #67 */
package main

import (
	"fmt"
	"tour/tree"
)

// Walk walks the tree t sending all values
// from the tree to the channel ch.
func Walk(t *tree.Tree, ch chan int) {
	_walk(t, ch)
	close(ch)
}

func _walk(t *tree.Tree, ch chan int) {
	if t != nil {
		_walk(t.Left, ch)
		ch <- t.Value
		_walk(t.Right, ch)
	}
}

// Same determines whether the trees
// t1 and t2 contain the same values.
func Same(t1, t2 *tree.Tree) bool {
	ch1 := make(chan int)
	ch2 := make(chan int)
	go Walk(t1, ch1)
	go Walk(t2, ch2)
	for i := range ch1 {
		if i != <-ch2 {
			return false
		}
	}
	return true
}

func main() {
	ch := make(chan int)
	go Walk(tree.New(1), ch)
	for v := range ch {
		fmt.Print(v)
	}
	fmt.Println(Same(tree.New(1), tree.New(1)))
	fmt.Println(Same(tree.New(1), tree.New(2)))
}
/******************************************************************************************************/
/* Exercise: Web Crawler #69 */
package main

import (
	"fmt"
)

type Fetcher interface {
	// Fetch returns the body of URL and
	// a slice of URLs found on that page.
	Fetch(url string) (body string, urls []string, err error)
}

// store records every URL that has been scheduled for fetching.
var store map[string]bool

// Krawl fetches one URL and sends the URLs found on that page
// down the Urls channel.
func Krawl(url string, fetcher Fetcher, Urls chan []string) {
	body, urls, err := fetcher.Fetch(url)
	if err != nil {
		fmt.Println(err)
	} else {
		fmt.Printf("found: %s %q\n", url, body)
	}
	Urls <- urls
}

func Crawl(url string, depth int, fetcher Fetcher) {
	Urls := make(chan []string)
	go Krawl(url, fetcher, Urls)
	band := 1         // number of fetches currently outstanding
	store[url] = true // init for level 0 done
	for i := 0; i < depth; i++ {
		for band > 0 {
			band--
			next := <-Urls
			for _, url := range next {
				if _, done := store[url]; !done {
					store[url] = true
					band++
					go Krawl(url, fetcher, Urls)
				}
			}
		}
	}
}

func main() {
	store = make(map[string]bool)
	Crawl("http://golang.org/", 4, fetcher)
}

// fakeFetcher is Fetcher that returns canned results.
type fakeFetcher map[string]*fakeResult

type fakeResult struct {
	body string
	urls []string
}

func (f *fakeFetcher) Fetch(url string) (string, []string, error) {
	if res, ok := (*f)[url]; ok {
		return res.body, res.urls, nil
	}
	return "", nil, fmt.Errorf("not found: %s", url)
}

// fetcher is a populated fakeFetcher.
var fetcher = &fakeFetcher{
	"http://golang.org/": &fakeResult{
		"The Go Programming Language",
		[]string{
			"http://golang.org/pkg/",
			"http://golang.org/cmd/",
		},
	},
	"http://golang.org/pkg/": &fakeResult{
		"Packages",
		[]string{
			"http://golang.org/",
			"http://golang.org/cmd/",
			"http://golang.org/pkg/fmt/",
			"http://golang.org/pkg/os/",
		},
	},
	"http://golang.org/pkg/fmt/": &fakeResult{
		"Package fmt",
		[]string{
			"http://golang.org/",
			"http://golang.org/pkg/",
		},
	},
	"http://golang.org/pkg/os/": &fakeResult{
		"Package os",
		[]string{
			"http://golang.org/",
			"http://golang.org/pkg/",
		},
	},
}
ViktorVoloshko commented Oct 13, 2023

My relatively simple solution for the crawler, without global variables or channels, using a mutex and a WaitGroup:

package main

import (
	"fmt"
	"sync"
)

type UrlCache struct {
	urls map[string]bool
	mu   sync.Mutex
}

func (uc *UrlCache) Add(url string) {
	uc.mu.Lock()
	uc.urls[url] = true
	uc.mu.Unlock()
}

func (uc *UrlCache) Get(url string) bool {
	uc.mu.Lock()
	defer uc.mu.Unlock()
	return uc.urls[url]
}

type Fetcher interface {
	// Fetch returns the body of URL and
	// a slice of URLs found on that page.
	Fetch(url string) (body string, urls []string, err error)
}

// Crawl uses fetcher to recursively crawl
// pages starting with url, to a maximum of depth.
func Crawl(url string, depth int, fetcher Fetcher) {
	var wg sync.WaitGroup
	cache := UrlCache{urls: make(map[string]bool)}
	wg.Add(1)
	go crawl(url, depth, fetcher, &cache, &wg)
	wg.Wait()
}

func crawl(url string, depth int, fetcher Fetcher, cache *UrlCache, wg *sync.WaitGroup) {
	defer wg.Done()
	if depth <= 0 || cache.Get(url) {
		return
	}
	cache.Add(url)
	body, urls, err := fetcher.Fetch(url)
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Printf("found: %s %q\n", url, body)
	for _, u := range urls {
		wg.Add(1)
		go crawl(u, depth-1, fetcher, cache, wg)
	}
}

func main() {
	Crawl("https://golang.org/", 4, fetcher)
}

// fakeFetcher is Fetcher that returns canned results.
type fakeFetcher map[string]*fakeResult

type fakeResult struct {
	body string
	urls []string
}

func (f fakeFetcher) Fetch(url string) (string, []string, error) {
	if res, ok := f[url]; ok {
		return res.body, res.urls, nil
	}
	return "", nil, fmt.Errorf("not found: %s", url)
}

// fetcher is a populated fakeFetcher.
var fetcher = fakeFetcher{
	"https://golang.org/": &fakeResult{
		"The Go Programming Language",
		[]string{
			"https://golang.org/pkg/",
			"https://golang.org/cmd/",
		},
	},
	"https://golang.org/pkg/": &fakeResult{
		"Packages",
		[]string{
			"https://golang.org/",
			"https://golang.org/cmd/",
			"https://golang.org/pkg/fmt/",
			"https://golang.org/pkg/os/",
		},
	},
	"https://golang.org/pkg/fmt/": &fakeResult{
		"Package fmt",
		[]string{
			"https://golang.org/",
			"https://golang.org/pkg/",
		},
	},
	"https://golang.org/pkg/os/": &fakeResult{
		"Package os",
		[]string{
			"https://golang.org/",
			"https://golang.org/pkg/",
		},
	},
}

apo1967 commented Nov 30, 2023

@Aternus

Reader Solution:

package tour

import (
	"fmt"
)

type MyReader struct{}

func (r MyReader) Read(b []byte) (int, error) {
	bufferLength := len(b)
	if bufferLength <= 0 {
		return 0, fmt.Errorf("buffer length must be greater than 0")
	}
	buffer := make([]byte, bufferLength)
	for k := range buffer {
		buffer[k] = uint8('A')
	}
	n := copy(b, buffer)
	return n, nil
}

Is there a reason why you don't just set the 'A' directly in b? Why create an internal slice and then copy it to b? Just curious...
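
For reference, a minimal sketch of the direct-write variant (Read may fill the caller's buffer in place, so the scratch slice and the copy are unnecessary):

package tour

// MyReader emits an endless stream of 'A's, writing straight
// into the caller's buffer.
type MyReader struct{}

func (r MyReader) Read(b []byte) (int, error) {
	for i := range b {
		b[i] = 'A'
	}
	return len(b), nil
}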

dkowsley commented Sep 15, 2024

In the rot13Reader exercise, how is Read(p []byte) called? I'm a bit confused, since I don't see it being invoked in main()...

In the quoted code, we see a call to io.Copy:

func main() {
	s := strings.NewReader(
		"Lbh penpxrq gur pbqr!")
	r := rot13Reader{s}
	io.Copy(os.Stdout, &r)
}

The relevant call chain is io.Copy(dst, src) -> io.copyBuffer(dst, src, nil) -> src.Read(buf) (src == &r in this example).

The function definition for io.Copy and the io.Reader interface it uses are:

type Reader interface {
	Read(p []byte) (n int, err error)
}

func Copy(dst Writer, src Reader) (written int64, err error) {
	return copyBuffer(dst, src, nil)
}

io.copyBuffer has the following definition (I have replaced most of the code with comments to make it easier to read). Take note of the fourth line:

func copyBuffer(dst Writer, src Reader, buf []byte) (written int64, err error) {
	// if buf is nil we create a temp 32KB buffer for it
	for {
		nr, er := src.Read(buf) // our call to the Read function we created in our example
		// perform writes to dst with error handling and returning if we have finished reading
	}
	return written, err
}
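
To see those calls happen, here is a small self-contained sketch (loggingReader is a hypothetical wrapper, not part of the exercise) that reports each time io.Copy invokes Read:

package main

import (
	"fmt"
	"io"
	"os"
	"strings"
)

// loggingReader wraps an io.Reader and logs every Read call,
// making io.Copy's use of the interface visible.
type loggingReader struct {
	r io.Reader
}

func (l *loggingReader) Read(p []byte) (int, error) {
	n, err := l.r.Read(p)
	fmt.Fprintf(os.Stderr, "Read called: n=%d err=%v\n", n, err)
	return n, err
}

func main() {
	src := &loggingReader{strings.NewReader("You cracked the code!")}
	io.Copy(os.Stdout, src) // calls src.Read repeatedly until it returns io.EOF
}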

linhns commented Jan 15, 2025

My solution for Web Crawler, heavily inspired by the official solution:

package main

import (
	"fmt"
	"sync"
)

type Fetcher interface {
	// Fetch returns the body of URL and
	// a slice of URLs found on that page.
	Fetch(url string) (body string, urls []string, err error)
}

type URLCache struct {
	mu      sync.Mutex
	fetched map[string]struct{}
}

func (c *URLCache) Add(url string) {
	c.mu.Lock()
	defer c.mu.Unlock()
	c.fetched[url] = struct{}{}
}

func (c *URLCache) Has(url string) bool {
	c.mu.Lock()
	defer c.mu.Unlock()
	_, ok := c.fetched[url]
	return ok
}

// crawl uses fetcher to recursively crawl
// pages starting with url, to a maximum of depth,
// fetching pages in parallel and skipping already-fetched URLs.
func crawl(url string, depth int, fetcher Fetcher, urlCache *URLCache) {
	if depth <= 0 {
		fmt.Printf("<- Done with %v, maximum depth reached\n", url)
		return
	}

	if urlCache.Has(url) {
		fmt.Printf("<- Done with %v, already fetched\n", url)
		return
	}

	body, urls, err := fetcher.Fetch(url)

	urlCache.Add(url)

	if err != nil {
		fmt.Printf("<- Error on %v: %v\n", url, err)
		return
	}

	fmt.Printf("Found: %s %q\n", url, body)

	var wg sync.WaitGroup
	wg.Add(len(urls))
	for i, u := range urls {
		fmt.Printf("-> Crawling child %v/%v of %v : %v.\n", i, len(urls), url, u)
		go func(url string) {
			defer wg.Done()
			crawl(url, depth-1, fetcher, urlCache)
		}(u)
	}
	wg.Wait()
	fmt.Printf("<- Done with %v\n", url)
}

// Crawl starts the crawl from url with a fresh concurrency-safe cache.
func Crawl(url string, depth int, fetcher Fetcher) {
	urlCache := URLCache{fetched: make(map[string]struct{})}
	crawl(url, depth, fetcher, &urlCache)
}

func main() {
	Crawl("https://golang.org/", 4, fetcher)
}

// fakeFetcher is Fetcher that returns canned results.
type fakeFetcher map[string]*fakeResult

type fakeResult struct {
	body string
	urls []string
}

func (f fakeFetcher) Fetch(url string) (string, []string, error) {
	if res, ok := f[url]; ok {
		return res.body, res.urls, nil
	}
	return "", nil, fmt.Errorf("not found: %s", url)
}

// fetcher is a populated fakeFetcher.
var fetcher = fakeFetcher{
	"https://golang.org/": &fakeResult{
		"The Go Programming Language",
		[]string{
			"https://golang.org/pkg/",
			"https://golang.org/cmd/",
		},
	},
	"https://golang.org/pkg/": &fakeResult{
		"Packages",
		[]string{
			"https://golang.org/",
			"https://golang.org/cmd/",
			"https://golang.org/pkg/fmt/",
			"https://golang.org/pkg/os/",
		},
	},
	"https://golang.org/pkg/fmt/": &fakeResult{
		"Package fmt",
		[]string{
			"https://golang.org/",
			"https://golang.org/pkg/",
		},
	},
	"https://golang.org/pkg/os/": &fakeResult{
		"Package os",
		[]string{
			"https://golang.org/",
			"https://golang.org/pkg/",
		},
	},
}

To avoid creating a global variable, I used an unexported function that takes a concurrency-safe cache, which is initialized in Crawl.
