Golang

Intro
Golang is the new hip language developed by Google. It is easy to read like Python, and fast and low level like C.

Golang Concurrency

So basically goroutines are amazing in Go: they are cheap and fast, and although one might assume they always run in parallel, concurrency and parallelism are not the same thing. Let's say I want to run 2,000 HTTP GET requests against skooog.se. I can do this concurrently, but we are going to run into several issues, one of them being the number of sockets available on the client machine.
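
To get a feel for how cheap they are, here is a minimal, purely illustrative example that spins up 10,000 goroutines and waits for them with a sync.WaitGroup; the loop body is just a stand-in for real work:

 package main
 
 import (
     "fmt"
     "sync"
 )
 
 func main() {
     var wg sync.WaitGroup
 
     // Spawning tens of thousands of goroutines is cheap: each one
     // starts with only a few kilobytes of stack.
     for i := 0; i < 10000; i++ {
         wg.Add(1)
         go func(n int) {
             defer wg.Done()
             _ = n * n // stand-in for real work
         }(i)
     }
 
     wg.Wait()
     fmt.Println("all 10000 goroutines finished")
 }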

So sockets are expensive, but goroutines aren't. Here is how we solve it:

 package main
 
 import (
     "fmt"
     "io/ioutil"
     "net/http"
     "sync"
 )
 
 // We need something to recover in case a goroutine panics.
 // Otherwise the whole program might crash or hang.
 func cleanup() {
     if r := recover(); r != nil {
         fmt.Println("recovered in cleanup:", r)
     }
 }
 
 func getURL(c chan<- *http.Response, wg *sync.WaitGroup, sem chan bool, url string) {
     // Tell the waitgroup we are done when this goroutine returns
     defer wg.Done()
     defer cleanup()
 
     // Take a slot in the semaphore and give it back when we return,
     // even if the request below makes us panic
     sem <- true
     defer func() { <-sem }()
 
     data, err := http.Get(url)
     if err != nil {
         panic(err)
     }
 
     c <- data
 }
 
 func main() {
     // Declare the waitgroup
     var wg sync.WaitGroup
 
     // Make the channel with a buffer size that fits all our responses
     c := make(chan *http.Response, 1000)
 
     /*
      We create sem to act as a counting semaphore. Goroutines are cheap and we can use a lot
      of them, but sockets aren't: we can easily spin up 10,000 goroutines without any issues,
      yet we would quickly run out of sockets. The buffer of 50 means at most 50 requests
      are in flight at any one time.
     */
     sem := make(chan bool, 50)
     for i := 0; i < 1000; i++ {
         // Add one job to the waitgroup
         wg.Add(1)
         go getURL(c, &wg, sem, "http://skooog.se")
     }
     // Wait until every job we added is done
     wg.Wait()
 
     // Closing the channel is needed for the range statement below.
     // If we don't close it, range never knows when to stop iterating,
     // and the program will hang.
     close(c)
     fmt.Println("ok")
 
     httpResponses := [][]byte{}
 
     for elem := range c {
         content, err := ioutil.ReadAll(elem.Body)
         elem.Body.Close()
         if err != nil {
             continue
         }
         httpResponses = append(httpResponses, content)
     }
 
     for _, slice := range httpResponses {
         fmt.Println(string(slice))
     }
 }
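
As a side note, the same socket limit can also be enforced with a fixed pool of worker goroutines pulling URLs from a jobs channel, instead of a semaphore. This is only a sketch of that alternative; the worker count, request count and URL below are illustrative and not part of the original example:

 package main
 
 import (
     "fmt"
     "io/ioutil"
     "net/http"
     "sync"
 )
 
 // worker reads URLs from jobs and sends the response bodies on results.
 func worker(jobs <-chan string, results chan<- []byte, wg *sync.WaitGroup) {
     defer wg.Done()
     for url := range jobs {
         resp, err := http.Get(url)
         if err != nil {
             fmt.Println("request failed:", err)
             continue
         }
         body, err := ioutil.ReadAll(resp.Body)
         resp.Body.Close()
         if err != nil {
             fmt.Println("read failed:", err)
             continue
         }
         results <- body
     }
 }
 
 func main() {
     const numWorkers = 50    // at most 50 sockets in flight
     const numRequests = 1000
 
     jobs := make(chan string, numRequests)
     results := make(chan []byte, numRequests)
 
     var wg sync.WaitGroup
     for i := 0; i < numWorkers; i++ {
         wg.Add(1)
         go worker(jobs, results, &wg)
     }
 
     for i := 0; i < numRequests; i++ {
         jobs <- "http://skooog.se"
     }
     close(jobs) // workers exit once jobs is drained
 
     wg.Wait()
     close(results) // safe: all senders are done
 
     for body := range results {
         fmt.Println(len(body), "bytes")
     }
 }

With 50 workers there are never more than 50 requests in flight, which is the same guarantee the semaphore gives, but without spawning one goroutine per request.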