
Commit

Go fmt
SlyMarbo committed Jul 22, 2013
1 parent b9bc2db commit c720f7c
Showing 8 changed files with 224 additions and 224 deletions.
114 changes: 57 additions & 57 deletions ISO-8859-1.go
@@ -11,81 +11,81 @@ import (
// ISO-8859-1 support

type charsetISO88591er struct {
    r   io.ByteReader
    buf *bytes.Buffer
}

func newCharsetISO88591(r io.Reader) *charsetISO88591er {
    buf := bytes.NewBuffer(make([]byte, 0, utf8.UTFMax))
    return &charsetISO88591er{r.(io.ByteReader), buf}
}

func (cs *charsetISO88591er) ReadByte() (b byte, err error) {
    // http://unicode.org/Public/MAPPINGS/ISO8859/8859-1.TXT
    // Date: 1999 July 27; Last modified: 27-Feb-2001 05:08
    if cs.buf.Len() <= 0 {
        r, err := cs.r.ReadByte()
        if err != nil {
            return 0, err
        }
        if r < utf8.RuneSelf {
            return r, nil
        }
        cs.buf.WriteRune(rune(r))
    }
    return cs.buf.ReadByte()
}
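The conversion in ReadByte works because the first 256 Unicode code points coincide with ISO-8859-1: a byte below utf8.RuneSelf (0x80) is already valid UTF-8 and is returned directly, while a byte of 0x80 or above equals its Unicode code point, so WriteRune(rune(r)) buffers the corresponding multi-byte UTF-8 sequence for later ReadByte calls to drain. A small standalone illustration of that mapping (not part of this commit):

package main

import (
    "fmt"
    "unicode/utf8"
)

func main() {
    b := byte(0xE9) // "é" in ISO-8859-1
    buf := make([]byte, utf8.UTFMax)
    n := utf8.EncodeRune(buf, rune(b))          // Latin-1 byte value == Unicode code point
    fmt.Printf("%% X -> %%q\n", buf[:n], buf[:n]) // prints: C3 A9 -> "é"
}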

func (cs *charsetISO88591er) Read(p []byte) (int, error) {
    // Use ReadByte method.
    return 0, errors.New("Use ReadByte()")
}

func isCharset(charset string, names []string) bool {
    charset = strings.ToLower(charset)
    for _, n := range names {
        if charset == strings.ToLower(n) {
            return true
        }
    }
    return false
}

func isCharsetISO88591(charset string) bool {
    // http://www.iana.org/assignments/character-sets
    // (last updated 2010-11-04)
    names := []string{
        // Name
        "ISO_8859-1:1987",
        // Alias (preferred MIME name)
        "ISO-8859-1",
        // Aliases
        "iso-ir-100",
        "ISO_8859-1",
        "latin1",
        "l1",
        "IBM819",
        "CP819",
        "csISOLatin1",
    }
    return isCharset(charset, names)
}

func isCharsetUTF8(charset string) bool {
    names := []string{
        "UTF-8",
        // Default
        "",
    }
    return isCharset(charset, names)
}

func charsetReader(charset string, input io.Reader) (io.Reader, error) {
    switch {
    case isCharsetUTF8(charset):
        return input, nil
    case isCharsetISO88591(charset):
        return newCharsetISO88591(input), nil
    }
    return nil, errors.New("CharsetReader: unexpected charset: " + charset)
}
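charsetReader matches the signature of encoding/xml's Decoder.CharsetReader hook, which is presumably how the rest of the package hands non-UTF-8 feeds to the decoder. A minimal sketch of that wiring, written as if it sat in the same package (the package name and the sample document are assumptions, not part of this commit):

package rss // assumed package name

import (
    "encoding/xml"
    "strings"
)

// decodeLatin1Title is illustrative only.
func decodeLatin1Title() (string, error) {
    // Byte 0xE9 is "é" in ISO-8859-1.
    raw := "<?xml version=\"1.0\" encoding=\"ISO-8859-1\"?><title>caf\xe9</title>"

    var title struct {
        Value string `xml:",chardata"`
    }

    dec := xml.NewDecoder(strings.NewReader(raw))
    dec.CharsetReader = charsetReader // consulted for any non-UTF-8 encoding declaration
    if err := dec.Decode(&title); err != nil {
        return "", err
    }
    return title.Value, nil // "café"
}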
24 changes: 12 additions & 12 deletions atom.go
@@ -15,29 +15,29 @@ func parseAtom(data []byte, read *db) (*Feed, error) {
if err != nil {
return nil, err
}

out := new(Feed)
out.Title = feed.Title
out.Description = feed.Description
out.Link = feed.Link.Href
out.Image = feed.Image.Image()
out.Refresh = time.Now().Add(10 * time.Minute)

if feed.Items == nil {
return nil, fmt.Errorf("Error: no feeds found in %q.", string(data))
}

out.Items = make([]*Item, 0, len(feed.Items))
out.ItemMap = make(map[string]struct{})

// Process items.
for _, item := range feed.Items {

// Skip items already known.
if read.req <- item.ID; <-read.res {
continue
}

next := new(Item)
next.Title = item.Title
next.Content = item.Content
@@ -50,22 +50,22 @@ func parseAtom(data []byte, read *db) (*Feed, error) {
}
next.ID = item.ID
next.Read = false

if next.ID == "" {
fmt.Printf("Warning: Item %q has no ID and will be ignored.\n", next.Title)
continue
}

if _, ok := out.ItemMap[next.ID]; ok {
fmt.Printf("Warning: Item %q has duplicate ID.\n", next.Title)
continue
}

out.Items = append(out.Items, next)
out.ItemMap[next.ID] = struct{}{}
out.Unread++
}

return out, nil
}

@@ -97,7 +97,7 @@ type atomImage struct {
}

type atomLink struct {
Href string `xml:"href,attr"`
}

func (a *atomImage) Image() *Image {
…
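The `xml:"href,attr"` tag on atomLink tells encoding/xml to take the value from the element's href attribute rather than from its character data. A self-contained illustration (the <link> literal is invented for the example):

package main

import (
    "encoding/xml"
    "fmt"
)

type link struct {
    Href string `xml:"href,attr"`
}

func main() {
    var l link
    // Atom links carry their target in an attribute, not in element text.
    if err := xml.Unmarshal([]byte(`<link href="http://example.com/post/1"/>`), &l); err != nil {
        panic(err)
    }
    fmt.Println(l.Href) // http://example.com/post/1
}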
8 changes: 4 additions & 4 deletions database.go
@@ -8,17 +8,17 @@ func init() {
}

type db struct {
req chan string
res chan bool
known map[string]struct{}
}

func (d *db) Run() {
d.known = make(map[string]struct{})
var s string

for {
s = <-d.req
if _, ok := d.known[s]; ok {
d.res <- true
} else {
…
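The db type serializes membership checks through a request/response channel pair: Run owns the known set, callers send an ID on req and read the answer from res, which is the `if read.req <- item.ID; <-read.res` pattern used by the parsers above. A sketch of that protocol, written as if it lived alongside database.go; the construction of the db value, and the assumption that Run's truncated else branch records new IDs and answers false, are both hypothetical:

package rss // assumed package name

import "fmt"

// seen is a hypothetical helper: ask the Run goroutine whether id is already known.
func seen(d *db, id string) bool {
    d.req <- id    // hand the ID to Run
    return <-d.res // read its verdict on the paired channel
}

func exampleDBUsage() {
    d := &db{req: make(chan string), res: make(chan bool)}
    go d.Run() // Run allocates d.known and serves requests

    fmt.Println(seen(d, "item-1")) // false: not seen before (and presumably recorded)
    fmt.Println(seen(d, "item-1")) // true: already known
}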
4 changes: 2 additions & 2 deletions doc.go
@@ -19,9 +19,9 @@ func main() {
if err != nil {
// handle error.
}
// ... Some time later ...
err = feed.Update()
if err != nil {
// handle error.
…
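The doc.go example is only partially visible in this diff. A fuller usage sketch of what it describes, assuming the package is imported as rss and exposes a top-level Fetch(url) (*Feed, error) entry point (neither the import path nor Fetch appears in this diff, so treat both as assumptions):

package main

import (
    "fmt"
    "log"

    rss "github.com/SlyMarbo/rss" // import path assumed
)

func main() {
    feed, err := rss.Fetch("http://example.com/feed.atom") // URL is illustrative
    if err != nil {
        log.Fatal(err)
    }
    fmt.Println(feed.Title, "-", feed.Unread, "unread items")

    // ... some time later, refresh the feed in place.
    if err := feed.Update(); err != nil {
        log.Fatal(err)
    }
}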
54 changes: 27 additions & 27 deletions rss 1.0.go
@@ -20,9 +20,9 @@ func parseRSS1(data []byte, read *db) (*Feed, error) {
if feed.Channel == nil {
return nil, fmt.Errorf("Error: no channel found in %q.", string(data))
}

channel := feed.Channel

out := new(Feed)
out.Title = channel.Title
out.Description = channel.Description
@@ -33,43 +33,43 @@ func parseRSS1(data []byte, read *db) (*Feed, error) {
next := time.Now().Add(time.Duration(channel.MinsToLive) * time.Minute)
for _, hour := range channel.SkipHours {
if hour == next.Hour() {
next.Add(time.Duration(60-next.Minute()) * time.Minute)
}
}
trying := true
for trying {
trying = false
for _, day := range channel.SkipDays {
if strings.Title(day) == next.Weekday().String() {
next.Add(time.Duration(24-next.Hour()) * time.Hour)
trying = true
break
}
}
}

out.Refresh = next
}

if out.Refresh.IsZero() {
out.Refresh = time.Now().Add(10 * time.Minute)
}

if feed.Items == nil {
return nil, fmt.Errorf("Error: no feeds found in %q.", string(data))
}

out.Items = make([]*Item, 0, len(feed.Items))
out.ItemMap = make(map[string]struct{})

// Process items.
for _, item := range feed.Items {

// Skip items already known.
if read.req <- item.ID; <-read.res {
continue
}

next := new(Item)
next.Title = item.Title
next.Content = item.Content
@@ -82,43 +82,43 @@ func parseRSS1(data []byte, read *db) (*Feed, error) {
}
next.ID = item.ID
next.Read = false

if next.ID == "" {
if next.Link == "" {
fmt.Printf("Warning: Item %q has no ID or link and will be ignored.\n", next.Title)
continue
}
next.ID = next.Link
}

if _, ok := out.ItemMap[next.ID]; ok {
fmt.Printf("Warning: Item %q has duplicate ID.\n", next.Title)
continue
}

out.Items = append(out.Items, next)
out.ItemMap[next.ID] = struct{}{}
out.Unread++
}

return out, nil
}

type rss1_0Feed struct {
    XMLName xml.Name       `xml:"RDF"`
    Channel *rss1_0Channel `xml:"channel"`
    Items   []rss1_0Item   `xml:"item"`
}

type rss1_0Channel struct {
    XMLName     xml.Name    `xml:"channel"`
    Title       string      `xml:"title"`
    Description string      `xml:"description"`
    Link        string      `xml:"link"`
    Image       rss1_0Image `xml:"image"`
    MinsToLive  int         `xml:"ttl"`
    SkipHours   []int       `xml:"skipHours>hour"`
    SkipDays    []string    `xml:"skipDays>day"`
}
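The rss1_0Channel tags use encoding/xml's a>b path syntax: `xml:"skipHours>hour"` collects every <hour> child of <skipHours> into the slice, and skipDays>day does the same for day names. A standalone demonstration (the XML snippet is invented for the example):

package main

import (
    "encoding/xml"
    "fmt"
)

type channel struct {
    SkipHours []int    `xml:"skipHours>hour"`
    SkipDays  []string `xml:"skipDays>day"`
}

func main() {
    data := []byte(`<channel>
        <skipHours><hour>0</hour><hour>1</hour><hour>2</hour></skipHours>
        <skipDays><day>Saturday</day><day>Sunday</day></skipDays>
    </channel>`)

    var c channel
    if err := xml.Unmarshal(data, &c); err != nil {
        panic(err)
    }
    fmt.Println(c.SkipHours, c.SkipDays) // [0 1 2] [Saturday Sunday]
}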

type rss1_0Item struct {
…

(The diffs for the remaining three changed files are not shown.)
