From b5d843e4a6b9046033efd6e4a25a6f2f16cb3cca Mon Sep 17 00:00:00 2001 From: gutmet Date: Thu, 15 Oct 2020 23:10:09 +0200 Subject: [PATCH] refactor * group code somewhat sanely in separate files * unify output of update endpoint requests and the rest as preparation for templates --- drivel.go | 474 +++++----------------------------------------------- media.go | 260 ++++++++++++++++++++++++++++ requests.go | 59 +++++++ types.go | 112 +++++++++++++ 4 files changed, 474 insertions(+), 431 deletions(-) create mode 100644 media.go create mode 100644 requests.go create mode 100644 types.go diff --git a/drivel.go b/drivel.go index 910377c..6ffb1d0 100644 --- a/drivel.go +++ b/drivel.go @@ -1,30 +1,23 @@ package main import ( - "encoding/base64" "encoding/json" "errors" "flag" "fmt" goutil "git.gutmet.org/goutil.git/misc" - "html" "io/ioutil" "net/http" "net/url" "os" - "path" "path/filepath" - "strconv" "strings" "time" ) const ( - MAX_BYTES = 50 * 1024 * 1024 - CHUNK_SIZE = 1024 * 1024 CHARACTER_LIMIT = 280 WIPE_KEEP_DAYS = 10 - UPLOAD_ENDPOINT = "https://upload.twitter.com/1.1/media/upload.json" STATUS_ENDPOINT = "https://api.twitter.com/1.1/statuses/update.json" MENTIONS_ENDPOINT = "https://api.twitter.com/1.1/statuses/mentions_timeline.json?tweet_mode=extended&count=200" HOME_ENDPOINT = "https://api.twitter.com/1.1/statuses/home_timeline.json?tweet_mode=extended&count=200" @@ -44,123 +37,6 @@ func optLogFatal(decorum string, err error) { } } -type ObjectID string - -type TwitterError struct { - Code int64 - Message string - Label string -} - -type Response struct { - Errors []TwitterError -} - -func (r Response) Error() string { - if len(r.Errors) == 0 { - return "" - } else { - s, _ := json.Marshal(r) - return "Response error " + string(s) - } -} - -func InitRequest(mediaType string, totalBytes int) url.Values { - r := map[string][]string{ - "command": {"INIT"}, - "media_type": {mediaType}, - "total_bytes": {strconv.Itoa(totalBytes)}, - } - if mediaType == "video/mp4" { - r["media_category"] = []string{"tweet_video"} - } - return r -} - -type InitResponse struct { - Response - Media_id_string string -} - -func AppendRequest(mediaID ObjectID, mediaData string, segmentIndex int) url.Values { - return map[string][]string{ - "command": {"APPEND"}, - "media_id": {string(mediaID)}, - "media_data": {mediaData}, - "segment_index": {strconv.Itoa(segmentIndex)}, - } -} - -func FinalizeRequest(mediaID ObjectID) url.Values { - return map[string][]string{ - "command": {"FINALIZE"}, - "media_id": {string(mediaID)}, - } -} - -type FinalizeResponse struct { - Error string - Media_id_string string - Processing_info ProcessingInfo -} - -type ProcessingInfo struct { - State string - Check_after_secs int64 - Progress_percent int64 - Error TwitterError -} - -func PollStatusParameters(mediaID ObjectID) string { - return "?command=STATUS&media_id=" + string(mediaID) -} - -type PollStatusResponse struct { - Processing_info ProcessingInfo -} - -func UpdateStatusRequest(status string, mediaIDs []ObjectID, previousStatusID ObjectID) url.Values { - r := map[string][]string{"status": {status}} - if len(mediaIDs) > 0 { - ids := []string{} - for _, id := range mediaIDs { - ids = append(ids, string(id)) - } - r["media_ids"] = []string{strings.Join(ids, ",")} - } - if len(previousStatusID) > 0 { - r["in_reply_to_status_id"] = []string{string(previousStatusID)} - r["auto_populate_reply_metadata"] = []string{"true"} - } - return r -} - -type UpdateStatusResponse struct { - Response - Id_str string -} - -func 
LookupParameters(ids []string) string { - return "&id=" + strings.Join(ids, ",") -} - -var mimetype = map[string]string{ - ".mp4": "video/mp4", - ".jpg": "image/jpeg", - ".jpeg": "image/jpeg", - ".png": "image/png", - ".gif": "image/gif", -} - -func getMimetype(file string) string { - ext := filepath.Ext(file) - if v, ok := mimetype[ext]; ok { - return v - } else { - return "application/octet-stream" - } -} - func get(url string) []byte { return _send(url, nil, false) } @@ -193,151 +69,13 @@ func _send(url string, vals url.Values, usePost bool) []byte { return body } -func initFileUpload(file string, mediaData []byte) ObjectID { - log := func(err error) { optLogFatal("initFileUpload "+file, err) } - initRequest := InitRequest(getMimetype(file), len(mediaData)) - body := send(UPLOAD_ENDPOINT, initRequest) - var initResponse InitResponse - err := json.Unmarshal(body, &initResponse) - log(err) - log(initResponse) - return ObjectID(initResponse.Media_id_string) -} - -func appendFileChunks(file string, media string, mediaId ObjectID) { - log := func(err error) { optLogFatal("appendFileChunks", err) } - info := func(v ...interface{}) { - if len(media) > CHUNK_SIZE { - fmt.Println(v...) +func lastSpace(slice []rune) int { + for i := len(slice) - 1; i >= 0; i-- { + if slice[i] == ' ' { + return i } } - info("chunk upload", file) - info("total", len(media)) - for i := 0; i*CHUNK_SIZE < len(media); i = i + 1 { - start := i * CHUNK_SIZE - end := (i + 1) * CHUNK_SIZE - if end > len(media) { - end = len(media) - } - info("segment", i, "start", start, "end", end) - appended := false - var body []byte - for try := 0; try < 3 && !appended; try++ { - appRequest := AppendRequest(mediaId, media[start:end], i) - body = send(UPLOAD_ENDPOINT, appRequest) - if string(body) == "" { - appended = true - } - } - if !appended { - log(errors.New(string(body))) - } - } -} - -func finalizeFileUpload(file string, mediaId ObjectID) int64 { - log := func(err error) { optLogFatal("finalizeFileUpload", err) } - body := send(UPLOAD_ENDPOINT, FinalizeRequest(mediaId)) - var finalizeResponse FinalizeResponse - err := json.Unmarshal(body, &finalizeResponse) - log(err) - log(errors.New(finalizeResponse.Error)) - if id := ObjectID(finalizeResponse.Media_id_string); id != "" { - fmt.Println("==> Uploaded " + file + " with id " + string(id)) - procInfo := finalizeResponse.Processing_info - return procInfo.Check_after_secs - } else { - log(errors.New("Could not finalize " + string(mediaId))) - return 0 - } -} - -func wait(seconds int64) { - fmt.Println("Waiting", seconds, "seconds") - time.Sleep(time.Duration(seconds) * time.Second) -} - -func pollStatus(mediaId ObjectID) { - log := func(err error) { optLogFatal("pollStatus "+string(mediaId), err) } - succeeded := false - var error TwitterError - for try := 0; try < 6; try = try + 1 { - body := get(UPLOAD_ENDPOINT + PollStatusParameters(mediaId)) - var response PollStatusResponse - err := json.Unmarshal(body, &response) - log(err) - procInfo := response.Processing_info - state := procInfo.State - error = procInfo.Error - if state == "succeeded" { - succeeded = true - break - } else if state == "failed" { - break - } else { - fmt.Println("Processing progress: ", procInfo.Progress_percent, "%") - seconds := procInfo.Check_after_secs - if seconds > 10 { - seconds = 10 - } - wait(seconds) - } - } - if !succeeded { - log(errors.New("File upload failed " + error.Message)) - } -} - -func uploadFile(file string) ObjectID { - log := func(err error) { optLogFatal("uploadFile "+file, err) } - 
tmpMedia, err := ioutil.ReadFile(file) - log(err) - media := base64.RawURLEncoding.EncodeToString(tmpMedia) - mediaId := initFileUpload(file, tmpMedia) - appendFileChunks(file, media, mediaId) - seconds := finalizeFileUpload(file, mediaId) - if seconds > 0 { - wait(seconds) - pollStatus(mediaId) - } - return mediaId -} - -func uploadAll(files []string) []ObjectID { - ids := []ObjectID{} - for _, f := range files { - if f != "" { - id := uploadFile(f) - ids = append(ids, id) - } - } - return ids -} - -type mediaKind int - -const ( - UNKNOWN mediaKind = iota - PIC - GIF - VIDEO -) - -func kind(path string) mediaKind { - ext := filepath.Ext(path) - switch ext { - case ".jpg": - fallthrough - case ".jpeg": - fallthrough - case ".png": - return PIC - case ".gif": - return GIF - case ".mp4": - return VIDEO - } - return UNKNOWN + return -1 } func splitStatus(status string) []string { @@ -358,12 +96,11 @@ func splitStatus(status string) []string { if len(asRunes) <= characterLimit { limit = len(asRunes) } else { - tmp := asRunes[0:characterLimit] - lastSpace := strings.LastIndex(string(tmp), " ") - if lastSpace == -1 { + limit = lastSpace(asRunes[0:characterLimit]) + if limit == -1 { limit = characterLimit } else { - limit = lastSpace + 1 + limit = limit + 1 } } split = append(split, string(asRunes[0:limit])) @@ -372,19 +109,6 @@ func splitStatus(status string) []string { return split } -func exitIfInvalid(path string) { - log := func(err error) { optLogFatal("exitIfInvalid", err) } - // check existence AND readability - f, err := os.Open(path) - log(err) - defer f.Close() - tmp, err := ioutil.ReadAll(f) - log(err) - if len(tmp) > MAX_BYTES { - log(errors.New("File too big: " + path + " is bigger than maximum of " + strconv.Itoa(MAX_BYTES) + " Bytes")) - } -} - func splitArguments(args []string) data { if len(args) < 1 { fmt.Fprintln(os.Stderr, "Usage: drivel status STATUS [FILE1 FILE2 ...]") @@ -408,18 +132,15 @@ func splitArguments(args []string) data { return d } -func tweet(status string, mediaIDs []ObjectID, previousID ObjectID) ObjectID { +func tweet(status string, mediaIDs []ObjectID, previousID ObjectID) Status { log := func(err error) { optLogFatal("tweet "+status, err) } request := UpdateStatusRequest(status, mediaIDs, previousID) body := send(STATUS_ENDPOINT, request) - var sr UpdateStatusResponse - err := json.Unmarshal(body, &sr) + var tweet Status + err := json.Unmarshal(body, &tweet) log(err) - if len(sr.Errors) > 0 { - log(sr) - } - fmt.Println("==> Updated status to '" + status + "' with id " + sr.Id_str) - return ObjectID(sr.Id_str) + log(tweet) + return tweet } type data struct { @@ -456,12 +177,13 @@ func (d *data) uploadVideo(i int) []ObjectID { return uploadAll([]string{vid}) } -func (d *data) push(previous ObjectID) { - if d == nil { - return - } +func (d *data) push(previous ObjectID) []Status { empty := false i, g, v := 0, 0, 0 + tweets := []Status{} + if d == nil { + return tweets + } for !empty { empty = true status := d.getStatus(i) @@ -484,13 +206,16 @@ func (d *data) push(previous ObjectID) { empty = false } if !empty { - previous = tweet(status, mediaIDs, previous) + t := tweet(status, mediaIDs, previous) + tweets = append(tweets, t) + previous = ObjectID(t.Id_str) i++ } } + return tweets } -func updateStatus(args []string, previous ObjectID, embedTweet ObjectID) { +func updateStatus(args []string, previous ObjectID, embedTweet ObjectID) []Status { d := splitArguments(args) if embedTweet != "" { tweets := _lookup([]string{string(embedTweet)}) @@ -498,11 +223,21 @@ 
func updateStatus(args []string, previous ObjectID, embedTweet ObjectID) { d.status[0] += " " + tweets[0].URL() } } - d.push(previous) + return d.push(previous) +} + +func PrintTweets(tweets []Status, userFilter hashset) { + for _, tweet := range tweets { + if !userFilter.contains(tweet.User.Screen_name) { + fmt.Println(tweet.String()) + fmt.Println("---------") + } + } } func status(args []string) error { - updateStatus(args, "", "") + tweets := updateStatus(args, "", "") + PrintTweets(tweets, nil) return nil } @@ -511,7 +246,8 @@ func reply(args []string) error { fmt.Fprintln(os.Stderr, "Usage: drivel reply TWEET_ID MESSAGE [FILE1 FILE2 ...]") os.Exit(-1) } - updateStatus(args[1:], ObjectID(args[0]), "") + tweets := updateStatus(args[1:], ObjectID(args[0]), "") + PrintTweets(tweets, nil) return nil } @@ -520,93 +256,11 @@ func quote(args []string) error { fmt.Println(os.Stderr, "Usage: drivel quote TWEET_ID MESSAGE [FILE1 FILE2 ...]") os.Exit(-1) } - updateStatus(args[1:], "", ObjectID(args[0])) + tweets := updateStatus(args[1:], "", ObjectID(args[0])) + PrintTweets(tweets, nil) return nil } -type TwitterTime struct { - time.Time -} - -func (twt *TwitterTime) UnmarshalJSON(b []byte) error { - s := strings.Trim(string(b), "\"") - var err error - twt.Time, err = time.Parse(time.RubyDate, s) - return err -} - -type Status struct { - Full_text string - Id_str string - Created_at TwitterTime - In_reply_to_screen_name string - In_reply_to_status_id_str string - User StatusUser - Quoted_status *Status - Retweeted_status *Status - Extended_entities Entities -} - -func (t Status) equals(t2 Status) bool { - return t.Id_str == t2.Id_str -} - -type Entities struct { - Media []Media -} - -type Media struct { - Media_url string -} - -func (m Status) InReplyTo() string { - if m.In_reply_to_status_id_str != "" { - return m.In_reply_to_screen_name + " (" + m.In_reply_to_status_id_str + ")" - } else { - return "" - } -} - -func (m Status) String() string { - if m.Retweeted_status != nil { - return m.User.Screen_name + " retweeted " + m.Retweeted_status.String() - } - s := m.User.Screen_name + " " + "(" + m.Id_str + ")" - if replyTo := m.InReplyTo(); replyTo != "" { - s += " in reply to " + replyTo - } - s += ":\n" + html.UnescapeString(m.Full_text) - allMedia := m.Extended_entities.Media - if len(allMedia) > 0 { - s += "\n\nMedia:" - for _, media := range allMedia { - s += " " + media.Media_url - } - } - if m.Quoted_status != nil { - s += "\n\nQuotes " + m.Quoted_status.String() - } - return s -} - -func PrintTweets(tweets []Status, userFilter hashset) { - for _, tweet := range tweets { - if !userFilter.contains(tweet.User.Screen_name) { - fmt.Println(tweet) - fmt.Println("---------") - } - } -} - -func (m Status) URL() string { - return "https://twitter.com/" + m.User.Screen_name + "/status/" + m.Id_str -} - -type StatusUser struct { - Name string - Screen_name string -} - func _lookup(ids []string) []Status { log := func(err error) { optLogFatal("lookup "+strings.Join(ids, ","), err) } body := get(LOOKUP_ENDPOINT + LookupParameters(ids)) @@ -647,15 +301,6 @@ func home(args []string) error { return nil } -func UserTimelineParameters(flags userTimelineFlags, screenName string) string { - s := "&screen_name=" + screenName - if flags.withReplies { - return s - } else { - return s + "&exclude_replies=true" - } -} - func userTimeline(flags userTimelineFlags, args []string) error { tweets := timeline(TIMELINE_ENDPOINT + UserTimelineParameters(flags, args[0])) PrintTweets(tweets, nil) @@ -674,10 +319,6 @@ func 
userTimelineCommand() (goutil.CommandFlagsInit, goutil.CommandFunc) { return flagsInit, func(args []string) error { return userTimeline(f, args) } } -func RetweetParameters(id string) string { - return id + ".json" -} - func retweet(args []string) error { log := func(err error) { optLogFatal("retweet", err) } if len(args) != 1 { @@ -693,17 +334,10 @@ func retweet(args []string) error { var retweet Status err := json.Unmarshal(body, &retweet) log(err) - fmt.Println("Retweeted", tweets[0]) + PrintTweets([]Status{retweet}, nil) return nil } -func LikeRequest(id string) url.Values { - return map[string][]string{ - "id": {id}, - "tweet_mode": {"extended"}, - } -} - func like(args []string) error { log := func(err error) { optLogFatal("like", err) } if len(args) != 1 { @@ -714,28 +348,10 @@ func like(args []string) error { var tweet Status err := json.Unmarshal(body, &tweet) log(err) - fmt.Println("Liked", tweet) + PrintTweets([]Status{tweet}, nil) return nil } -func equals(t1 []Status, t2 []Status) bool { - if len(t1) != len(t2) { - return false - } - for i := range t1 { - if !t1[i].equals(t2[i]) { - return false - } - } - return true -} - -func UnlikeRequest(id string) url.Values { - return map[string][]string{ - "id": {id}, - } -} - func unlike(id string) { log := func(err error) { optLogFatal("unlike", err) } body := send(DESTROY_LIKE_ENDPOINT, UnlikeRequest(id)) @@ -745,10 +361,6 @@ func unlike(id string) { fmt.Println("Unliked", tweet.Id_str) } -func DestroyParameters(id string) string { - return id + ".json" -} - func destroyStatus(id string) { log := func(err error) { optLogFatal("destroy", err) } body := send(DESTROY_STATUS_ENDPOINT+DestroyParameters(id), nil) @@ -846,8 +458,8 @@ func setFilters(appDir string) { } return set } - homeFilter = getHashset(goutil.ReadFile(path.Join(appDir, "FilterHome"))) - mentionsFilter = getHashset(goutil.ReadFile(path.Join(appDir, "FilterMentions"))) + homeFilter = getHashset(goutil.ReadFile(filepath.Join(appDir, "FilterHome"))) + mentionsFilter = getHashset(goutil.ReadFile(filepath.Join(appDir, "FilterMentions"))) } var client *http.Client diff --git a/media.go b/media.go new file mode 100644 index 0000000..89f014f --- /dev/null +++ b/media.go @@ -0,0 +1,260 @@ +package main + +import ( + "encoding/base64" + "encoding/json" + "errors" + "fmt" + "io/ioutil" + "net/url" + "os" + "path/filepath" + "strconv" + "time" +) + +const ( + MAX_BYTES = 50 * 1024 * 1024 + CHUNK_SIZE = 1024 * 1024 + UPLOAD_ENDPOINT = "https://upload.twitter.com/1.1/media/upload.json" +) + +func InitRequest(mediaType string, totalBytes int) url.Values { + r := map[string][]string{ + "command": {"INIT"}, + "media_type": {mediaType}, + "total_bytes": {strconv.Itoa(totalBytes)}, + } + if mediaType == "video/mp4" { + r["media_category"] = []string{"tweet_video"} + } + return r +} + +type InitResponse struct { + Errors []TwitterError + Media_id_string string +} + +func (ir InitResponse) Error() string { + if len(ir.Errors) == 0 { + return "" + } else { + s, _ := json.Marshal(ir) + return "Response error " + string(s) + } +} + +func AppendRequest(mediaID ObjectID, mediaData string, segmentIndex int) url.Values { + return map[string][]string{ + "command": {"APPEND"}, + "media_id": {string(mediaID)}, + "media_data": {mediaData}, + "segment_index": {strconv.Itoa(segmentIndex)}, + } +} + +func FinalizeRequest(mediaID ObjectID) url.Values { + return map[string][]string{ + "command": {"FINALIZE"}, + "media_id": {string(mediaID)}, + } +} + +type FinalizeResponse struct { + Error string + 
Media_id_string string + Processing_info ProcessingInfo +} + +type ProcessingInfo struct { + State string + Check_after_secs int64 + Progress_percent int64 + Error TwitterError +} + +func PollStatusParameters(mediaID ObjectID) string { + return "?command=STATUS&media_id=" + string(mediaID) +} + +type PollStatusResponse struct { + Processing_info ProcessingInfo +} + +var mimetype = map[string]string{ + ".mp4": "video/mp4", + ".jpg": "image/jpeg", + ".jpeg": "image/jpeg", + ".png": "image/png", + ".gif": "image/gif", +} + +func getMimetype(file string) string { + ext := filepath.Ext(file) + if v, ok := mimetype[ext]; ok { + return v + } else { + return "application/octet-stream" + } +} + +func initFileUpload(file string, mediaData []byte) ObjectID { + log := func(err error) { optLogFatal("initFileUpload "+file, err) } + initRequest := InitRequest(getMimetype(file), len(mediaData)) + body := send(UPLOAD_ENDPOINT, initRequest) + var initResponse InitResponse + err := json.Unmarshal(body, &initResponse) + log(err) + log(initResponse) + return ObjectID(initResponse.Media_id_string) +} + +func appendFileChunks(file string, media string, mediaId ObjectID) { + log := func(err error) { optLogFatal("appendFileChunks", err) } + info := func(v ...interface{}) { + if len(media) > CHUNK_SIZE { + fmt.Println(v...) + } + } + info("chunk upload", file) + info("total", len(media)) + for i := 0; i*CHUNK_SIZE < len(media); i = i + 1 { + start := i * CHUNK_SIZE + end := (i + 1) * CHUNK_SIZE + if end > len(media) { + end = len(media) + } + info("segment", i, "start", start, "end", end) + appended := false + var body []byte + for try := 0; try < 3 && !appended; try++ { + appRequest := AppendRequest(mediaId, media[start:end], i) + body = send(UPLOAD_ENDPOINT, appRequest) + if string(body) == "" { + appended = true + } + } + if !appended { + log(errors.New(string(body))) + } + } +} + +func finalizeFileUpload(file string, mediaId ObjectID) int64 { + log := func(err error) { optLogFatal("finalizeFileUpload", err) } + body := send(UPLOAD_ENDPOINT, FinalizeRequest(mediaId)) + var finalizeResponse FinalizeResponse + err := json.Unmarshal(body, &finalizeResponse) + log(err) + log(errors.New(finalizeResponse.Error)) + if id := ObjectID(finalizeResponse.Media_id_string); id != "" { + fmt.Println("==> Uploaded " + file + " with id " + string(id)) + procInfo := finalizeResponse.Processing_info + return procInfo.Check_after_secs + } else { + log(errors.New("Could not finalize " + string(mediaId))) + return 0 + } +} + +func wait(seconds int64) { + fmt.Println("Waiting", seconds, "seconds") + time.Sleep(time.Duration(seconds) * time.Second) +} + +func pollStatus(mediaId ObjectID) { + log := func(err error) { optLogFatal("pollStatus "+string(mediaId), err) } + succeeded := false + var error TwitterError + for try := 0; try < 6; try = try + 1 { + body := get(UPLOAD_ENDPOINT + PollStatusParameters(mediaId)) + var response PollStatusResponse + err := json.Unmarshal(body, &response) + log(err) + procInfo := response.Processing_info + state := procInfo.State + error = procInfo.Error + if state == "succeeded" { + succeeded = true + break + } else if state == "failed" { + break + } else { + fmt.Println("Processing progress: ", procInfo.Progress_percent, "%") + seconds := procInfo.Check_after_secs + if seconds > 10 { + seconds = 10 + } + wait(seconds) + } + } + if !succeeded { + log(errors.New("File upload failed " + error.Message)) + } +} + +func uploadFile(file string) ObjectID { + log := func(err error) { optLogFatal("uploadFile 
"+file, err) } + tmpMedia, err := ioutil.ReadFile(file) + log(err) + media := base64.RawURLEncoding.EncodeToString(tmpMedia) + mediaId := initFileUpload(file, tmpMedia) + appendFileChunks(file, media, mediaId) + seconds := finalizeFileUpload(file, mediaId) + if seconds > 0 { + wait(seconds) + pollStatus(mediaId) + } + return mediaId +} + +func uploadAll(files []string) []ObjectID { + ids := []ObjectID{} + for _, f := range files { + if f != "" { + id := uploadFile(f) + ids = append(ids, id) + } + } + return ids +} + +type mediaKind int + +const ( + UNKNOWN mediaKind = iota + PIC + GIF + VIDEO +) + +func kind(path string) mediaKind { + ext := filepath.Ext(path) + switch ext { + case ".jpg": + fallthrough + case ".jpeg": + fallthrough + case ".png": + return PIC + case ".gif": + return GIF + case ".mp4": + return VIDEO + } + return UNKNOWN +} + +func exitIfInvalid(path string) { + log := func(err error) { optLogFatal("exitIfInvalid", err) } + // check existence AND readability + f, err := os.Open(path) + log(err) + defer f.Close() + tmp, err := ioutil.ReadAll(f) + log(err) + if len(tmp) > MAX_BYTES { + log(errors.New("File too big: " + path + " is bigger than maximum of " + strconv.Itoa(MAX_BYTES) + " Bytes")) + } +} diff --git a/requests.go b/requests.go new file mode 100644 index 0000000..fee3de0 --- /dev/null +++ b/requests.go @@ -0,0 +1,59 @@ +package main + +import ( + "net/url" + "strings" +) + +func LookupParameters(ids []string) string { + return "&id=" + strings.Join(ids, ",") +} + +func UpdateStatusRequest(status string, mediaIDs []ObjectID, previousStatusID ObjectID) url.Values { + r := map[string][]string{ + "status": {status}, + "tweet_mode": {"extended"}, + } + if len(mediaIDs) > 0 { + ids := []string{} + for _, id := range mediaIDs { + ids = append(ids, string(id)) + } + r["media_ids"] = []string{strings.Join(ids, ",")} + } + if len(previousStatusID) > 0 { + r["in_reply_to_status_id"] = []string{string(previousStatusID)} + r["auto_populate_reply_metadata"] = []string{"true"} + } + return r +} + +func UserTimelineParameters(flags userTimelineFlags, screenName string) string { + s := "&screen_name=" + screenName + if flags.withReplies { + return s + } else { + return s + "&exclude_replies=true" + } +} + +func RetweetParameters(id string) string { + return id + ".json" +} + +func LikeRequest(id string) url.Values { + return map[string][]string{ + "id": {id}, + "tweet_mode": {"extended"}, + } +} + +func UnlikeRequest(id string) url.Values { + return map[string][]string{ + "id": {id}, + } +} + +func DestroyParameters(id string) string { + return id + ".json" +} diff --git a/types.go b/types.go new file mode 100644 index 0000000..b8c0e7d --- /dev/null +++ b/types.go @@ -0,0 +1,112 @@ +package main + +import ( + "encoding/json" + "html" + "strings" + "time" +) + +type ObjectID string + +type TwitterError struct { + Code int64 + Message string + Label string +} + +type TwitterTime struct { + time.Time +} + +func (twt *TwitterTime) UnmarshalJSON(b []byte) error { + s := strings.Trim(string(b), "\"") + var err error + twt.Time, err = time.Parse(time.RubyDate, s) + return err +} + +type Entities struct { + Media []Media +} + +type Media struct { + Media_url string +} + +type StatusUser struct { + Name string + Screen_name string +} + +type Status struct { + Errors []TwitterError + Full_text string + Id_str string + Created_at TwitterTime + In_reply_to_screen_name string + In_reply_to_status_id_str string + User StatusUser + Quoted_status *Status + Retweeted_status *Status + 
Extended_entities Entities +} + +func (t Status) Error() string { + if len(t.Errors) == 0 { + return "" + } else { + s, _ := json.Marshal(t) + return "Response error " + string(s) + } +} + +func (t Status) equals(t2 Status) bool { + return t.Id_str == t2.Id_str +} + +func equals(t1 []Status, t2 []Status) bool { + if len(t1) != len(t2) { + return false + } + for i := range t1 { + if !t1[i].equals(t2[i]) { + return false + } + } + return true +} + +func (t Status) InReplyTo() string { + if t.In_reply_to_status_id_str != "" { + return t.In_reply_to_screen_name + " (" + t.In_reply_to_status_id_str + ")" + } else { + return "" + } +} + +func (t Status) String() string { + if t.Retweeted_status != nil { + return t.User.Screen_name + " retweeted " + t.Retweeted_status.String() + } + s := t.User.Screen_name + " " + "(" + t.Id_str + ")" + if replyTo := t.InReplyTo(); replyTo != "" { + s += " in reply to " + replyTo + } + s += ":\n" + html.UnescapeString(t.Full_text) + allMedia := t.Extended_entities.Media + if len(allMedia) > 0 { + s += "\n\nMedia:" + for _, media := range allMedia { + s += " " + media.Media_url + } + } + if t.Quoted_status != nil { + s += "\n\nQuotes " + t.Quoted_status.String() + } + return s +} + +func (t Status) URL() string { + return "https://twitter.com/" + t.User.Screen_name + "/status/" + t.Id_str +}
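
Note on the split/threading logic this patch keeps: splitStatus still cuts a long status into chunks of at most characterLimit runes, preferring to break just after the last space inside the window, and the new lastSpace helper replaces the earlier strings.LastIndex round-trip through string. Below is a minimal, self-contained sketch of that behaviour; splitRunes and the demo main are hypothetical names for this example only (the real splitStatus lives in drivel.go and takes just the status string), and the limit parameter is added here purely to keep the demo short.

package main

import "fmt"

// lastSpace mirrors the helper added in drivel.go: index of the last
// space in the slice, or -1 if there is none.
func lastSpace(slice []rune) int {
	for i := len(slice) - 1; i >= 0; i-- {
		if slice[i] == ' ' {
			return i
		}
	}
	return -1
}

// splitRunes (hypothetical name) reproduces the splitting rule kept by
// this patch: chunks of at most characterLimit runes, cut after the last
// space in the window when one exists, otherwise cut hard at the limit.
func splitRunes(status string, characterLimit int) []string {
	split := []string{}
	asRunes := []rune(status)
	for len(asRunes) > 0 {
		limit := 0
		if len(asRunes) <= characterLimit {
			limit = len(asRunes)
		} else {
			limit = lastSpace(asRunes[0:characterLimit])
			if limit == -1 {
				limit = characterLimit
			} else {
				limit = limit + 1 // keep the space with the leading chunk
			}
		}
		split = append(split, string(asRunes[0:limit]))
		asRunes = asRunes[limit:]
	}
	return split
}

func main() {
	status := "group code somewhat sanely in separate files and unify output as preparation for templates"
	for i, part := range splitRunes(status, 30) {
		fmt.Printf("%d: %q\n", i+1, part)
	}
}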
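
Note on the "unify output" half: statuses/update now decodes straight into Status (types.go), which carries the Errors slice and implements error itself, so tweet() can hand the decoded value to the same logger as every other endpoint and return it for PrintTweets. The sketch below shows that pattern in isolation; optLogFatal's body is not part of this diff, so the version here is only an assumed stand-in that ignores nil errors and empty messages, and the sample JSON body is made up for the demo.

package main

import (
	"encoding/json"
	"fmt"
	"log"
)

type TwitterError struct {
	Code    int64
	Message string
}

// Status mirrors a small part of the type moved to types.go: the decoded
// response itself carries Errors and satisfies the error interface.
type Status struct {
	Errors []TwitterError
	Id_str string
}

func (t Status) Error() string {
	if len(t.Errors) == 0 {
		return ""
	}
	s, _ := json.Marshal(t)
	return "Response error " + string(s)
}

// optLogFatal is an assumed stand-in for the project's helper: abort only
// when the error is non-nil and has a non-empty message.
func optLogFatal(decorum string, err error) {
	if err != nil && err.Error() != "" {
		log.Fatalln(decorum+":", err)
	}
}

func main() {
	// Fabricated sample response; an error body such as
	// {"errors":[{"code":88,"message":"Rate limit exceeded"}]}
	// would instead abort in the second optLogFatal call.
	body := []byte(`{"id_str":"12345"}`)
	var tweet Status
	optLogFatal("tweet", json.Unmarshal(body, &tweet)) // JSON-level failure
	optLogFatal("tweet", tweet)                        // API-level failure
	fmt.Println("posted", tweet.Id_str)
}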