2019-01-01 19:35:22 +01:00
|
|
|
package main
|
|
|
|
|
|
|
|
import (
|
|
|
|
"encoding/base64"
|
|
|
|
"encoding/json"
|
|
|
|
"errors"
|
2020-09-27 11:46:52 +02:00
|
|
|
"flag"
|
2019-01-01 19:35:22 +01:00
|
|
|
"fmt"
|
2020-10-11 13:14:57 +02:00
|
|
|
goutil "git.gutmet.org/goutil.git/misc"
|
2020-09-22 14:30:50 +02:00
|
|
|
"html"
|
2019-01-01 19:35:22 +01:00
|
|
|
"io/ioutil"
|
|
|
|
"net/http"
|
|
|
|
"net/url"
|
|
|
|
"os"
|
|
|
|
"path/filepath"
|
|
|
|
"strconv"
|
|
|
|
"strings"
|
2020-09-18 20:49:46 +02:00
|
|
|
"time"
|
2019-01-01 19:35:22 +01:00
|
|
|
)
|
|
|
|
|
|
|
|
// Tunables and Twitter API v1.1 endpoint URLs.
const (
	// MAX_BYTES is the maximum accepted media file size (50 MiB).
	MAX_BYTES = 50 * 1024 * 1024
	// CHUNK_SIZE is the segment size used for chunked media uploads (1 MiB).
	CHUNK_SIZE = 1024 * 1024
	// CHARACTER_LIMIT is the maximum tweet length used when splitting text.
	CHARACTER_LIMIT = 280
	// WIPE_KEEP_DAYS is the default number of most recent days the wipe
	// command leaves untouched (see wipeCommand).
	WIPE_KEEP_DAYS = 10

	UPLOAD_ENDPOINT         = "https://upload.twitter.com/1.1/media/upload.json"
	STATUS_ENDPOINT         = "https://api.twitter.com/1.1/statuses/update.json"
	MENTIONS_ENDPOINT       = "https://api.twitter.com/1.1/statuses/mentions_timeline.json?tweet_mode=extended&count=200"
	HOME_ENDPOINT           = "https://api.twitter.com/1.1/statuses/home_timeline.json?tweet_mode=extended&count=200"
	TIMELINE_ENDPOINT       = "https://api.twitter.com/1.1/statuses/user_timeline.json?tweet_mode=extended&count=200"
	LIKES_TIMELINE_ENDPOINT = "https://api.twitter.com/1.1/favorites/list.json?tweet_mode=extended&count=200"
	LOOKUP_ENDPOINT         = "https://api.twitter.com/1.1/statuses/lookup.json?tweet_mode=extended"
	RETWEET_ENDPOINT        = "https://api.twitter.com/1.1/statuses/retweet/"
	LIKE_ENDPOINT           = "https://api.twitter.com/1.1/favorites/create.json"
	DESTROY_STATUS_ENDPOINT = "https://api.twitter.com/1.1/statuses/destroy/"
	DESTROY_LIKE_ENDPOINT   = "https://api.twitter.com/1.1/favorites/destroy.json"
)
|
|
|
|
|
|
|
|
// optLogFatal prints "drivel: <decorum>: <err>" to stderr and exits the
// process when err is a real error. A nil error, or an error whose message
// is empty (Response.Error returns "" on success), is a no-op.
func optLogFatal(decorum string, err error) {
	if err == nil || err.Error() == "" {
		return
	}
	fmt.Fprintln(os.Stderr, "drivel: "+decorum+": "+err.Error())
	os.Exit(-1)
}
|
|
|
|
|
|
|
|
// ObjectID is a Twitter identifier (tweet ID, media ID, ...) kept in the
// string form the API supplies via its *_str / *_id_string fields.
type ObjectID string
|
|
|
|
|
|
|
|
// TwitterError is one error object as reported by the Twitter API.
type TwitterError struct {
	Code    int64
	Message string
	Label   string
}

// Response is the common envelope of Twitter API replies; a non-empty
// Errors slice indicates a failed request.
type Response struct {
	Errors []TwitterError
}

// Error implements the error interface. A successful response yields the
// empty string, which optLogFatal treats as "no error".
func (r Response) Error() string {
	if len(r.Errors) == 0 {
		return ""
	}
	encoded, _ := json.Marshal(r)
	return "Response error " + string(encoded)
}
|
|
|
|
|
|
|
|
// InitRequest builds the form values for the INIT phase of a chunked media
// upload: media type and total raw byte count, plus the tweet_video
// category when the media is an mp4.
func InitRequest(mediaType string, totalBytes int) url.Values {
	vals := url.Values{}
	vals.Set("command", "INIT")
	vals.Set("media_type", mediaType)
	vals.Set("total_bytes", strconv.Itoa(totalBytes))
	if mediaType == "video/mp4" {
		vals.Set("media_category", "tweet_video")
	}
	return vals
}
|
|
|
|
|
|
|
|
// InitResponse is the reply to an INIT upload request; Media_id_string
// identifies the upload session for APPEND/FINALIZE/STATUS.
type InitResponse struct {
	Response
	Media_id_string string
}
|
|
|
|
|
2020-09-18 23:22:42 +02:00
|
|
|
func AppendRequest(mediaID ObjectID, mediaData string, segmentIndex int) url.Values {
|
2019-01-01 19:35:22 +01:00
|
|
|
return map[string][]string{
|
|
|
|
"command": {"APPEND"},
|
2020-09-18 23:22:42 +02:00
|
|
|
"media_id": {string(mediaID)},
|
2019-01-01 19:35:22 +01:00
|
|
|
"media_data": {mediaData},
|
|
|
|
"segment_index": {strconv.Itoa(segmentIndex)},
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2020-09-18 23:22:42 +02:00
|
|
|
func FinalizeRequest(mediaID ObjectID) url.Values {
|
2019-01-01 19:35:22 +01:00
|
|
|
return map[string][]string{
|
|
|
|
"command": {"FINALIZE"},
|
2020-09-18 23:22:42 +02:00
|
|
|
"media_id": {string(mediaID)},
|
2019-01-01 19:35:22 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// FinalizeResponse is the reply to a FINALIZE upload request. For videos
// the server may keep processing asynchronously; Processing_info then says
// when to poll (see pollStatus).
type FinalizeResponse struct {
	Error           string
	Media_id_string string
	Processing_info ProcessingInfo
}
|
|
|
|
|
|
|
|
// ProcessingInfo reports the server-side processing state of an upload.
// State is observed to be "succeeded" or "failed" (see pollStatus); other
// values mean processing is still in progress.
type ProcessingInfo struct {
	State            string
	Check_after_secs int64 // suggested delay before the next STATUS poll
	Progress_percent int64
	Error            TwitterError
}
|
|
|
|
|
|
|
|
func PollStatusParameters(mediaID ObjectID) string {
|
|
|
|
return "?command=STATUS&media_id=" + string(mediaID)
|
2019-01-01 19:35:22 +01:00
|
|
|
}
|
|
|
|
|
2020-09-19 22:12:59 +02:00
|
|
|
// PollStatusResponse is the reply to a STATUS poll of a media upload.
type PollStatusResponse struct {
	Processing_info ProcessingInfo
}
|
|
|
|
|
2019-01-01 19:35:22 +01:00
|
|
|
func UpdateStatusRequest(status string, mediaIDs []ObjectID, previousStatusID ObjectID) url.Values {
|
|
|
|
r := map[string][]string{"status": {status}}
|
|
|
|
if len(mediaIDs) > 0 {
|
|
|
|
ids := []string{}
|
|
|
|
for _, id := range mediaIDs {
|
|
|
|
ids = append(ids, string(id))
|
|
|
|
}
|
|
|
|
r["media_ids"] = []string{strings.Join(ids, ",")}
|
|
|
|
}
|
|
|
|
if len(previousStatusID) > 0 {
|
|
|
|
r["in_reply_to_status_id"] = []string{string(previousStatusID)}
|
|
|
|
r["auto_populate_reply_metadata"] = []string{"true"}
|
|
|
|
}
|
|
|
|
return r
|
|
|
|
}
|
|
|
|
|
|
|
|
// UpdateStatusResponse is the reply to a status update; Id_str is the ID of
// the newly created tweet.
type UpdateStatusResponse struct {
	Response
	Id_str string
}
|
|
|
|
|
2020-09-19 22:12:59 +02:00
|
|
|
// LookupParameters returns the extra query-string parameter selecting the
// given tweet IDs for the lookup endpoint.
func LookupParameters(ids []string) string {
	joined := strings.Join(ids, ",")
	return "&id=" + joined
}
|
|
|
|
|
2019-01-01 19:35:22 +01:00
|
|
|
// mimetype maps the file extensions drivel accepts to their MIME types.
var mimetype = map[string]string{
	".mp4":  "video/mp4",
	".jpg":  "image/jpeg",
	".jpeg": "image/jpeg",
	".png":  "image/png",
	".gif":  "image/gif",
}

// getMimetype guesses the MIME type of file from its extension, falling
// back to the generic binary type for anything unknown.
func getMimetype(file string) string {
	mt, known := mimetype[filepath.Ext(file)]
	if !known {
		return "application/octet-stream"
	}
	return mt
}
|
|
|
|
|
2020-09-27 11:46:52 +02:00
|
|
|
// get performs a GET request via the package-level client and returns the
// response body. Any failure is fatal (see _send).
func get(url string) []byte {
	return _send(url, nil, false)
}
|
|
|
|
|
2020-09-27 11:46:52 +02:00
|
|
|
// send performs a form POST of vals via the package-level client and
// returns the response body. Any failure is fatal (see _send).
func send(url string, vals url.Values) []byte {
	return _send(url, vals, true)
}
|
|
|
|
|
2020-09-27 11:46:52 +02:00
|
|
|
func _send(url string, vals url.Values, usePost bool) []byte {
|
2019-01-01 19:35:22 +01:00
|
|
|
log := func(err error) {
|
|
|
|
v, _ := json.Marshal(vals)
|
2020-09-18 23:22:42 +02:00
|
|
|
optLogFatal("get/post "+url+" "+string(v), err)
|
2019-01-01 19:35:22 +01:00
|
|
|
}
|
2020-09-17 16:56:18 +02:00
|
|
|
var resp *http.Response
|
|
|
|
var err error
|
|
|
|
if usePost {
|
|
|
|
resp, err = client.PostForm(url, vals)
|
|
|
|
} else {
|
|
|
|
resp, err = client.Get(url)
|
|
|
|
}
|
2019-01-01 19:35:22 +01:00
|
|
|
log(err)
|
|
|
|
defer resp.Body.Close()
|
|
|
|
body, err := ioutil.ReadAll(resp.Body)
|
2020-08-08 12:00:45 +02:00
|
|
|
if resp.StatusCode < 200 || resp.StatusCode > 299 {
|
2020-08-08 11:52:42 +02:00
|
|
|
fmt.Fprintln(os.Stderr, "response:", resp, "\n")
|
|
|
|
fmt.Fprintln(os.Stderr, "body:", string(body), "\n")
|
|
|
|
log(errors.New("HTTP status " + fmt.Sprint(resp.StatusCode)))
|
|
|
|
}
|
2019-01-01 19:35:22 +01:00
|
|
|
log(err)
|
|
|
|
return body
|
|
|
|
}
|
|
|
|
|
2020-09-27 11:46:52 +02:00
|
|
|
// initFileUpload runs the INIT phase of Twitter's chunked media upload and
// returns the media ID the server assigned. mediaData is used only for its
// length (total_bytes). Errors are fatal.
func initFileUpload(file string, mediaData []byte) ObjectID {
	log := func(err error) { optLogFatal("initFileUpload "+file, err) }
	initRequest := InitRequest(getMimetype(file), len(mediaData))
	body := send(UPLOAD_ENDPOINT, initRequest)
	var initResponse InitResponse
	err := json.Unmarshal(body, &initResponse)
	log(err)
	// InitResponse embeds Response, whose Error() is "" on success, so this
	// exits only when the server reported errors.
	log(initResponse)
	return ObjectID(initResponse.Media_id_string)
}
|
|
|
|
|
2020-09-27 11:46:52 +02:00
|
|
|
// appendFileChunks runs the APPEND phase of a chunked upload: media (the
// base64-encoded file content) is sent in CHUNK_SIZE slices, each retried
// up to 3 times. A chunk that still fails after all retries is fatal.
// Progress lines are only printed for uploads larger than one chunk.
func appendFileChunks(file string, media string, mediaId ObjectID) {
	log := func(err error) { optLogFatal("appendFileChunks", err) }
	// info prints only for multi-chunk uploads, keeping small uploads quiet.
	info := func(v ...interface{}) {
		if len(media) > CHUNK_SIZE {
			fmt.Println(v...)
		}
	}
	info("chunk upload", file)
	info("total", len(media))
	for i := 0; i*CHUNK_SIZE < len(media); i = i + 1 {
		start := i * CHUNK_SIZE
		end := (i + 1) * CHUNK_SIZE
		if end > len(media) {
			end = len(media)
		}
		info("segment", i, "start", start, "end", end)
		appended := false
		var body []byte
		for try := 0; try < 3 && !appended; try++ {
			appRequest := AppendRequest(mediaId, media[start:end], i)
			body = send(UPLOAD_ENDPOINT, appRequest)
			// A successful APPEND returns an empty body.
			if string(body) == "" {
				appended = true
			}
		}
		if !appended {
			// body holds the last error payload from the server.
			log(errors.New(string(body)))
		}
	}
}
|
|
|
|
|
2020-09-27 11:46:52 +02:00
|
|
|
// finalizeFileUpload runs the FINALIZE phase of a chunked upload. It
// returns the server-suggested number of seconds to wait before polling
// processing status (0 when no asynchronous processing is pending).
// Errors — including a missing media ID in the reply — are fatal.
func finalizeFileUpload(file string, mediaId ObjectID) int64 {
	log := func(err error) { optLogFatal("finalizeFileUpload", err) }
	body := send(UPLOAD_ENDPOINT, FinalizeRequest(mediaId))
	var finalizeResponse FinalizeResponse
	err := json.Unmarshal(body, &finalizeResponse)
	log(err)
	// An empty Error string is treated as "no error" by optLogFatal.
	log(errors.New(finalizeResponse.Error))
	if id := ObjectID(finalizeResponse.Media_id_string); id != "" {
		fmt.Println("==> Uploaded " + file + " with id " + string(id))
		procInfo := finalizeResponse.Processing_info
		return procInfo.Check_after_secs
	} else {
		log(errors.New("Could not finalize " + string(mediaId)))
		return 0
	}
}
|
|
|
|
|
|
|
|
// wait announces the delay on stdout and then sleeps for that many seconds.
func wait(seconds int64) {
	fmt.Println("Waiting", seconds, "seconds")
	pause := time.Duration(seconds) * time.Second
	time.Sleep(pause)
}
|
|
|
|
|
2020-09-27 11:46:52 +02:00
|
|
|
// pollStatus polls the STATUS endpoint for an uploaded media until
// processing succeeds or fails, waiting between polls as the server
// suggests (capped at 10 s) and giving up after 6 attempts. A failed or
// unfinished upload is fatal.
func pollStatus(mediaId ObjectID) {
	log := func(err error) { optLogFatal("pollStatus "+string(mediaId), err) }
	succeeded := false
	// NOTE: this local shadows the builtin `error` type within the loop.
	var error TwitterError
	for try := 0; try < 6; try = try + 1 {
		body := get(UPLOAD_ENDPOINT + PollStatusParameters(mediaId))
		var response PollStatusResponse
		err := json.Unmarshal(body, &response)
		log(err)
		procInfo := response.Processing_info
		state := procInfo.State
		error = procInfo.Error
		if state == "succeeded" {
			succeeded = true
			break
		} else if state == "failed" {
			break
		} else {
			fmt.Println("Processing progress: ", procInfo.Progress_percent, "%")
			seconds := procInfo.Check_after_secs
			// Cap the server-suggested delay so six retries stay bounded.
			if seconds > 10 {
				seconds = 10
			}
			wait(seconds)
		}
	}
	if !succeeded {
		log(errors.New("File upload failed " + error.Message))
	}
}
|
|
|
|
|
2020-09-27 11:46:52 +02:00
|
|
|
// uploadFile uploads one media file via the chunked INIT/APPEND/FINALIZE
// flow and returns its media ID, polling processing status when the server
// requests it. Errors are fatal.
func uploadFile(file string) ObjectID {
	log := func(err error) { optLogFatal("uploadFile "+file, err) }
	tmpMedia, err := ioutil.ReadFile(file)
	log(err)
	// NOTE(review): chunks are base64-encoded with RawURLEncoding (URL-safe
	// alphabet, no padding); confirm this is the variant the media_data
	// field expects rather than standard base64.
	media := base64.RawURLEncoding.EncodeToString(tmpMedia)
	// INIT declares the raw byte count; APPEND sends the encoded chunks.
	mediaId := initFileUpload(file, tmpMedia)
	appendFileChunks(file, media, mediaId)
	seconds := finalizeFileUpload(file, mediaId)
	if seconds > 0 {
		wait(seconds)
		pollStatus(mediaId)
	}
	return mediaId
}
|
|
|
|
|
2020-09-27 11:46:52 +02:00
|
|
|
func uploadAll(files []string) []ObjectID {
|
2019-01-01 19:35:22 +01:00
|
|
|
ids := []ObjectID{}
|
|
|
|
for _, f := range files {
|
|
|
|
if f != "" {
|
2020-09-27 11:46:52 +02:00
|
|
|
id := uploadFile(f)
|
2019-01-01 19:35:22 +01:00
|
|
|
ids = append(ids, id)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return ids
|
|
|
|
}
|
|
|
|
|
|
|
|
// mediaKind classifies an attachment by file type.
type mediaKind int

const (
	UNKNOWN mediaKind = iota
	PIC
	GIF
	VIDEO
)

// kind derives the mediaKind of path from its file extension.
func kind(path string) mediaKind {
	switch filepath.Ext(path) {
	case ".jpg", ".jpeg", ".png":
		return PIC
	case ".gif":
		return GIF
	case ".mp4":
		return VIDEO
	default:
		return UNKNOWN
	}
}
|
|
|
|
|
|
|
|
func splitStatus(status string) []string {
|
|
|
|
split := []string{}
|
|
|
|
words := strings.Split(status, " ")
|
|
|
|
s := ""
|
|
|
|
for _, word := range words {
|
2020-10-12 09:18:07 +02:00
|
|
|
asRunes := []rune(word)
|
|
|
|
if s == "" && len(asRunes) <= CHARACTER_LIMIT {
|
2019-01-01 19:35:22 +01:00
|
|
|
s = word
|
2020-10-12 09:18:07 +02:00
|
|
|
} else if len(s)+1+len(asRunes) <= CHARACTER_LIMIT {
|
2019-01-01 19:35:22 +01:00
|
|
|
s = s + " " + word
|
|
|
|
} else {
|
|
|
|
split = append(split, s)
|
2020-10-12 09:18:07 +02:00
|
|
|
bound := goutil.IntMin(len(asRunes), CHARACTER_LIMIT)
|
|
|
|
s = string(asRunes[:bound])
|
2019-01-01 19:35:22 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
if s != "" {
|
|
|
|
split = append(split, s)
|
|
|
|
}
|
|
|
|
return split
|
|
|
|
}
|
|
|
|
|
|
|
|
func exitIfInvalid(path string) {
|
|
|
|
log := func(err error) { optLogFatal("exitIfInvalid", err) }
|
|
|
|
// check existence AND readability
|
|
|
|
f, err := os.Open(path)
|
|
|
|
log(err)
|
|
|
|
defer f.Close()
|
|
|
|
tmp, err := ioutil.ReadAll(f)
|
|
|
|
log(err)
|
|
|
|
if len(tmp) > MAX_BYTES {
|
|
|
|
log(errors.New("File too big: " + path + " is bigger than maximum of " + strconv.Itoa(MAX_BYTES) + " Bytes"))
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2020-09-17 16:56:18 +02:00
|
|
|
// splitArguments parses a status command line: args[0] is the tweet text
// (split into tweet-sized chunks), the remaining args are media files
// sorted into pics/gifs/videos by extension. Invalid or unsupported files
// are fatal; missing arguments print usage and exit.
func splitArguments(args []string) data {
	if len(args) < 1 {
		fmt.Fprintln(os.Stderr, "Usage: drivel status STATUS [FILE1 FILE2 ...]")
		os.Exit(-1)
	}
	d := data{}
	d.status = splitStatus(args[0])
	for _, arg := range args[1:] {
		exitIfInvalid(arg)
		switch kind(arg) {
		case PIC:
			d.pics = append(d.pics, arg)
		case GIF:
			d.gifs = append(d.gifs, arg)
		case VIDEO:
			d.videos = append(d.videos, arg)
		default:
			optLogFatal("splitArguments", errors.New("Unsupported file: "+arg))
		}
	}
	return d
}
|
|
|
|
|
2020-09-27 11:46:52 +02:00
|
|
|
// tweet posts one status update (optionally with media, optionally as a
// reply to previousID) and returns the new tweet's ID. Errors are fatal.
func tweet(status string, mediaIDs []ObjectID, previousID ObjectID) ObjectID {
	log := func(err error) { optLogFatal("tweet "+status, err) }
	request := UpdateStatusRequest(status, mediaIDs, previousID)
	body := send(STATUS_ENDPOINT, request)
	var sr UpdateStatusResponse
	err := json.Unmarshal(body, &sr)
	log(err)
	if len(sr.Errors) > 0 {
		// sr embeds Response and thus implements error.
		log(sr)
	}
	fmt.Println("==> Updated status to '" + status + "' with id " + sr.Id_str)
	return ObjectID(sr.Id_str)
}
|
|
|
|
|
|
|
|
// data holds a parsed status command: the tweet-sized text chunks and the
// media files grouped by kind (see splitArguments).
type data struct {
	status []string
	pics   []string
	gifs   []string
	videos []string
}
|
|
|
|
|
|
|
|
// getStatus returns the i-th text chunk. Presumably goutil.StrSliceAt
// yields "" when i is out of range — push's termination relies on this;
// TODO confirm against goutil.
func (d *data) getStatus(i int) string {
	return goutil.StrSliceAt(d.status, i)
}

// getGif returns the i-th gif path (see getStatus for out-of-range note).
func (d *data) getGif(i int) string {
	return goutil.StrSliceAt(d.gifs, i)
}

// getVideo returns the i-th video path (see getStatus for out-of-range note).
func (d *data) getVideo(i int) string {
	return goutil.StrSliceAt(d.videos, i)
}
|
|
|
|
|
2020-09-27 11:46:52 +02:00
|
|
|
// uploadPics uploads the pictures in positions [from, to) and returns their
// media IDs. goutil.StrSlice presumably clamps the range — TODO confirm.
func (d *data) uploadPics(from, to int) []ObjectID {
	pics := goutil.StrSlice(d.pics, from, to)
	return uploadAll(pics)
}

// uploadGif uploads the i-th gif; the result is empty when there is none
// (uploadAll skips empty paths).
func (d *data) uploadGif(i int) []ObjectID {
	gif := d.getGif(i)
	return uploadAll([]string{gif})
}

// uploadVideo uploads the i-th video; the result is empty when there is
// none (uploadAll skips empty paths).
func (d *data) uploadVideo(i int) []ObjectID {
	vid := d.getVideo(i)
	return uploadAll([]string{vid})
}
|
|
|
|
|
2020-09-27 11:46:52 +02:00
|
|
|
// push posts the parsed command as a chain of tweets, each replying to the
// previous one (starting from the given previous ID, which may be empty).
// Per tweet it consumes the next text chunk plus up to 4 pictures, else one
// gif, else one video. The loop ends on the first iteration that has
// neither text nor media left.
func (d *data) push(previous ObjectID) {
	if d == nil {
		return
	}
	empty := false
	// i indexes text chunks (and 4-picture groups), g gifs, v videos.
	i, g, v := 0, 0, 0
	for !empty {
		empty = true
		status := d.getStatus(i)
		mediaIDs := []ObjectID{}
		if status != "" {
			empty = false
		}
		// Up to 4 pictures per tweet, taken in order.
		from := i * 4
		to := (i + 1) * 4
		mediaIDs = d.uploadPics(from, to)
		if len(mediaIDs) == 0 {
			// No pictures for this tweet: try one gif instead.
			mediaIDs = d.uploadGif(g)
			g++
		}
		if len(mediaIDs) == 0 {
			// Still nothing: try one video.
			mediaIDs = d.uploadVideo(v)
			v++
		}
		if len(mediaIDs) > 0 {
			empty = false
		}
		if !empty {
			previous = tweet(status, mediaIDs, previous)
			i++
		}
	}
}
|
|
|
|
|
2020-09-19 22:12:59 +02:00
|
|
|
// updateStatus parses args and posts them as a tweet chain. previous (may
// be empty) makes the first tweet a reply; embedTweet (may be empty)
// appends that tweet's URL to the first chunk, producing a quote tweet.
func updateStatus(args []string, previous ObjectID, embedTweet ObjectID) {
	d := splitArguments(args)
	if embedTweet != "" {
		tweets := _lookup([]string{string(embedTweet)})
		if len(tweets) == 1 {
			// NOTE(review): panics if the message text was empty (d.status
			// has no chunks then) — confirm quote is always called with a
			// non-empty message.
			d.status[0] += " " + tweets[0].URL()
		}
	}
	d.push(previous)
}
|
|
|
|
|
|
|
|
// status implements the "status" command: post a new tweet (chain) with
// optional media. Always returns nil; failures exit the process.
func status(args []string) error {
	updateStatus(args, "", "")
	return nil
}
|
|
|
|
|
2020-09-17 21:44:57 +02:00
|
|
|
// reply implements the "reply" command: args[0] is the tweet ID to reply
// to, the rest is a regular status command line.
func reply(args []string) error {
	if len(args) < 2 {
		fmt.Fprintln(os.Stderr, "Usage: drivel reply TWEET_ID MESSAGE [FILE1 FILE2 ...]")
		os.Exit(-1)
	}
	updateStatus(args[1:], ObjectID(args[0]), "")
	return nil
}
|
|
|
|
|
|
|
|
func quote(args []string) error {
|
|
|
|
if len(args) < 2 {
|
2020-09-22 08:09:34 +02:00
|
|
|
fmt.Println(os.Stderr, "Usage: drivel quote TWEET_ID MESSAGE [FILE1 FILE2 ...]")
|
2020-09-19 22:12:59 +02:00
|
|
|
os.Exit(-1)
|
|
|
|
}
|
|
|
|
updateStatus(args[1:], "", ObjectID(args[0]))
|
2020-09-17 21:44:57 +02:00
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
2020-09-27 11:46:52 +02:00
|
|
|
// TwitterTime embeds time.Time and decodes Twitter's created_at timestamps,
// which use the RubyDate layout ("Mon Jan 02 15:04:05 -0700 2006").
type TwitterTime struct {
	time.Time
}

// UnmarshalJSON parses a quoted RubyDate timestamp. The parse result is
// assigned unconditionally (the zero Time on failure), matching the
// original behavior; the parse error, if any, is returned.
func (twt *TwitterTime) UnmarshalJSON(b []byte) error {
	raw := strings.Trim(string(b), `"`)
	parsed, err := time.Parse(time.RubyDate, raw)
	twt.Time = parsed
	return err
}
|
|
|
|
|
2020-09-17 21:44:57 +02:00
|
|
|
// Status is a tweet as returned by the v1.1 statuses endpoints with
// tweet_mode=extended.
type Status struct {
	Full_text                 string
	Id_str                    string
	Created_at                TwitterTime
	In_reply_to_screen_name   string
	In_reply_to_status_id_str string
	User                      StatusUser
	Quoted_status             *Status // non-nil for quote tweets
	Retweeted_status          *Status // non-nil for retweets
	Extended_entities         Entities
}
|
|
|
|
|
2020-09-27 11:46:52 +02:00
|
|
|
// equals reports whether t and t2 denote the same tweet (compared by ID).
func (t Status) equals(t2 Status) bool {
	return t.Id_str == t2.Id_str
}
|
|
|
|
|
2020-09-20 09:48:52 +02:00
|
|
|
// Entities holds the extended_entities object of a tweet (media only).
type Entities struct {
	Media []Media
}

// Media is one media attachment; only its URL is used here.
type Media struct {
	Media_url string
}
|
|
|
|
|
2020-09-22 08:09:34 +02:00
|
|
|
func (m Status) InReplyTo() string {
|
|
|
|
if m.In_reply_to_status_id_str != "" {
|
|
|
|
return m.In_reply_to_screen_name + " (" + m.In_reply_to_status_id_str + ")"
|
|
|
|
} else {
|
|
|
|
return ""
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2020-09-17 21:44:57 +02:00
|
|
|
func (m Status) String() string {
|
2020-09-20 21:54:38 +02:00
|
|
|
if m.Retweeted_status != nil {
|
|
|
|
return m.User.Screen_name + " retweeted " + m.Retweeted_status.String()
|
|
|
|
}
|
2020-09-22 08:09:34 +02:00
|
|
|
s := m.User.Screen_name + " " + "(" + m.Id_str + ")"
|
|
|
|
if replyTo := m.InReplyTo(); replyTo != "" {
|
|
|
|
s += " in reply to " + replyTo
|
|
|
|
}
|
2020-09-22 14:30:50 +02:00
|
|
|
s += ":\n" + html.UnescapeString(m.Full_text)
|
2020-09-20 09:48:52 +02:00
|
|
|
allMedia := m.Extended_entities.Media
|
|
|
|
if len(allMedia) > 0 {
|
|
|
|
s += "\n\nMedia:"
|
|
|
|
for _, media := range allMedia {
|
|
|
|
s += " " + media.Media_url
|
|
|
|
}
|
|
|
|
}
|
2020-09-20 09:27:30 +02:00
|
|
|
if m.Quoted_status != nil {
|
2020-09-20 09:32:23 +02:00
|
|
|
s += "\n\nQuotes " + m.Quoted_status.String()
|
2020-09-20 09:27:30 +02:00
|
|
|
}
|
|
|
|
return s
|
2020-09-17 16:56:18 +02:00
|
|
|
}
|
|
|
|
|
2020-09-22 08:09:34 +02:00
|
|
|
// PrintTweets prints each tweet followed by a separator line.
func PrintTweets(tweets []Status) {
	for _, tweet := range tweets {
		fmt.Println(tweet)
		fmt.Println("---------")
	}
}
|
|
|
|
|
2020-09-19 22:12:59 +02:00
|
|
|
// URL returns the canonical web URL of the tweet.
func (m Status) URL() string {
	return "https://twitter.com/" + m.User.Screen_name + "/status/" + m.Id_str
}
|
|
|
|
|
2020-09-17 21:44:57 +02:00
|
|
|
// StatusUser is the author information embedded in a tweet.
type StatusUser struct {
	Name        string
	Screen_name string
}
|
|
|
|
|
2020-09-27 11:46:52 +02:00
|
|
|
// _lookup fetches the tweets with the given IDs from the lookup endpoint.
// Errors are fatal; unknown IDs simply yield fewer results.
func _lookup(ids []string) []Status {
	log := func(err error) { optLogFatal("lookup "+strings.Join(ids, ","), err) }
	body := get(LOOKUP_ENDPOINT + LookupParameters(ids))
	var tweets []Status
	err := json.Unmarshal(body, &tweets)
	log(err)
	return tweets
}
|
|
|
|
|
2020-09-22 08:09:34 +02:00
|
|
|
// lookup implements the "lookup" command: fetch and print the tweets with
// the given IDs.
func lookup(args []string) error {
	if len(args) < 1 {
		fmt.Fprintln(os.Stderr, "USAGE: drivel lookup TWEET_ID1 [TWEET_ID2 TWEET_ID3 ...]")
		os.Exit(-1)
	}
	tweets := _lookup(args)
	PrintTweets(tweets)
	return nil
}
|
|
|
|
|
2020-09-27 11:46:52 +02:00
|
|
|
// timeline fetches a timeline endpoint and returns its tweets, printing
// them unless quiet is set. Errors are fatal.
func timeline(endpoint string, quiet bool) []Status {
	log := func(err error) { optLogFatal("timeline", err) }
	body := get(endpoint)
	var tweets []Status
	err := json.Unmarshal(body, &tweets)
	log(err)
	if !quiet {
		PrintTweets(tweets)
	}
	return tweets
}
|
|
|
|
|
|
|
|
// mentions implements the "mentions" command: print the mentions timeline.
func mentions(args []string) error {
	timeline(MENTIONS_ENDPOINT, false)
	return nil
}
|
|
|
|
|
2020-09-17 21:44:57 +02:00
|
|
|
// home implements the "home" command: print the home timeline.
func home(args []string) error {
	timeline(HOME_ENDPOINT, false)
	return nil
}
|
|
|
|
|
2020-09-20 20:37:12 +02:00
|
|
|
// UserTimelineParameters returns the extra query-string parameter selecting
// the user whose timeline should be fetched.
func UserTimelineParameters(screenName string) string {
	return fmt.Sprintf("&screen_name=%s", screenName)
}
|
|
|
|
|
|
|
|
func userTimeline(args []string) error {
|
2020-09-27 11:46:52 +02:00
|
|
|
timeline(TIMELINE_ENDPOINT+UserTimelineParameters(args[0]), false)
|
2020-09-20 20:37:12 +02:00
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
2020-09-19 22:31:09 +02:00
|
|
|
// RetweetParameters returns the path suffix ("<id>.json") for the retweet
// endpoint.
func RetweetParameters(id string) string {
	return fmt.Sprintf("%s.json", id)
}
|
|
|
|
|
|
|
|
// retweet implements the "retweet" command: verify the tweet exists via
// lookup, retweet it, and print the original. Errors are fatal.
func retweet(args []string) error {
	log := func(err error) { optLogFatal("retweet", err) }
	if len(args) != 1 {
		fmt.Fprintln(os.Stderr, "USAGE: drivel retweet TWEET_ID")
		os.Exit(-1)
	}
	id := args[0]
	// Look the tweet up first so a bad ID fails with a clear message.
	tweets := _lookup([]string{id})
	if len(tweets) != 1 {
		log(errors.New("Could not find tweet " + id))
	}
	body := send(RETWEET_ENDPOINT+RetweetParameters(id), nil)
	var retweet Status
	err := json.Unmarshal(body, &retweet)
	log(err)
	fmt.Println("Retweeted", tweets[0])
	return nil
}
|
|
|
|
|
2020-09-18 09:18:32 +02:00
|
|
|
// LikeRequest builds the form values for liking the tweet with the given
// ID, requesting the extended tweet representation in the reply.
func LikeRequest(id string) url.Values {
	vals := url.Values{}
	vals.Set("id", id)
	vals.Set("tweet_mode", "extended")
	return vals
}
|
|
|
|
|
|
|
|
// like implements the "like" command: like the tweet with the given ID and
// print it. Errors are fatal.
func like(args []string) error {
	log := func(err error) { optLogFatal("like", err) }
	if len(args) != 1 {
		fmt.Fprintln(os.Stderr, "USAGE: drivel like TWEET_ID")
		os.Exit(-1)
	}
	body := send(LIKE_ENDPOINT, LikeRequest(args[0]))
	var tweet Status
	err := json.Unmarshal(body, &tweet)
	log(err)
	fmt.Println("Liked", tweet)
	return nil
}
|
|
|
|
|
2020-09-27 11:46:52 +02:00
|
|
|
func equals(t1 []Status, t2 []Status) bool {
|
|
|
|
if len(t1) != len(t2) {
|
|
|
|
return false
|
|
|
|
}
|
|
|
|
for i := range t1 {
|
|
|
|
if !t1[i].equals(t2[i]) {
|
|
|
|
return false
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return true
|
|
|
|
}
|
|
|
|
|
|
|
|
// UnlikeRequest builds the form values for removing a like from the tweet
// with the given ID.
func UnlikeRequest(id string) url.Values {
	vals := url.Values{}
	vals.Set("id", id)
	return vals
}
|
|
|
|
|
|
|
|
// unlike removes a like from the tweet with the given ID and prints the
// affected tweet's ID. Errors are fatal.
func unlike(id string) {
	log := func(err error) { optLogFatal("unlike", err) }
	body := send(DESTROY_LIKE_ENDPOINT, UnlikeRequest(id))
	var tweet Status
	err := json.Unmarshal(body, &tweet)
	log(err)
	fmt.Println("Unliked", tweet.Id_str)
}
|
|
|
|
|
|
|
|
// DestroyParameters returns the path suffix ("<id>.json") for the status
// destroy endpoint (same shape as RetweetParameters).
func DestroyParameters(id string) string {
	return fmt.Sprintf("%s.json", id)
}
|
|
|
|
|
|
|
|
// destroyStatus deletes the tweet with the given ID and prints the deleted
// tweet's ID. Errors are fatal.
func destroyStatus(id string) {
	log := func(err error) { optLogFatal("destroy", err) }
	body := send(DESTROY_STATUS_ENDPOINT+DestroyParameters(id), nil)
	var tweet Status
	err := json.Unmarshal(body, &tweet)
	log(err)
	fmt.Println("Destroyed", tweet.Id_str)
}
|
|
|
|
|
|
|
|
// wipeTimeline deletes (or unlikes, when likes is true) every tweet on the
// user's own timeline that is older than keepDays days, up to a hard cap of
// 200 deletions. It refetches the timeline page after each pass and stops
// when a fetch returns the same tweets as before (nothing left to delete).
func wipeTimeline(likes bool, keepDays int) {
	var endpoint string
	if likes {
		endpoint = LIKES_TIMELINE_ENDPOINT
	} else {
		endpoint = TIMELINE_ENDPOINT
	}
	n := 0
	now := time.Now()
	tweets := timeline(endpoint, true)
	for {
		for _, tweet := range tweets {
			daysSince := now.Sub(tweet.Created_at.Time).Hours() / 24
			// Skip tweets whose timestamp failed to parse (zero value).
			if tweet.Created_at != (TwitterTime{}) && daysSince >= float64(keepDays) {
				if likes {
					unlike(tweet.Id_str)
				} else {
					destroyStatus(tweet.Id_str)
				}
				n++
				// Safety cap per invocation.
				if n >= 200 {
					fmt.Println("reached limit of 200")
					return
				}
			}
		}
		// Refetch; an unchanged page means no progress is possible.
		newTweets := timeline(endpoint, true)
		if !equals(newTweets, tweets) {
			tweets = newTweets
		} else {
			return
		}
	}
}
|
|
|
|
|
|
|
|
// wipe implements the "wipe" command: remove old likes, then old tweets,
// keeping the most recent flags.keepDays days of each.
func wipe(flags wipeFlags) error {
	wipeTimeline(true, flags.keepDays)
	wipeTimeline(false, flags.keepDays)
	return nil
}
|
|
|
|
|
|
|
|
// wipeFlags holds the command-line options of the wipe command.
type wipeFlags struct {
	keepDays int // number of most recent days to leave untouched
}
|
|
|
|
|
|
|
|
// wipeCommand wires the wipe command into goutil's flag handling: it
// returns the flag initializer (registering -keepDays) and the command
// function, both closing over the same wipeFlags value.
func wipeCommand() (goutil.CommandFlagsInit, goutil.CommandFunc) {
	f := wipeFlags{}
	flagsInit := func(s *flag.FlagSet) {
		s.IntVar(&f.keepDays, "keepDays", WIPE_KEEP_DAYS, "don't wipe the last N days")
	}
	return flagsInit, func([]string) error { return wipe(f) }
}
|
|
|
|
|
|
|
|
// client is the HTTP client used for all API calls; set once in main via
// getClient (defined elsewhere in the package).
var client *http.Client
|
|
|
|
|
2020-09-17 16:56:18 +02:00
|
|
|
// main initializes the shared HTTP client and dispatches to the selected
// subcommand via goutil's command framework.
func main() {
	// getClient is defined elsewhere in the package (not in this chunk);
	// presumably it returns an OAuth-configured client — confirm there.
	client = getClient()
	commands := []goutil.Command{
		goutil.NewCommand("status", status, "post a status with message and/or media"),
		goutil.NewCommand("home", home, "get your home timeline"),
		goutil.NewCommand("mentions", mentions, "get your mention timeline"),
		goutil.NewCommand("timeline", userTimeline, "get timeline of a specific user"),
		goutil.NewCommand("lookup", lookup, "lookup tweets with specific IDs"),
		goutil.NewCommand("reply", reply, "reply to a tweet with a specific ID"),
		goutil.NewCommand("quote", quote, "quote retweet a tweet with a specific ID"),
		goutil.NewCommand("retweet", retweet, "retweet a tweet with a specific ID"),
		goutil.NewCommand("like", like, "like a tweet with a specific ID"),
		goutil.NewCommandWithFlags("wipe", wipeCommand, "wipe your timeline and likes"),
	}
	err := goutil.Execute(commands)
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
}
|