11 Commits (1.7 ... 1.9)

Author  SHA1  Message  Date
Pijus Kamandulis  feee0a9154  Added support for vm.tiktok.com urls  2020-03-22 12:38:08 +02:00
Pijus Kamandulis  af7972685e  Fixed circular dependency issue  2020-03-22 00:22:08 +02:00
Pijus Kamandulis  f9d35e3bf2  TTDL-7 Added flag; Code clean up  2020-03-22 02:10:24 +02:00
Pijus Kamandulis  9a65746fd4  Update go.yml  2020-02-25 21:44:43 +02:00
Pijus Kamandulis  70c605a696  Merge pull request #6 from intracomof/master (Download videos by hashtag; limit option; get just json data)  2020-02-25 21:33:57 +02:00
alexpin  208bffb846  error handling  2020-02-25 21:16:57 +02:00
alexpin  7b9b7688a1  formatter  2020-02-25 21:03:06 +02:00
intracomof  e77c904f89  Merge branch 'master' into master  2020-02-25 21:01:43 +02:00
alexpin  68612282ee  default limit value updated; WaitReady(video) removed  2020-02-25 20:55:56 +02:00
alexpin  b6bb470064  formatter  2020-02-25 01:01:10 +02:00
alexpin  f724f0f2a2  Download videos by hashtag; get json data without video downloading; limit option  2020-02-25 00:56:19 +02:00
25 changed files with 437 additions and 63 deletions

View File

@@ -1,5 +1,5 @@
name: tiktok-dl_CI
on: [push]
on: [push, pull_request]
jobs:
build:
strategy:

View File

@@ -16,11 +16,14 @@ You can download items listed in a text file by running `./tiktok-dl [OPTIONS] -
Clone this repository and run `go build` to build the executable.
## Available options
* `-debug` - enables debug mode
* `-output some_directory` - Output path (default "./downloads")
* `-metadata` - Write video metadata to a .json file
* `-archive` - Download only videos not listed in the archive file. Record the IDs of all downloaded videos in it.
* `-batch-file` - File containing URLs/Usernames to download, one value per line. Lines starting with '#', are considered as comments and ignored.
* `-deadline` - Sets the timout for scraper logic in seconds (used as a workaround for context deadline exceeded error) (default 1500)
* `-debug` - enables debug mode
* `-json` - Returns whole data, that was scraped from TikTok, in json
* `-limit` - Sets the max count of video that will be downloaded (default infinity)
* `-metadata` - Write video metadata to a .json file
* `-output some_directory` - Output path (default "./downloads")
* `-quiet` - Supress output
## Acknowledgments
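For orientation, two illustrative invocations combining the newly documented flags (the URLs, the limit value, and the archive file name are placeholders, not taken from the repository):

./tiktok-dl -limit 25 -archive archive.txt https://www.tiktok.com/tag/sometag
./tiktok-dl -metadata -output ./downloads https://www.tiktok.com/@someuser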

View File

@@ -3,15 +3,16 @@ package client
import (
"context"
"errors"
"github.com/chromedp/chromedp"
"io/ioutil"
"log"
"os"
"strings"
"time"
"github.com/chromedp/chromedp"
config "../models/config"
utils "../utils"
log "../utils/log"
)
// GetMusicUploads - Get all uploads by given music
@@ -33,7 +34,7 @@ func executeClientAction(url string, jsAction string) (string, error) {
ctx, cancel := chromedp.NewContext(
allocCtx,
chromedp.WithLogf(log.Printf),
chromedp.WithLogf(log.Logf),
)
defer cancel()
@@ -84,9 +85,9 @@ func runScrapeWithInfo(ctx context.Context, jsAction string, url string) (string
}
if jsOutput != "0" {
utils.Logf("\rPreloading... Currently loaded %s items.", jsOutput)
log.Logf("\rPreloading... %s items have been found.", jsOutput)
} else {
utils.Logf("\rPreloading...")
log.Logf("\rPreloading...")
}
if err := chromedp.Run(ctx, chromedp.EvaluateAsDevTools("currentState.finished.toString()", &jsOutput)); err != nil {
@@ -100,7 +101,7 @@ func runScrapeWithInfo(ctx context.Context, jsAction string, url string) (string
time.Sleep(50 * time.Millisecond)
}
utils.Log("\nRetrieving items...")
log.Log("\nRetrieving items...")
if err := chromedp.Run(ctx,
// Wait until custom js finishes
chromedp.WaitVisible(`video_urls`),

View File

@@ -0,0 +1,28 @@
package client
import (
"fmt"
models "../models"
config "../models/config"
)
// GetHashtagUploads - Get all uploads marked with given hashtag
func GetHashtagUploads(hashtagURL string) ([]models.Upload, error) {
actionOutput, err := GetHashtagUploadsJSON(hashtagURL)
if err != nil {
return nil, err
}
return models.ParseUploads(actionOutput), nil
}
// GetHashtagUploadsJSON - Get hashtag uploads scrape
func GetHashtagUploadsJSON(hashtagURL string) (string, error) {
jsMethod := fmt.Sprintf("bootstrapIteratingVideos(%d)", config.Config.Limit)
actionOutput, err := executeClientAction(hashtagURL, jsMethod)
if err != nil {
return "", err
}
return actionOutput, nil
}
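A brief caller sketch for the two new entry points, written as a hypothetical main package that mirrors the relative-import style used throughout this repository (the hashtag URL is a placeholder); workflows/downloadHashtag.go, added further down, uses them the same way:

package main

import (
	"fmt"

	client "./client" // relative-import style used across this repository
)

func main() {
	// Parsed models drive the download workflow...
	uploads, err := client.GetHashtagUploads("https://www.tiktok.com/tag/sometag")
	if err != nil {
		panic(err)
	}
	fmt.Println(len(uploads), "uploads found")

	// ...while the raw scrape output backs the new -json mode.
	raw, err := client.GetHashtagUploadsJSON("https://www.tiktok.com/tag/sometag")
	if err != nil {
		panic(err)
	}
	fmt.Println(raw)
}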

View File

@@ -1,14 +1,27 @@
package client
import (
"fmt"
models "../models"
config "../models/config"
)
// GetMusicUploads - Get all uploads by given music
func GetMusicUploads(url string) ([]models.Upload, error) {
actionOutput, err := executeClientAction(url, "bootstrapIteratingVideos()")
actionOutput, err := GetMusicUploadsJSON(url)
if err != nil {
return nil, err
}
return models.ParseUploads(actionOutput), nil
}
// GetMusicUploadsJSON - Get music uploads scrape
func GetMusicUploadsJSON(url string) (string, error) {
jsMethod := fmt.Sprintf("bootstrapIteratingVideos(%d)", config.Config.Limit)
actionOutput, err := executeClientAction(url, jsMethod)
if err != nil {
return "", err
}
return actionOutput, nil
}

client/getRedirectUrl.go (new file, 52 lines)
View File

@@ -0,0 +1,52 @@
package client
import (
"context"
"github.com/chromedp/chromedp"
"io/ioutil"
"os"
"time"
config "../models/config"
log "../utils/log"
)
func GetRedirectUrl(url string) (string, error) {
dir, err := ioutil.TempDir("", "chromedp-example")
if err != nil {
return "", err
}
defer os.RemoveAll(dir)
opts := append(chromedp.DefaultExecAllocatorOptions[:],
chromedp.DisableGPU,
chromedp.UserDataDir(dir),
chromedp.Flag("headless", !config.Config.Debug),
)
allocCtx, cancel := chromedp.NewExecAllocator(context.Background(), opts...)
defer cancel()
ctx, cancel := chromedp.NewContext(
allocCtx,
chromedp.WithLogf(log.Logf),
)
defer cancel()
ctx, cancel = context.WithTimeout(ctx, time.Duration(config.Config.Deadline)*time.Second)
defer cancel()
var jsOutput string
if err := chromedp.Run(ctx,
// Navigate to user's page
chromedp.Navigate(url),
// Wait until page loads
chromedp.WaitReady(`div`),
// Grab url links from our element
chromedp.EvaluateAsDevTools(`window.location.href`, &jsOutput),
); err != nil {
return "", err
}
return jsOutput, err
}

View File

@@ -1,14 +1,27 @@
package client
import (
"fmt"
models "../models"
config "../models/config"
)
// GetUserUploads - Get all uploads by user
func GetUserUploads(username string) ([]models.Upload, error) {
actionOutput, err := executeClientAction(`https://www.tiktok.com/@`+username, "bootstrapIteratingVideos()")
actionOutput, err := GetUserUploadsJSON(username)
if err != nil {
return nil, err
}
return models.ParseUploads(actionOutput), nil
}
// GetUserUploadsJSON - Get user uploads scrape
func GetUserUploadsJSON(username string) (string, error) {
jsMethod := fmt.Sprintf("bootstrapIteratingVideos(%d)", config.Config.Limit)
actionOutput, err := executeClientAction(`https://www.tiktok.com/@`+username, jsMethod)
if err != nil {
return "", err
}
return actionOutput, nil
}

View File

@@ -11,20 +11,26 @@ var Config struct {
URL string
OutputPath string
BatchFilePath string
ArchiveFilePath string
Debug bool
MetaData bool
Quiet bool
JSONOnly bool
Deadline int
Limit int
}
// GetConfig - Returns Config object
func GetConfig() {
outputPath := flag.String("output", "./downloads", "Output path")
batchFilePath := flag.String("batch-file", "", "File containing URLs/Usernames to download, one value per line. Lines starting with '#', are considered as comments and ignored.")
archive := flag.String("archive", "", "Download only videos not listed in the archive file. Record the IDs of all downloaded videos in it.")
debug := flag.Bool("debug", false, "Enables debug mode")
metadata := flag.Bool("metadata", false, "Write video metadata to a .json file")
quiet := flag.Bool("quiet", false, "Supress output")
jsonOnly := flag.Bool("json", false, "Just get JSON data from scraper (without video downloading)")
deadline := flag.Int("deadline", 1500, "Sets the timout for scraper logic in seconds (used as a workaround for 'context deadline exceeded' error)")
limit := flag.Int("limit", 0, "Sets the videos count limit (useful when there too many videos from the user or by hashtag)")
flag.Parse()
args := flag.Args()
@@ -41,8 +47,14 @@ func GetConfig() {
}
Config.OutputPath = *outputPath
Config.BatchFilePath = *batchFilePath
Config.ArchiveFilePath = *archive
Config.Debug = *debug
Config.MetaData = *metadata
Config.Quiet = *quiet
if *jsonOnly {
Config.Quiet = true
}
Config.JSONOnly = *jsonOnly
Config.Deadline = *deadline
Config.Limit = *limit
}
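Note the design choice that -json forces quiet mode, presumably so stdout carries nothing but the scraped JSON; an illustrative invocation under that assumption (the URL, limit value, and output file are placeholders):

./tiktok-dl -json -limit 10 https://www.tiktok.com/tag/sometag > sometag.json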

View File

@@ -1,11 +1,13 @@
package models
import (
res "../resources"
utils "../utils"
"encoding/json"
"os"
"strings"
res "../resources"
checkErr "../utils/checkErr"
log "../utils/log"
)
// Upload - Upload object
@@ -47,16 +49,16 @@ func (u Upload) GetUploadID() string {
func (u Upload) WriteToFile(outputPath string) {
bytes, err := json.Marshal(u)
if err != nil {
utils.Logf(res.ErrorCouldNotSerializeJSON, u.GetUploadID())
log.Logf(res.ErrorCouldNotSerializeJSON, u.GetUploadID())
panic(err)
}
// Create the file
out, err := os.Create(outputPath)
utils.CheckErr(err)
checkErr.CheckErr(err)
defer out.Close()
// Write to file
_, err = out.Write(bytes)
utils.CheckErr(err)
checkErr.CheckErr(err)
}

View File

@@ -1,10 +1,11 @@
package models
import (
testUtil "../unitTestUtil"
utils "../utils"
"os"
"testing"
testUtil "../unitTestUtil"
fileio "../utils/fileio"
)
func TestParseUploads(t *testing.T) {
@@ -62,7 +63,7 @@ func TestWriteToFile(t *testing.T) {
upload.WriteToFile(filePath)
actual := utils.ReadFileToString(filePath)
actual := fileio.ReadFileToString(filePath)
tu.AssertString(actual, expected, "File content")
os.Remove(filePath)

View File

@@ -1,7 +1,7 @@
optStrings = {
selectors: {
feedLoading: 'div.tiktok-loading.feed-loading',
modalArrowLeft: 'div.video-card-modal > div > img.arrow-right',
modalArrowRight: 'div.video-card-modal > div > img.arrow-right',
modalClose: '.video-card-modal > div > div.close',
modalPlayer: 'div > div > main > div.video-card-modal > div > div.video-card-big > div.video-card-container > div > div > video',
modalShareInput: '.copy-link-container > input',
@@ -36,6 +36,7 @@ optStrings = {
currentState = {
preloadCount: 0,
finished: false,
limit: 0
};
checkForErrors = function() {
@@ -65,8 +66,14 @@ buldVidUrlArray = function(finishCallback) {
var videoArray = [];
var intervalID = window.setInterval(x => {
videoArray.push(getCurrentModalVideo());
var arrowRight = document.querySelectorAll(optStrings.selectors.modalArrowLeft)[0];
if(currentState.limit > 0) {
if (videoArray.length >= currentState.limit) {
window.clearInterval(intervalID);
document.querySelector(optStrings.selectors.modalClose).click();
finishCallback(videoArray);
}
}
var arrowRight = document.querySelectorAll(optStrings.selectors.modalArrowRight)[0];
if (arrowRight.classList.contains(optStrings.classes.modalCloseDisabled)) {
window.clearInterval(intervalID);
document.querySelector(optStrings.selectors.modalClose).click();
@@ -127,6 +134,12 @@ scrollWhileNew = function(finishCallback) {
var intervalID = window.setInterval(x => {
var oldCount = state.count;
state.count = document.getElementsByClassName(optStrings.classes.feedVideoItem).length;
if(currentState.limit > 0) {
if (currentState.preloadCount >= currentState.limit || state.count >= currentState.limit) {
finishCallback(createVidUrlElement);
window.clearInterval(intervalID);
}
}
if(checkForErrors()) {
window.clearInterval(intervalID);
return;
@@ -145,7 +158,8 @@ scrollWhileNew = function(finishCallback) {
}, 1000);
};
bootstrapIteratingVideos = function() {
bootstrapIteratingVideos = function(limit) {
currentState.limit = limit;
scrollWhileNew(buldVidUrlArray);
return 'bootstrapIteratingVideos';
};
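The new limit parameter is supplied from the Go side; a minimal sketch of that bridge, assuming a chromedp context prepared as in client/client.go above (runWithLimit is a hypothetical helper name, not part of the repository):

package client

import (
	"context"
	"fmt"

	"github.com/chromedp/chromedp"
)

// runWithLimit formats the configured limit into the page-side call added
// above; on the JS side a limit of 0 leaves the scraper unbounded.
func runWithLimit(ctx context.Context, limit int) (string, error) {
	var jsOutput string
	err := chromedp.Run(ctx,
		chromedp.EvaluateAsDevTools(fmt.Sprintf("bootstrapIteratingVideos(%d)", limit), &jsOutput),
	)
	return jsOutput, err
}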

utils/archive.go (new file, 54 lines)
View File

@@ -0,0 +1,54 @@
package utils
import (
models "../models"
config "../models/config"
fileio "./fileio"
log "./log"
)
// IsItemInArchive - Checks if the item is already archived
func IsItemInArchive(upload models.Upload) bool {
if len(RemoveArchivedItems([]models.Upload{upload})) == 0 {
return true
}
return false
}
// RemoveArchivedItems - Returns items slice without archived items
func RemoveArchivedItems(uploads []models.Upload) []models.Upload {
archiveFilePath := config.Config.ArchiveFilePath
if archiveFilePath == "" || !fileio.CheckIfExists(archiveFilePath) {
return uploads
}
removeArchivedItemsDelegate := func(archivedItem string) {
for i, upload := range uploads {
if upload.GetUploadID() == archivedItem {
uploads = append(uploads[:i], uploads[i+1:]...)
}
}
}
lenBeforeRemoval := len(uploads)
fileio.ReadFileLineByLine(archiveFilePath, removeArchivedItemsDelegate)
removedCount := lenBeforeRemoval - len(uploads)
if removedCount > 0 {
log.Logf("%d items, found in archive. Skipping...\n", removedCount)
}
return uploads
}
// AddItemToArchive - Adds item to archived list
func AddItemToArchive(uploadID string) {
archiveFilePath := config.Config.ArchiveFilePath
if archiveFilePath == "" {
return
}
fileio.AppendToFile(uploadID, archiveFilePath)
}
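A hedged sketch of how these helpers are meant to compose, mirroring the calls added to the workflows further down (downloadAllSkippingArchived and the download callback are hypothetical names, not part of the repository):

package workflows

import (
	models "../models"
	utils "../utils"
)

// downloadAllSkippingArchived filters against the -archive file first and
// records each upload ID only after its download callback has run.
func downloadAllSkippingArchived(uploads []models.Upload, download func(models.Upload)) {
	uploads = utils.RemoveArchivedItems(uploads)
	for _, upload := range uploads {
		download(upload)
		utils.AddItemToArchive(upload.GetUploadID())
	}
}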

View File

@@ -4,21 +4,23 @@ import (
"io"
"net/http"
"os"
checkErr "./checkErr"
)
// DownloadFile - Downloads content from `url` and stores it in `outputPath`
func DownloadFile(outputPath string, url string) {
// Get the data
resp, err := http.Get(url)
CheckErr(err)
checkErr.CheckErr(err)
defer resp.Body.Close()
// Create the file
out, err := os.Create(outputPath)
CheckErr(err)
checkErr.CheckErr(err)
defer out.Close()
// Write the body to file
_, err = io.Copy(out, resp.Body)
CheckErr(err)
checkErr.CheckErr(err)
}

View File

@@ -4,6 +4,8 @@ import (
"bufio"
"io/ioutil"
"os"
checkErr "../checkErr"
)
type delegateString func(string)
@@ -37,7 +39,7 @@ func ReadFileToString(path string) string {
// ReadFileLineByLine - Reads file line by line and calls delegate
func ReadFileLineByLine(path string, delegate delegateString) {
file, err := os.Open(path)
CheckErr(err)
checkErr.CheckErr(err)
defer file.Close()
scanner := bufio.NewScanner(file)
@@ -49,3 +51,14 @@ func ReadFileLineByLine(path string, delegate delegateString) {
panic(err)
}
}
// AppendToFile - Appends line to file
func AppendToFile(str string, filePath string) {
f, err := os.OpenFile(filePath, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644)
checkErr.CheckErr(err)
defer f.Close()
if _, err := f.WriteString(str + "\n"); err != nil {
checkErr.CheckErr(err)
}
}

utils/getHashtag.go (new file, 16 lines)
View File

@@ -0,0 +1,16 @@
package utils
import (
res "../resources"
"fmt"
"strings"
)
// GetHashtagFromURL - Get's tag name from passed url
func GetHashtagFromURL(str string) string {
if match := strings.Contains(str, "/tag/"); match {
return strings.Split(str, "/tag/")[1]
}
panic(fmt.Sprintf(res.ErrorCouldNotRecogniseURL, str))
}
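For illustration (hypothetical input): GetHashtagFromURL("https://www.tiktok.com/tag/funny") returns "funny", while any URL without "/tag/" panics with res.ErrorCouldNotRecogniseURL.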

View File

@@ -1,9 +1,10 @@
package utils
import (
config "../models/config"
"fmt"
"os"
config "../../models/config"
)
// Log - Write to std out

View File

@@ -2,11 +2,13 @@ package utils
import (
"io/ioutil"
checkErr "./checkErr"
)
// ReadFileAsString - Returns contents of given file
func ReadFileAsString(fileName string) string {
content, err := ioutil.ReadFile(fileName)
CheckErr(err)
checkErr.CheckErr(err)
return string(content)
}

View File

@@ -2,7 +2,8 @@ package workflows
import (
res "../resources"
utils "../utils"
fileio "../utils/fileio"
log "../utils/log"
)
// CanUseDownloadBatchFile - Check's if DownloadBatchFile can be used
@@ -12,11 +13,11 @@ func CanUseDownloadBatchFile(batchFilePath string) bool {
// DownloadBatchFile - Download items from batch file
func DownloadBatchFile(batchFilePath string) {
if !utils.CheckIfExists(batchFilePath) {
utils.LogFatal(res.ErrorPathNotFound, batchFilePath)
if !fileio.CheckIfExists(batchFilePath) {
log.LogFatal(res.ErrorPathNotFound, batchFilePath)
}
utils.ReadFileLineByLine(batchFilePath, downloadItem)
fileio.ReadFileLineByLine(batchFilePath, downloadItem)
}
func downloadItem(batchItem string) {

View File

@@ -0,0 +1,52 @@
package workflows
import (
"fmt"
"strings"
client "../client"
config "../models/config"
res "../resources"
utils "../utils"
fileio "../utils/fileio"
log "../utils/log"
)
// CanUseDownloadHashtag - Test's if this workflow can be used for parameter
func CanUseDownloadHashtag(url string) bool {
match := strings.Contains(url, "/tag/")
return match
}
// DownloadHashtag - Download videos marked with given hashtag
func DownloadHashtag(url string) {
uploads, err := client.GetHashtagUploads(url)
if err != nil {
log.LogErr(res.ErrorCouldNotGetUserUploads, err.Error())
return
}
uploads = utils.RemoveArchivedItems(uploads)
uploadCount := len(uploads)
hashtag := utils.GetHashtagFromURL(url)
downloadDir := fmt.Sprintf("%s/%s", config.Config.OutputPath, hashtag)
fileio.InitOutputDirectory(downloadDir)
for index, upload := range uploads {
downloadVideo(upload, downloadDir)
log.Logf("\r[%d/%d] Downloaded", index+1, uploadCount)
}
log.Log()
}
// GetHashtagJSON - Prints scraped info from hashtag
func GetHashtagJSON(url string) {
uploads, err := client.GetHashtagUploadsJSON(url)
if err != nil {
log.LogErr(res.ErrorCouldNotGetUserUploads, err.Error())
return
}
fmt.Printf("%s", uploads)
}

View File

@@ -1,12 +1,15 @@
package workflows
import (
"fmt"
"regexp"
client "../client"
config "../models/config"
res "../resources"
utils "../utils"
"fmt"
"regexp"
fileio "../utils/fileio"
log "../utils/log"
)
// CanUseDownloadMusic - Check's if DownloadMusic can be used for parameter
@@ -19,18 +22,30 @@ func CanUseDownloadMusic(url string) bool {
func DownloadMusic(url string) {
uploads, err := client.GetMusicUploads(url)
if err != nil {
utils.LogErr(res.ErrorCouldNotGetUserUploads, err.Error())
log.LogErr(res.ErrorCouldNotGetUserUploads, err.Error())
return
}
uploads = utils.RemoveArchivedItems(uploads)
uploadCount := len(uploads)
for index, upload := range uploads {
username := utils.GetUsernameFromString(upload.Uploader)
downloadDir := fmt.Sprintf("%s/%s", config.Config.OutputPath, username)
utils.InitOutputDirectory(downloadDir)
fileio.InitOutputDirectory(downloadDir)
downloadVideo(upload, downloadDir)
utils.Logf("\r[%d/%d] Downloaded", index+1, uploadCount)
log.Logf("\r[%d/%d] Downloaded", index+1, uploadCount)
}
utils.Log()
log.Log()
}
// GetMusicJSON - Prints scraped info from music
func GetMusicJSON(url string) {
uploads, err := client.GetMusicUploadsJSON(url)
if err != nil {
log.LogErr(res.ErrorCouldNotGetUserUploads, err.Error())
return
}
fmt.Printf("%s", uploads)
}

View File

@@ -0,0 +1,27 @@
package workflows
import (
client "../client"
res "../resources"
log "../utils/log"
"regexp"
)
// CanUseDownloadShareLink - Check's if DownloadShareLink can be used
func CanUseDownloadShareLink(url string) bool {
match, _ := regexp.MatchString("vm.tiktok.com\\/.+", url)
return match
}
// DownloadShareLink - Download item by share link
func DownloadShareLink(url string) {
log.Logf("Resolving share link: %s\n", url)
finalURL, err := client.GetRedirectUrl(url)
if err != nil {
log.LogErr(res.ErrorCouldNotGetUserUploads, err.Error())
return
}
StartWorkflowByParameter(finalURL)
}
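Combined with client/getRedirectUrl.go above, this covers short share links end to end; an illustrative invocation (the link ID is a placeholder):

./tiktok-dl https://vm.tiktok.com/ABC123/

The share URL is resolved in a headless browser and the final URL is handed back to StartWorkflowByParameter for the usual user/hashtag/music handling.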

View File

@@ -1,13 +1,16 @@
package workflows
import (
"fmt"
"regexp"
"strings"
client "../client"
config "../models/config"
res "../resources"
utils "../utils"
"fmt"
"regexp"
"strings"
fileio "../utils/fileio"
log "../utils/log"
)
// CanUseDownloadUser - Test's if this workflow can be used for parameter
@@ -21,17 +24,30 @@ func CanUseDownloadUser(url string) bool {
func DownloadUser(username string) {
uploads, err := client.GetUserUploads(username)
if err != nil {
utils.LogErr(res.ErrorCouldNotGetUserUploads, err.Error())
log.LogErr(res.ErrorCouldNotGetUserUploads, err.Error())
return
}
uploads = utils.RemoveArchivedItems(uploads)
uploadCount := len(uploads)
downloadDir := fmt.Sprintf("%s/%s", config.Config.OutputPath, username)
utils.InitOutputDirectory(downloadDir)
fileio.InitOutputDirectory(downloadDir)
for index, upload := range uploads {
downloadVideo(upload, downloadDir)
utils.Logf("\r[%d/%d] Downloaded", index+1, uploadCount)
log.Logf("\r[%d/%d] Downloaded", index+1, uploadCount)
}
utils.Log()
log.Log()
}
// GetUserVideosJSON - Prints scraped info from user
func GetUserVideosJSON(username string) {
uploads, err := client.GetUserUploadsJSON(username)
if err != nil {
log.LogErr(res.ErrorCouldNotGetUserUploads, err.Error())
return
}
fmt.Printf("%s", uploads)
}

View File

@@ -1,13 +1,16 @@
package workflows
import (
"fmt"
"regexp"
client "../client"
models "../models"
config "../models/config"
res "../resources"
utils "../utils"
"fmt"
"regexp"
fileio "../utils/fileio"
log "../utils/log"
)
// CanUseDownloadSingleVideo - Check's if DownloadSingleVideo can be used for parameter
@@ -21,14 +24,18 @@ func DownloadSingleVideo(url string) {
username := utils.GetUsernameFromString(url)
upload, err := client.GetVideoDetails(url)
if err != nil {
utils.LogErr(res.ErrorCouldNotGetUserUploads, err.Error())
log.LogErr(res.ErrorCouldNotGetUserUploads, err.Error())
return
}
if utils.IsItemInArchive(upload) {
return
}
downloadDir := fmt.Sprintf("%s/%s", config.Config.OutputPath, username)
utils.InitOutputDirectory(downloadDir)
fileio.InitOutputDirectory(downloadDir)
downloadVideo(upload, downloadDir)
utils.Log("[1/1] Downloaded\n")
log.Log("[1/1] Downloaded\n")
}
// DownloadVideo - Downloads one video
@@ -36,7 +43,7 @@ func downloadVideo(upload models.Upload, downloadDir string) {
uploadID := upload.GetUploadID()
downloadPath := fmt.Sprintf("%s/%s.mp4", downloadDir, uploadID)
if utils.CheckIfExists(downloadPath) {
if fileio.CheckIfExists(downloadPath) {
return
}
@@ -46,4 +53,6 @@ func downloadVideo(upload models.Upload, downloadDir string) {
metadataPath := fmt.Sprintf("%s/%s.json", downloadDir, uploadID)
upload.WriteToFile(metadataPath)
}
utils.AddItemToArchive(upload.GetUploadID())
}

View File

@@ -1,8 +1,10 @@
package workflows
import (
config "../models/config"
res "../resources"
utils "../utils"
log "../utils/log"
)
// StartWorkflowByParameter - Start needed workflow by given parameter
@@ -10,7 +12,11 @@ func StartWorkflowByParameter(url string) {
// Music
if CanUseDownloadMusic(url) {
if config.Config.JSONOnly {
GetMusicJSON(url)
} else {
DownloadMusic(url)
}
return
}
@@ -22,9 +28,30 @@ func StartWorkflowByParameter(url string) {
// Tiktok user
if CanUseDownloadUser(url) {
if config.Config.JSONOnly {
GetUserVideosJSON(utils.GetUsernameFromString(url))
} else {
DownloadUser(utils.GetUsernameFromString(url))
}
return
}
utils.LogFatal(res.ErrorCouldNotRecogniseURL, url)
// Tiktok hashtag
if CanUseDownloadHashtag(url) {
if config.Config.JSONOnly {
GetHashtagJSON(url)
} else {
DownloadHashtag(url)
}
return
}
// Share URL
if CanUseDownloadShareLink(url) {
DownloadShareLink(url)
return
}
log.LogFatal(res.ErrorCouldNotRecogniseURL, url)
}