5 Commits
1.8 ... 1.9.1

Author SHA1 Message Date
Pijus Kamandulis 92006d864f TTDL-20 Wait for error/items before quitting 2020-04-08 23:26:50 +03:00
Pijus Kamandulis 668b050dee Added sonar-scanner; Create LICENSE; Update go.yml 2020-04-04 21:09:41 +03:00
Pijus Kamandulis feee0a9154 Added support for vm.tiktok.com urls 2020-03-22 12:38:08 +02:00
Pijus Kamandulis af7972685e Fixed circular dependency issue 2020-03-22 00:22:08 +02:00
Pijus Kamandulis f9d35e3bf2 TTDL-7 Added flag; Code clean up 2020-03-22 02:10:24 +02:00
28 changed files with 335 additions and 93 deletions

@@ -33,6 +33,9 @@ jobs:
- name: Build
run: npm run build:dist
- name: Copy license
run: cp LICENSE out
- name: Upload Unix Artifacts
if: startsWith(matrix.os, 'ubuntu-')
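The added Copy license step copies the LICENSE file introduced in this changeset into the out directory produced by npm run build:dist, so the license is included with the uploaded build artifacts.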

.gitignore (vendored, 1 line changed)

@@ -6,3 +6,4 @@ downloads
tiktok-dl
batch_file.txt
debug.log
.scannerwork

@@ -0,0 +1,12 @@
sonar.organization=pikami
sonar.projectKey=tiktok-dl
sonar.host.url=https://sonarcloud.io
sonar.sources=.
sonar.exclusions=**/*_test.go,**/node_modules/**
sonar.tests=.
sonar.test.inclusions=**/*_test.go
sonar.test.exclusions=**/node_modules/**
sonar.go.coverage.reportPaths=cov.out
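These properties configure the SonarCloud scan: the Go sources are analysed, *_test.go files are treated as tests, and coverage is read from cov.out (the report written by the test:coverage script in package.json). The scan itself is run through the new sonar npm script further down, which points at this file via -Dproject.settings=.sonar/sonar-project.properties.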

LICENSE (new file, 21 lines)

@@ -0,0 +1,21 @@
MIT License
Copyright (c) 2020 pikami
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

@@ -16,14 +16,15 @@ You can download items listed in a text file by running `./tiktok-dl [OPTIONS] -
Clone this repository and run `go build` to build the executable.
## Available options
* `-debug` - enables debug mode
* `-output some_directory` - Output path (default "./downloads")
* `-metadata` - Write video metadata to a .json file
* `-archive` - Download only videos not listed in the archive file. Record the IDs of all downloaded videos in it.
* `-batch-file` - File containing URLs/Usernames to download, one value per line. Lines starting with '#', are considered as comments and ignored.
* `-deadline` - Sets the timout for scraper logic in seconds (used as a workaround for context deadline exceeded error) (default 1500)
* `-quiet` - Supress output
* `-debug` - enables debug mode
* `-json` - Returns whole data, that was scraped from TikTok, in json
* `-limit` - Sets the max count of video that will be downloaded (default infinity)
* `-metadata` - Write video metadata to a .json file
* `-output some_directory` - Output path (default "./downloads")
* `-quiet` - Supress output
## Acknowledgments
This software uses the **chromedp** for web scraping, it can be found here: https://github.com/chromedp/chromedp \
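As a rough illustration of the options listed above (the archive file name and profile URL are placeholders): running `./tiktok-dl -metadata -limit 10 -archive archive.txt https://www.tiktok.com/@someuser` would download at most 10 of that user's videos into ./downloads/someuser, write a .json metadata file next to each .mp4, and record the downloaded IDs in archive.txt so a later run skips them.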

@@ -3,15 +3,16 @@ package client
import (
"context"
"errors"
"github.com/chromedp/chromedp"
"io/ioutil"
"log"
"os"
"strings"
"time"
"github.com/chromedp/chromedp"
config "../models/config"
utils "../utils"
log "../utils/log"
)
// GetMusicUploads - Get all uploads by given music
@@ -33,7 +34,7 @@ func executeClientAction(url string, jsAction string) (string, error) {
ctx, cancel := chromedp.NewContext(
allocCtx,
chromedp.WithLogf(log.Printf),
chromedp.WithLogf(log.Logf),
)
defer cancel()
@@ -84,9 +85,9 @@ func runScrapeWithInfo(ctx context.Context, jsAction string, url string) (string
}
if jsOutput != "0" {
utils.Logf("\rPreloading... %s items have been founded.", jsOutput)
log.Logf("\rPreloading... %s items have been found.", jsOutput)
} else {
utils.Logf("\rPreloading...")
log.Logf("\rPreloading...")
}
if err := chromedp.Run(ctx, chromedp.EvaluateAsDevTools("currentState.finished.toString()", &jsOutput)); err != nil {
@@ -100,7 +101,7 @@ func runScrapeWithInfo(ctx context.Context, jsAction string, url string) (string
time.Sleep(50 * time.Millisecond)
}
utils.Log("\nRetrieving items...")
log.Log("\nRetrieving items...")
if err := chromedp.Run(ctx,
// Wait until custom js finishes
chromedp.WaitVisible(`video_urls`),

@@ -1,22 +1,24 @@
package client
import (
"fmt"
models "../models"
config "../models/config"
"fmt"
)
// GetUserUploads - Get all uploads marked with given hashtag
// GetHashtagUploads - Get all uploads marked with given hashtag
func GetHashtagUploads(hashtagURL string) ([]models.Upload, error) {
jsMethod := fmt.Sprintf("bootstrapIteratingVideos(%d)", config.Config.Limit)
actionOutput, err := executeClientAction(hashtagURL, jsMethod)
actionOutput, err := GetHashtagUploadsJSON(hashtagURL)
if err != nil {
return nil, err
}
return models.ParseUploads(actionOutput), nil
}
func GetHashtagUploadsJson(hashtagURL string) (string, error) {
// GetHashtagUploadsJSON - Get hashtag uploads scrape
func GetHashtagUploadsJSON(hashtagURL string) (string, error) {
jsMethod := fmt.Sprintf("bootstrapIteratingVideos(%d)", config.Config.Limit)
actionOutput, err := executeClientAction(hashtagURL, jsMethod)
if err != nil {

@@ -1,22 +1,23 @@
package client
import (
"fmt"
models "../models"
config "../models/config"
"fmt"
)
// GetMusicUploads - Get all uploads by given music
func GetMusicUploads(url string) ([]models.Upload, error) {
jsMethod := fmt.Sprintf("bootstrapIteratingVideos(%d)", config.Config.Limit)
actionOutput, err := executeClientAction(url, jsMethod)
actionOutput, err := GetMusicUploadsJSON(url)
if err != nil {
return nil, err
}
return models.ParseUploads(actionOutput), nil
}
func GetMusicUploadsJson(url string) (string, error) {
// GetMusicUploadsJSON - Get music uploads scrape
func GetMusicUploadsJSON(url string) (string, error) {
jsMethod := fmt.Sprintf("bootstrapIteratingVideos(%d)", config.Config.Limit)
actionOutput, err := executeClientAction(url, jsMethod)
if err != nil {

client/getRedirectUrl.go (new file, 52 lines)

@@ -0,0 +1,52 @@
package client
import (
"context"
"github.com/chromedp/chromedp"
"io/ioutil"
"os"
"time"
config "../models/config"
log "../utils/log"
)
func GetRedirectUrl(url string) (string, error) {
dir, err := ioutil.TempDir("", "chromedp-example")
if err != nil {
return "", err
}
defer os.RemoveAll(dir)
opts := append(chromedp.DefaultExecAllocatorOptions[:],
chromedp.DisableGPU,
chromedp.UserDataDir(dir),
chromedp.Flag("headless", !config.Config.Debug),
)
allocCtx, cancel := chromedp.NewExecAllocator(context.Background(), opts...)
defer cancel()
ctx, cancel := chromedp.NewContext(
allocCtx,
chromedp.WithLogf(log.Logf),
)
defer cancel()
ctx, cancel = context.WithTimeout(ctx, time.Duration(config.Config.Deadline)*time.Second)
defer cancel()
var jsOutput string
if err := chromedp.Run(ctx,
// Navigate to user's page
chromedp.Navigate(url),
// Wait until page loads
chromedp.WaitReady(`div`),
// Grab url links from our element
chromedp.EvaluateAsDevTools(`window.location.href`, &jsOutput),
); err != nil {
return "", err
}
return jsOutput, err
}
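GetRedirectUrl opens the given URL in the same headless-Chrome configuration the scraper uses, waits for the page to load, and returns whatever window.location.href ends up being. The DownloadShareLink workflow below relies on it to resolve short vm.tiktok.com share links to the full URL they redirect to, before handing that URL back to the normal workflow dispatcher.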

@@ -1,22 +1,23 @@
package client
import (
"fmt"
models "../models"
config "../models/config"
"fmt"
)
// GetUserUploads - Get all uploads by user
func GetUserUploads(username string) ([]models.Upload, error) {
jsMethod := fmt.Sprintf("bootstrapIteratingVideos(%d)", config.Config.Limit)
actionOutput, err := executeClientAction(`https://www.tiktok.com/@`+username, jsMethod)
actionOutput, err := GetUserUploadsJSON(username)
if err != nil {
return nil, err
}
return models.ParseUploads(actionOutput), nil
}
func GetUserUploadsJson(username string) (string, error) {
// GetUserUploadsJSON - Get user uploads scrape
func GetUserUploadsJSON(username string) (string, error) {
jsMethod := fmt.Sprintf("bootstrapIteratingVideos(%d)", config.Config.Limit)
actionOutput, err := executeClientAction(`https://www.tiktok.com/@`+username, jsMethod)
if err != nil {

@@ -8,27 +8,29 @@ import (
// Config - Runtime configuration
var Config struct {
URL string
OutputPath string
BatchFilePath string
Debug bool
MetaData bool
Quiet bool
Deadline int
Limit int
JSONOnly bool
URL string
OutputPath string
BatchFilePath string
ArchiveFilePath string
Debug bool
MetaData bool
Quiet bool
JSONOnly bool
Deadline int
Limit int
}
// GetConfig - Returns Config object
func GetConfig() {
outputPath := flag.String("output", "./downloads", "Output path")
batchFilePath := flag.String("batch-file", "", "File containing URLs/Usernames to download, one value per line. Lines starting with '#', are considered as comments and ignored.")
archive := flag.String("archive", "", "Download only videos not listed in the archive file. Record the IDs of all downloaded videos in it.")
debug := flag.Bool("debug", false, "Enables debug mode")
metadata := flag.Bool("metadata", false, "Write video metadata to a .json file")
quiet := flag.Bool("quiet", false, "Supress output")
jsonOnly := flag.Bool("json", false, "Just get JSON data from scraper (without video downloading)")
deadline := flag.Int("deadline", 1500, "Sets the timout for scraper logic in seconds (used as a workaround for 'context deadline exceeded' error)")
limit := flag.Int("limit", 0, "Sets the videos count limit (useful when there too many videos from the user or by hashtag)")
jsonOnly := flag.Bool("json", false, "Just get JSON data from scraper (without video downloading)")
flag.Parse()
args := flag.Args()
@@ -45,13 +47,14 @@ func GetConfig() {
}
Config.OutputPath = *outputPath
Config.BatchFilePath = *batchFilePath
Config.ArchiveFilePath = *archive
Config.Debug = *debug
Config.MetaData = *metadata
Config.Quiet = *quiet
if *jsonOnly {
Config.Quiet = true
}
Config.JSONOnly = *jsonOnly
Config.Deadline = *deadline
Config.Limit = *limit
Config.JSONOnly = *jsonOnly
}
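Beyond the realignment of the struct, the configuration gains an ArchiveFilePath field backed by the new -archive flag, and -json now forces quiet mode (JSONOnly sets Quiet to true), presumably so progress logging does not get mixed into the JSON that the -json workflows print to stdout.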

@@ -1,11 +1,13 @@
package models
import (
res "../resources"
utils "../utils"
"encoding/json"
"os"
"strings"
res "../resources"
checkErr "../utils/checkErr"
log "../utils/log"
)
// Upload - Upload object
@@ -47,16 +49,16 @@ func (u Upload) GetUploadID() string {
func (u Upload) WriteToFile(outputPath string) {
bytes, err := json.Marshal(u)
if err != nil {
utils.Logf(res.ErrorCouldNotSerializeJSON, u.GetUploadID())
log.Logf(res.ErrorCouldNotSerializeJSON, u.GetUploadID())
panic(err)
}
// Create the file
out, err := os.Create(outputPath)
utils.CheckErr(err)
checkErr.CheckErr(err)
defer out.Close()
// Write to file
_, err = out.Write(bytes)
utils.CheckErr(err)
checkErr.CheckErr(err)
}

@@ -1,10 +1,11 @@
package models
import (
testUtil "../unitTestUtil"
utils "../utils"
"os"
"testing"
testUtil "../unitTestUtil"
fileio "../utils/fileio"
)
func TestParseUploads(t *testing.T) {
@@ -62,7 +63,7 @@ func TestWriteToFile(t *testing.T) {
upload.WriteToFile(filePath)
actual := utils.ReadFileToString(filePath)
actual := fileio.ReadFileToString(filePath)
tu.AssertString(actual, expected, "File content")
os.Remove(filePath)

@@ -3,12 +3,14 @@
"version": "0.0.1",
"scripts": {
"install-dependencies": "go get -v -t -d ./...",
"test:coverage": "go test -short -coverprofile=cov.out ./models ./utils",
"test": "go test -v ./models && go test -v ./utils",
"clean": "rm -rf out",
"build:scraper": "node node_modules/terser/bin/terser -c -m -- scraper.js > out/scraper.js",
"build:app": "go build -o out/ -v .",
"build:dist": "mkdir out && npm run build:app && npm run build:scraper",
"build": "go build -v ."
"build": "go build -v .",
"sonar": "sonar-scanner -Dsonar.login=${SONAR_LOGIN} -Dproject.settings=.sonar/sonar-project.properties"
},
"dependencies": {
"terser": "^4.6.3"

@@ -129,9 +129,12 @@ getCurrentVideo = function() {
};
};
scrollBottom = () => window.scrollTo(0, document.body.scrollHeight);
scrollWhileNew = function(finishCallback) {
var state = { count: 0 };
var intervalID = window.setInterval(x => {
scrollBottom();
var oldCount = state.count;
state.count = document.getElementsByClassName(optStrings.classes.feedVideoItem).length;
if(currentState.limit > 0) {
@@ -143,13 +146,13 @@ scrollWhileNew = function(finishCallback) {
if(checkForErrors()) {
window.clearInterval(intervalID);
return;
} else if (state.count == 0) {
return;
}
if (oldCount !== state.count) {
currentState.preloadCount = state.count;
window.scrollTo(0, document.body.scrollHeight);
} else {
if (document.querySelector(optStrings.selectors.feedLoading)) {
window.scrollTo(0, document.body.scrollHeight);
return;
}
window.clearInterval(intervalID);

utils/archive.go (new file, 54 lines)

@@ -0,0 +1,54 @@
package utils
import (
models "../models"
config "../models/config"
fileio "./fileio"
log "./log"
)
// IsItemInArchive - Checks if the item is already archived
func IsItemInArchive(upload models.Upload) bool {
if len(RemoveArchivedItems([]models.Upload{upload})) == 0 {
return true
}
return false
}
// RemoveArchivedItems - Returns items slice without archived items
func RemoveArchivedItems(uploads []models.Upload) []models.Upload {
archiveFilePath := config.Config.ArchiveFilePath
if archiveFilePath == "" || !fileio.CheckIfExists(archiveFilePath) {
return uploads
}
removeArchivedItemsDelegate := func(archivedItem string) {
for i, upload := range uploads {
if upload.GetUploadID() == archivedItem {
uploads = append(uploads[:i], uploads[i+1:]...)
}
}
}
lenBeforeRemoval := len(uploads)
fileio.ReadFileLineByLine(archiveFilePath, removeArchivedItemsDelegate)
removedCount := lenBeforeRemoval - len(uploads)
if removedCount > 0 {
log.Logf("%d items, found in archive. Skipping...\n", removedCount)
}
return uploads
}
// AddItemToArchive - Adds item to archived list
func AddItemToArchive(uploadID string) {
archiveFilePath := config.Config.ArchiveFilePath
if archiveFilePath == "" {
return
}
fileio.AppendToFile(uploadID, archiveFilePath)
}
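The archive file is just a newline-separated list of upload IDs: RemoveArchivedItems filters out uploads whose IDs already appear in it, and AddItemToArchive appends an ID through fileio.AppendToFile. As a self-contained sketch of the same technique (standalone code with made-up names, not taken from this repository):

package main

import (
	"bufio"
	"fmt"
	"os"
)

// readArchive loads previously recorded IDs from a newline-separated file.
// A missing file just means nothing has been archived yet.
func readArchive(path string) map[string]bool {
	seen := map[string]bool{}
	f, err := os.Open(path)
	if err != nil {
		return seen
	}
	defer f.Close()
	scanner := bufio.NewScanner(f)
	for scanner.Scan() {
		seen[scanner.Text()] = true
	}
	return seen
}

// appendToArchive records an ID so that later runs skip it.
func appendToArchive(path, id string) error {
	f, err := os.OpenFile(path, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644)
	if err != nil {
		return err
	}
	defer f.Close()
	_, err = f.WriteString(id + "\n")
	return err
}

func main() {
	const archive = "archive.txt"
	uploads := []string{"111", "222", "333"} // stand-ins for upload IDs
	seen := readArchive(archive)
	for _, id := range uploads {
		if seen[id] {
			fmt.Println("skipping archived item", id)
			continue
		}
		fmt.Println("downloading", id) // the real code downloads the video here
		if err := appendToArchive(archive, id); err != nil {
			fmt.Println("could not update archive:", err)
		}
	}
}

The repository's implementation reads the archive line by line and filters the uploads slice in place; the map-based lookup above is just a compact way to show the same idea.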

@@ -4,21 +4,23 @@ import (
"io"
"net/http"
"os"
checkErr "./checkErr"
)
// DownloadFile - Downloads content from `url` and stores it in `outputPath`
func DownloadFile(outputPath string, url string) {
// Get the data
resp, err := http.Get(url)
CheckErr(err)
checkErr.CheckErr(err)
defer resp.Body.Close()
// Create the file
out, err := os.Create(outputPath)
CheckErr(err)
checkErr.CheckErr(err)
defer out.Close()
// Write the body to file
_, err = io.Copy(out, resp.Body)
CheckErr(err)
checkErr.CheckErr(err)
}

@@ -4,6 +4,8 @@ import (
"bufio"
"io/ioutil"
"os"
checkErr "../checkErr"
)
type delegateString func(string)
@@ -37,7 +39,7 @@ func ReadFileToString(path string) string {
// ReadFileLineByLine - Reads file line by line and calls delegate
func ReadFileLineByLine(path string, delegate delegateString) {
file, err := os.Open(path)
CheckErr(err)
checkErr.CheckErr(err)
defer file.Close()
scanner := bufio.NewScanner(file)
@@ -49,3 +51,14 @@ func ReadFileLineByLine(path string, delegate delegateString) {
panic(err)
}
}
// AppendToFile - Appends line to file
func AppendToFile(str string, filePath string) {
f, err := os.OpenFile(filePath, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644)
checkErr.CheckErr(err)
defer f.Close()
if _, err := f.WriteString(str + "\n"); err != nil {
checkErr.CheckErr(err)
}
}

@@ -1,9 +1,10 @@
package utils
import (
config "../models/config"
"fmt"
"os"
config "../../models/config"
)
// Log - Write to std out

@@ -2,11 +2,13 @@ package utils
import (
"io/ioutil"
checkErr "./checkErr"
)
// ReadFileAsString - Returns contents of given file
func ReadFileAsString(fileName string) string {
content, err := ioutil.ReadFile(fileName)
CheckErr(err)
checkErr.CheckErr(err)
return string(content)
}

@@ -2,7 +2,8 @@ package workflows
import (
res "../resources"
utils "../utils"
fileio "../utils/fileio"
log "../utils/log"
)
// CanUseDownloadBatchFile - Check's if DownloadBatchFile can be used
@@ -12,11 +13,11 @@ func CanUseDownloadBatchFile(batchFilePath string) bool {
// DownloadBatchFile - Download items from batch file
func DownloadBatchFile(batchFilePath string) {
if !utils.CheckIfExists(batchFilePath) {
utils.LogFatal(res.ErrorPathNotFound, batchFilePath)
if !fileio.CheckIfExists(batchFilePath) {
log.LogFatal(res.ErrorPathNotFound, batchFilePath)
}
utils.ReadFileLineByLine(batchFilePath, downloadItem)
fileio.ReadFileLineByLine(batchFilePath, downloadItem)
}
func downloadItem(batchItem string) {

@@ -1,12 +1,15 @@
package workflows
import (
"fmt"
"strings"
client "../client"
config "../models/config"
res "../resources"
utils "../utils"
"fmt"
"strings"
fileio "../utils/fileio"
log "../utils/log"
)
// CanUseDownloadHashtag - Test's if this workflow can be used for parameter
@@ -19,26 +22,30 @@ func CanUseDownloadHashtag(url string) bool {
func DownloadHashtag(url string) {
uploads, err := client.GetHashtagUploads(url)
if err != nil {
utils.LogErr(res.ErrorCouldNotGetUserUploads, err.Error())
log.LogErr(res.ErrorCouldNotGetUserUploads, err.Error())
return
}
uploads = utils.RemoveArchivedItems(uploads)
uploadCount := len(uploads)
hashtag := utils.GetHashtagFromURL(url)
downloadDir := fmt.Sprintf("%s/%s", config.Config.OutputPath, hashtag)
utils.InitOutputDirectory(downloadDir)
fileio.InitOutputDirectory(downloadDir)
for index, upload := range uploads {
downloadVideo(upload, downloadDir)
utils.Logf("\r[%d/%d] Downloaded", index+1, uploadCount)
log.Logf("\r[%d/%d] Downloaded", index+1, uploadCount)
}
utils.Log()
log.Log()
}
func GetHashtagJson(url string) {
uploads, err := client.GetHashtagUploads(url)
// GetHashtagJSON - Prints scraped info from hashtag
func GetHashtagJSON(url string) {
uploads, err := client.GetHashtagUploadsJSON(url)
if err != nil {
utils.LogErr(res.ErrorCouldNotGetUserUploads, err.Error())
log.LogErr(res.ErrorCouldNotGetUserUploads, err.Error())
return
}
fmt.Printf("%s", uploads)

@@ -1,12 +1,15 @@
package workflows
import (
"fmt"
"regexp"
client "../client"
config "../models/config"
res "../resources"
utils "../utils"
"fmt"
"regexp"
fileio "../utils/fileio"
log "../utils/log"
)
// CanUseDownloadMusic - Check's if DownloadMusic can be used for parameter
@@ -19,26 +22,29 @@ func CanUseDownloadMusic(url string) bool {
func DownloadMusic(url string) {
uploads, err := client.GetMusicUploads(url)
if err != nil {
utils.LogErr(res.ErrorCouldNotGetUserUploads, err.Error())
log.LogErr(res.ErrorCouldNotGetUserUploads, err.Error())
return
}
uploads = utils.RemoveArchivedItems(uploads)
uploadCount := len(uploads)
for index, upload := range uploads {
username := utils.GetUsernameFromString(upload.Uploader)
downloadDir := fmt.Sprintf("%s/%s", config.Config.OutputPath, username)
utils.InitOutputDirectory(downloadDir)
fileio.InitOutputDirectory(downloadDir)
downloadVideo(upload, downloadDir)
utils.Logf("\r[%d/%d] Downloaded", index+1, uploadCount)
log.Logf("\r[%d/%d] Downloaded", index+1, uploadCount)
}
utils.Log()
log.Log()
}
func GetMusicJson(url string) {
uploads, err := client.GetMusicUploadsJson(url)
// GetMusicJSON - Prints scraped info from music
func GetMusicJSON(url string) {
uploads, err := client.GetMusicUploadsJSON(url)
if err != nil {
utils.LogErr(res.ErrorCouldNotGetUserUploads, err.Error())
log.LogErr(res.ErrorCouldNotGetUserUploads, err.Error())
return
}
fmt.Printf("%s", uploads)

@@ -0,0 +1,27 @@
package workflows
import (
client "../client"
res "../resources"
log "../utils/log"
"regexp"
)
// CanUseDownloadShareLink - Check's if DownloadShareLink can be used
func CanUseDownloadShareLink(url string) bool {
match, _ := regexp.MatchString("vm.tiktok.com\\/.+", url)
return match
}
// DownloadShareLink - Download item by share link
func DownloadShareLink(url string) {
log.Logf("Resolving share link: %s\n", url)
finalURL, err := client.GetRedirectUrl(url)
if err != nil {
log.LogErr(res.ErrorCouldNotGetUserUploads, err.Error())
return
}
StartWorkflowByParameter(finalURL)
}

@@ -1,13 +1,16 @@
package workflows
import (
"fmt"
"regexp"
"strings"
client "../client"
config "../models/config"
res "../resources"
utils "../utils"
"fmt"
"regexp"
"strings"
fileio "../utils/fileio"
log "../utils/log"
)
// CanUseDownloadUser - Test's if this workflow can be used for parameter
@@ -21,25 +24,29 @@ func CanUseDownloadUser(url string) bool {
func DownloadUser(username string) {
uploads, err := client.GetUserUploads(username)
if err != nil {
utils.LogErr(res.ErrorCouldNotGetUserUploads, err.Error())
log.LogErr(res.ErrorCouldNotGetUserUploads, err.Error())
return
}
uploads = utils.RemoveArchivedItems(uploads)
uploadCount := len(uploads)
downloadDir := fmt.Sprintf("%s/%s", config.Config.OutputPath, username)
utils.InitOutputDirectory(downloadDir)
fileio.InitOutputDirectory(downloadDir)
for index, upload := range uploads {
downloadVideo(upload, downloadDir)
utils.Logf("\r[%d/%d] Downloaded", index+1, uploadCount)
log.Logf("\r[%d/%d] Downloaded", index+1, uploadCount)
}
utils.Log()
log.Log()
}
func GetUserVideosJson(username string) {
uploads, err := client.GetUserUploadsJson(username)
// GetUserVideosJSON - Prints scraped info from user
func GetUserVideosJSON(username string) {
uploads, err := client.GetUserUploadsJSON(username)
if err != nil {
utils.LogErr(res.ErrorCouldNotGetUserUploads, err.Error())
log.LogErr(res.ErrorCouldNotGetUserUploads, err.Error())
return
}
fmt.Printf("%s", uploads)

@@ -1,13 +1,16 @@
package workflows
import (
"fmt"
"regexp"
client "../client"
models "../models"
config "../models/config"
res "../resources"
utils "../utils"
"fmt"
"regexp"
fileio "../utils/fileio"
log "../utils/log"
)
// CanUseDownloadSingleVideo - Check's if DownloadSingleVideo can be used for parameter
@@ -21,14 +24,18 @@ func DownloadSingleVideo(url string) {
username := utils.GetUsernameFromString(url)
upload, err := client.GetVideoDetails(url)
if err != nil {
utils.LogErr(res.ErrorCouldNotGetUserUploads, err.Error())
log.LogErr(res.ErrorCouldNotGetUserUploads, err.Error())
return
}
if utils.IsItemInArchive(upload) {
return
}
downloadDir := fmt.Sprintf("%s/%s", config.Config.OutputPath, username)
utils.InitOutputDirectory(downloadDir)
fileio.InitOutputDirectory(downloadDir)
downloadVideo(upload, downloadDir)
utils.Log("[1/1] Downloaded\n")
log.Log("[1/1] Downloaded\n")
}
// DownloadVideo - Downloads one video
@@ -36,7 +43,7 @@ func downloadVideo(upload models.Upload, downloadDir string) {
uploadID := upload.GetUploadID()
downloadPath := fmt.Sprintf("%s/%s.mp4", downloadDir, uploadID)
if utils.CheckIfExists(downloadPath) {
if fileio.CheckIfExists(downloadPath) {
return
}
@@ -46,4 +53,6 @@ func downloadVideo(upload models.Upload, downloadDir string) {
metadataPath := fmt.Sprintf("%s/%s.json", downloadDir, uploadID)
upload.WriteToFile(metadataPath)
}
utils.AddItemToArchive(upload.GetUploadID())
}
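With these changes, downloadVideo records each downloaded upload ID via utils.AddItemToArchive once the file (and optional metadata) has been written, and DownloadSingleVideo returns early when utils.IsItemInArchive reports the upload as already archived, so re-running the same URL becomes a no-op when -archive is in use.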

@@ -4,6 +4,7 @@ import (
config "../models/config"
res "../resources"
utils "../utils"
log "../utils/log"
)
// StartWorkflowByParameter - Start needed workflow by given parameter
@@ -12,7 +13,7 @@ func StartWorkflowByParameter(url string) {
// Music
if CanUseDownloadMusic(url) {
if config.Config.JSONOnly {
GetMusicJson(url)
GetMusicJSON(url)
} else {
DownloadMusic(url)
}
@@ -28,7 +29,7 @@ func StartWorkflowByParameter(url string) {
// Tiktok user
if CanUseDownloadUser(url) {
if config.Config.JSONOnly {
GetUserVideosJson(utils.GetUsernameFromString(url))
GetUserVideosJSON(utils.GetUsernameFromString(url))
} else {
DownloadUser(utils.GetUsernameFromString(url))
}
@@ -39,12 +40,18 @@ func StartWorkflowByParameter(url string) {
// Tiktok hashtag
if CanUseDownloadHashtag(url) {
if config.Config.JSONOnly {
GetHashtagJson(url)
GetHashtagJSON(url)
} else {
DownloadHashtag(url)
}
return
}
utils.LogFatal(res.ErrorCouldNotRecogniseURL, url)
// Share URL
if CanUseDownloadShareLink(url) {
DownloadShareLink(url)
return
}
log.LogFatal(res.ErrorCouldNotRecogniseURL, url)
}
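The dispatcher now checks CanUseDownloadShareLink before giving up, so vm.tiktok.com share links are resolved through DownloadShareLink and re-dispatched, and only URLs that match none of the workflows reach LogFatal with ErrorCouldNotRecogniseURL.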