14 Commits
1.6 ... 1.9.1

Author SHA1 Message Date
Pijus Kamandulis
92006d864f TTDL-20 Wait for error/items beefore quiting 2020-04-08 23:26:50 +03:00
Pijus Kamandulis
668b050dee Added sonar-scaner
Create LICENSE

Update go.yml
2020-04-04 21:09:41 +03:00
Pijus Kamandulis
feee0a9154 Added support for vm.tiktok.com urls 2020-03-22 12:38:08 +02:00
Pijus Kamandulis
af7972685e Fixed circular dependency issue 2020-03-22 00:22:08 +02:00
Pijus Kamandulis
f9d35e3bf2 TTDL-7 Added flag; Code clean up 2020-03-22 02:10:24 +02:00
Pijus Kamandulis
9a65746fd4 Update go.yml 2020-02-25 21:44:43 +02:00
Pijus Kamandulis
70c605a696 Merge pull request #6 from intracomof/master
Download videos by hashtag; limit option; get just json data
2020-02-25 21:33:57 +02:00
alexpin
208bffb846 error handling 2020-02-25 21:16:57 +02:00
alexpin
7b9b7688a1 formatter 2020-02-25 21:03:06 +02:00
intracomof
e77c904f89 Merge branch 'master' into master 2020-02-25 21:01:43 +02:00
alexpin
68612282ee default limit value updated; WaitReady(video) removed 2020-02-25 20:55:56 +02:00
Pijus Kamandulis
7a691ad32d TTDL-5 Added better error handling 2020-02-25 20:12:01 +02:00
alexpin
b6bb470064 formatter 2020-02-25 01:01:10 +02:00
alexpin
f724f0f2a2 Download videos by hashtag; get json data without video downloading; limit option 2020-02-25 00:56:19 +02:00
31 changed files with 592 additions and 104 deletions

View File

@@ -1,5 +1,5 @@
name: tiktok-dl_CI name: tiktok-dl_CI
on: [push] on: [push, pull_request]
jobs: jobs:
build: build:
strategy: strategy:
@@ -34,6 +34,9 @@ jobs:
- name: Build - name: Build
run: npm run build:dist run: npm run build:dist
- name: Copy license
run: cp LICENSE out
- name: Upload Unix Artifacts - name: Upload Unix Artifacts
if: startsWith(matrix.os, 'ubuntu-') if: startsWith(matrix.os, 'ubuntu-')
uses: actions/upload-artifact@v1 uses: actions/upload-artifact@v1

1
.gitignore vendored
View File

@@ -6,3 +6,4 @@ downloads
tiktok-dl tiktok-dl
batch_file.txt batch_file.txt
debug.log debug.log
.scannerwork

View File

@@ -0,0 +1,12 @@
sonar.organization=pikami
sonar.projectKey=tiktok-dl
sonar.host.url=https://sonarcloud.io
sonar.sources=.
sonar.exclusions=**/*_test.go,**/node_modules/**
sonar.tests=.
sonar.test.inclusions=**/*_test.go
sonar.test.exclusions=**/node_modules/**
sonar.go.coverage.reportPaths=cov.out

21
LICENSE Normal file
View File

@@ -0,0 +1,21 @@
MIT License
Copyright (c) 2020 pikami
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@@ -16,11 +16,14 @@ You can download items listed in a text file by running `./tiktok-dl [OPTIONS] -
Clone this repository and run `go build` to build the executable. Clone this repository and run `go build` to build the executable.
## Available options ## Available options
* `-debug` - enables debug mode * `-archive` - Download only videos not listed in the archive file. Record the IDs of all downloaded videos in it.
* `-output some_directory` - Output path (default "./downloads")
* `-metadata` - Write video metadata to a .json file
* `-batch-file` - File containing URLs/Usernames to download, one value per line. Lines starting with '#', are considered as comments and ignored. * `-batch-file` - File containing URLs/Usernames to download, one value per line. Lines starting with '#', are considered as comments and ignored.
* `-deadline` - Sets the timout for scraper logic in seconds (used as a workaround for context deadline exceeded error) (default 1500) * `-deadline` - Sets the timout for scraper logic in seconds (used as a workaround for context deadline exceeded error) (default 1500)
* `-debug` - enables debug mode
* `-json` - Returns whole data, that was scraped from TikTok, in json
* `-limit` - Sets the max count of video that will be downloaded (default infinity)
* `-metadata` - Write video metadata to a .json file
* `-output some_directory` - Output path (default "./downloads")
* `-quiet` - Supress output * `-quiet` - Supress output
## Acknowledgments ## Acknowledgments

View File

@@ -2,20 +2,25 @@ package client
import ( import (
"context" "context"
"github.com/chromedp/chromedp" "errors"
"io/ioutil" "io/ioutil"
"log"
"os" "os"
"strings"
"time" "time"
"github.com/chromedp/chromedp"
config "../models/config" config "../models/config"
utils "../utils" utils "../utils"
log "../utils/log"
) )
// GetMusicUploads - Get all uploads by given music // GetMusicUploads - Get all uploads by given music
func executeClientAction(url string, jsAction string) string { func executeClientAction(url string, jsAction string) (string, error) {
dir, err := ioutil.TempDir("", "chromedp-example") dir, err := ioutil.TempDir("", "chromedp-example")
utils.CheckErr(err) if err != nil {
return "", err
}
defer os.RemoveAll(dir) defer os.RemoveAll(dir)
opts := append(chromedp.DefaultExecAllocatorOptions[:], opts := append(chromedp.DefaultExecAllocatorOptions[:],
@@ -29,22 +34,23 @@ func executeClientAction(url string, jsAction string) string {
ctx, cancel := chromedp.NewContext( ctx, cancel := chromedp.NewContext(
allocCtx, allocCtx,
chromedp.WithLogf(log.Printf), chromedp.WithLogf(log.Logf),
) )
defer cancel() defer cancel()
ctx, cancel = context.WithTimeout(ctx, time.Duration(config.Config.Deadline)*time.Second) ctx, cancel = context.WithTimeout(ctx, time.Duration(config.Config.Deadline)*time.Second)
defer cancel() defer cancel()
var jsOutput string jsOutput, err := runScrapeWithInfo(ctx, jsAction, url)
jsOutput = runScrapeWithInfo(ctx, jsAction, url) if strings.HasPrefix(jsOutput, "\"ERR:") {
err = errors.New(jsOutput)
return jsOutput }
return jsOutput, err
} }
func runScrapeQuiet(ctx context.Context, jsAction string, url string) string { func runScrapeQuiet(ctx context.Context, jsAction string, url string) (string, error) {
var jsOutput string var jsOutput string
err := chromedp.Run(ctx, if err := chromedp.Run(ctx,
// Navigate to user's page // Navigate to user's page
chromedp.Navigate(url), chromedp.Navigate(url),
// Execute url grabber script // Execute url grabber script
@@ -54,33 +60,40 @@ func runScrapeQuiet(ctx context.Context, jsAction string, url string) string {
chromedp.WaitVisible(`video_urls`), chromedp.WaitVisible(`video_urls`),
// Grab url links from our element // Grab url links from our element
chromedp.InnerHTML(`video_urls`, &jsOutput), chromedp.InnerHTML(`video_urls`, &jsOutput),
) ); err != nil {
utils.CheckErr(err) return "", err
return jsOutput }
return jsOutput, nil
} }
func runScrapeWithInfo(ctx context.Context, jsAction string, url string) string { func runScrapeWithInfo(ctx context.Context, jsAction string, url string) (string, error) {
var jsOutput string var jsOutput string
err := chromedp.Run(ctx, if err := chromedp.Run(ctx,
// Navigate to user's page // Navigate to user's page
chromedp.Navigate(url), chromedp.Navigate(url),
// Execute url grabber script // Execute url grabber script
chromedp.EvaluateAsDevTools(utils.ReadFileAsString("scraper.js"), &jsOutput), chromedp.EvaluateAsDevTools(utils.ReadFileAsString("scraper.js"), &jsOutput),
chromedp.EvaluateAsDevTools(jsAction, &jsOutput), chromedp.EvaluateAsDevTools(jsAction, &jsOutput),
) ); err != nil {
utils.CheckErr(err) return "", err
}
for { for {
err = chromedp.Run(ctx, chromedp.EvaluateAsDevTools("currentState.preloadCount.toString()", &jsOutput)) if err := chromedp.Run(ctx, chromedp.EvaluateAsDevTools("currentState.preloadCount.toString()", &jsOutput)); err != nil {
utils.CheckErr(err) return "", err
if jsOutput != "0" { }
utils.Logf("\rPreloading... Currently loaded %s items.", jsOutput)
} else { if jsOutput != "0" {
utils.Logf("\rPreloading...") log.Logf("\rPreloading... %s items have been found.", jsOutput)
} else {
log.Logf("\rPreloading...")
}
if err := chromedp.Run(ctx, chromedp.EvaluateAsDevTools("currentState.finished.toString()", &jsOutput)); err != nil {
return "", err
} }
err = chromedp.Run(ctx, chromedp.EvaluateAsDevTools("currentState.finished.toString()", &jsOutput))
utils.CheckErr(err)
if jsOutput == "true" { if jsOutput == "true" {
break break
} }
@@ -88,14 +101,15 @@ func runScrapeWithInfo(ctx context.Context, jsAction string, url string) string
time.Sleep(50 * time.Millisecond) time.Sleep(50 * time.Millisecond)
} }
utils.Log("\nRetrieving items...") log.Log("\nRetrieving items...")
err = chromedp.Run(ctx, if err := chromedp.Run(ctx,
// Wait until custom js finishes // Wait until custom js finishes
chromedp.WaitVisible(`video_urls`), chromedp.WaitVisible(`video_urls`),
// Grab url links from our element // Grab url links from our element
chromedp.InnerHTML(`video_urls`, &jsOutput), chromedp.InnerHTML(`video_urls`, &jsOutput),
) ); err != nil {
utils.CheckErr(err) return "", err
}
return jsOutput return jsOutput, nil
} }

View File

@@ -0,0 +1,28 @@
package client
import (
"fmt"
models "../models"
config "../models/config"
)
// GetHashtagUploads - Get all uploads marked with given hashtag
func GetHashtagUploads(hashtagURL string) ([]models.Upload, error) {
actionOutput, err := GetHashtagUploadsJSON(hashtagURL)
if err != nil {
return nil, err
}
return models.ParseUploads(actionOutput), nil
}
// GetHashtagUploadsJSON - Get hashtag uploads scrape
func GetHashtagUploadsJSON(hashtagURL string) (string, error) {
jsMethod := fmt.Sprintf("bootstrapIteratingVideos(%d)", config.Config.Limit)
actionOutput, err := executeClientAction(hashtagURL, jsMethod)
if err != nil {
return "", err
}
return actionOutput, nil
}

View File

@@ -1,11 +1,27 @@
package client package client
import ( import (
"fmt"
models "../models" models "../models"
config "../models/config"
) )
// GetMusicUploads - Get all uploads by given music // GetMusicUploads - Get all uploads by given music
func GetMusicUploads(url string) []models.Upload { func GetMusicUploads(url string) ([]models.Upload, error) {
actionOutput := executeClientAction(url, "bootstrapIteratingVideos()") actionOutput, err := GetMusicUploadsJSON(url)
return models.ParseUploads(actionOutput) if err != nil {
return nil, err
}
return models.ParseUploads(actionOutput), nil
}
// GetMusicUploadsJSON - Get music uploads scrape
func GetMusicUploadsJSON(url string) (string, error) {
jsMethod := fmt.Sprintf("bootstrapIteratingVideos(%d)", config.Config.Limit)
actionOutput, err := executeClientAction(url, jsMethod)
if err != nil {
return "", err
}
return actionOutput, nil
} }

52
client/getRedirectUrl.go Normal file
View File

@@ -0,0 +1,52 @@
package client
import (
"context"
"github.com/chromedp/chromedp"
"io/ioutil"
"os"
"time"
config "../models/config"
log "../utils/log"
)
func GetRedirectUrl(url string) (string, error) {
dir, err := ioutil.TempDir("", "chromedp-example")
if err != nil {
return "", err
}
defer os.RemoveAll(dir)
opts := append(chromedp.DefaultExecAllocatorOptions[:],
chromedp.DisableGPU,
chromedp.UserDataDir(dir),
chromedp.Flag("headless", !config.Config.Debug),
)
allocCtx, cancel := chromedp.NewExecAllocator(context.Background(), opts...)
defer cancel()
ctx, cancel := chromedp.NewContext(
allocCtx,
chromedp.WithLogf(log.Logf),
)
defer cancel()
ctx, cancel = context.WithTimeout(ctx, time.Duration(config.Config.Deadline)*time.Second)
defer cancel()
var jsOutput string
if err := chromedp.Run(ctx,
// Navigate to user's page
chromedp.Navigate(url),
// Wait until page loads
chromedp.WaitReady(`div`),
// Grab url links from our element
chromedp.EvaluateAsDevTools(`window.location.href`, &jsOutput),
); err != nil {
return "", err
}
return jsOutput, err
}

View File

@@ -1,11 +1,27 @@
package client package client
import ( import (
"fmt"
models "../models" models "../models"
config "../models/config"
) )
// GetUserUploads - Get all uploads by user // GetUserUploads - Get all uploads by user
func GetUserUploads(username string) []models.Upload { func GetUserUploads(username string) ([]models.Upload, error) {
actionOutput := executeClientAction(`https://www.tiktok.com/@`+username, "bootstrapIteratingVideos()") actionOutput, err := GetUserUploadsJSON(username)
return models.ParseUploads(actionOutput) if err != nil {
return nil, err
}
return models.ParseUploads(actionOutput), nil
}
// GetUserUploadsJSON - Get user uploads scrape
func GetUserUploadsJSON(username string) (string, error) {
jsMethod := fmt.Sprintf("bootstrapIteratingVideos(%d)", config.Config.Limit)
actionOutput, err := executeClientAction(`https://www.tiktok.com/@`+username, jsMethod)
if err != nil {
return "", err
}
return actionOutput, nil
} }

View File

@@ -5,7 +5,10 @@ import (
) )
// GetVideoDetails - returns details of video // GetVideoDetails - returns details of video
func GetVideoDetails(videoURL string) models.Upload { func GetVideoDetails(videoURL string) (models.Upload, error) {
actionOutput := executeClientAction(videoURL, "bootstrapGetCurrentVideo()") actionOutput, err := executeClientAction(videoURL, "bootstrapGetCurrentVideo()")
return models.ParseUpload(actionOutput) if err != nil {
return models.Upload{}, err
}
return models.ParseUpload(actionOutput), nil
} }

View File

@@ -8,23 +8,29 @@ import (
// Config - Runtime configuration // Config - Runtime configuration
var Config struct { var Config struct {
URL string URL string
OutputPath string OutputPath string
BatchFilePath string BatchFilePath string
Debug bool ArchiveFilePath string
MetaData bool Debug bool
Quiet bool MetaData bool
Deadline int Quiet bool
JSONOnly bool
Deadline int
Limit int
} }
// GetConfig - Returns Config object // GetConfig - Returns Config object
func GetConfig() { func GetConfig() {
outputPath := flag.String("output", "./downloads", "Output path") outputPath := flag.String("output", "./downloads", "Output path")
batchFilePath := flag.String("batch-file", "", "File containing URLs/Usernames to download, one value per line. Lines starting with '#', are considered as comments and ignored.") batchFilePath := flag.String("batch-file", "", "File containing URLs/Usernames to download, one value per line. Lines starting with '#', are considered as comments and ignored.")
archive := flag.String("archive", "", "Download only videos not listed in the archive file. Record the IDs of all downloaded videos in it.")
debug := flag.Bool("debug", false, "Enables debug mode") debug := flag.Bool("debug", false, "Enables debug mode")
metadata := flag.Bool("metadata", false, "Write video metadata to a .json file") metadata := flag.Bool("metadata", false, "Write video metadata to a .json file")
quiet := flag.Bool("quiet", false, "Supress output") quiet := flag.Bool("quiet", false, "Supress output")
jsonOnly := flag.Bool("json", false, "Just get JSON data from scraper (without video downloading)")
deadline := flag.Int("deadline", 1500, "Sets the timout for scraper logic in seconds (used as a workaround for 'context deadline exceeded' error)") deadline := flag.Int("deadline", 1500, "Sets the timout for scraper logic in seconds (used as a workaround for 'context deadline exceeded' error)")
limit := flag.Int("limit", 0, "Sets the videos count limit (useful when there too many videos from the user or by hashtag)")
flag.Parse() flag.Parse()
args := flag.Args() args := flag.Args()
@@ -41,8 +47,14 @@ func GetConfig() {
} }
Config.OutputPath = *outputPath Config.OutputPath = *outputPath
Config.BatchFilePath = *batchFilePath Config.BatchFilePath = *batchFilePath
Config.ArchiveFilePath = *archive
Config.Debug = *debug Config.Debug = *debug
Config.MetaData = *metadata Config.MetaData = *metadata
Config.Quiet = *quiet Config.Quiet = *quiet
if *jsonOnly {
Config.Quiet = true
}
Config.JSONOnly = *jsonOnly
Config.Deadline = *deadline Config.Deadline = *deadline
Config.Limit = *limit
} }

View File

@@ -1,11 +1,13 @@
package models package models
import ( import (
res "../resources"
utils "../utils"
"encoding/json" "encoding/json"
"os" "os"
"strings" "strings"
res "../resources"
checkErr "../utils/checkErr"
log "../utils/log"
) )
// Upload - Upload object // Upload - Upload object
@@ -47,16 +49,16 @@ func (u Upload) GetUploadID() string {
func (u Upload) WriteToFile(outputPath string) { func (u Upload) WriteToFile(outputPath string) {
bytes, err := json.Marshal(u) bytes, err := json.Marshal(u)
if err != nil { if err != nil {
utils.Logf(res.ErrorCouldNotSerializeJSON, u.GetUploadID()) log.Logf(res.ErrorCouldNotSerializeJSON, u.GetUploadID())
panic(err) panic(err)
} }
// Create the file // Create the file
out, err := os.Create(outputPath) out, err := os.Create(outputPath)
utils.CheckErr(err) checkErr.CheckErr(err)
defer out.Close() defer out.Close()
// Write to file // Write to file
_, err = out.Write(bytes) _, err = out.Write(bytes)
utils.CheckErr(err) checkErr.CheckErr(err)
} }

View File

@@ -1,10 +1,11 @@
package models package models
import ( import (
testUtil "../unitTestUtil"
utils "../utils"
"os" "os"
"testing" "testing"
testUtil "../unitTestUtil"
fileio "../utils/fileio"
) )
func TestParseUploads(t *testing.T) { func TestParseUploads(t *testing.T) {
@@ -62,7 +63,7 @@ func TestWriteToFile(t *testing.T) {
upload.WriteToFile(filePath) upload.WriteToFile(filePath)
actual := utils.ReadFileToString(filePath) actual := fileio.ReadFileToString(filePath)
tu.AssertString(actual, expected, "File content") tu.AssertString(actual, expected, "File content")
os.Remove(filePath) os.Remove(filePath)

View File

@@ -3,12 +3,14 @@
"version": "0.0.1", "version": "0.0.1",
"scripts": { "scripts": {
"install-dependencies": "go get -v -t -d ./...", "install-dependencies": "go get -v -t -d ./...",
"test:coverage": "go test -short -coverprofile=cov.out ./models ./utils",
"test": "go test -v ./models && go test -v ./utils", "test": "go test -v ./models && go test -v ./utils",
"clean": "rm -rf out", "clean": "rm -rf out",
"build:scraper": "node node_modules/terser/bin/terser -c -m -- scraper.js > out/scraper.js", "build:scraper": "node node_modules/terser/bin/terser -c -m -- scraper.js > out/scraper.js",
"build:app": "go build -o out/ -v .", "build:app": "go build -o out/ -v .",
"build:dist": "mkdir out && npm run build:app && npm run build:scraper", "build:dist": "mkdir out && npm run build:app && npm run build:scraper",
"build": "go build -v ." "build": "go build -v .",
"sonar": "sonar-scanner -Dsonar.login=${SONAR_LOGIN} -Dproject.settings=.sonar/sonar-project.properties"
}, },
"dependencies": { "dependencies": {
"terser": "^4.6.3" "terser": "^4.6.3"

View File

@@ -6,5 +6,8 @@ var ErrorCouldNotSerializeJSON = "Could not serialize json for video: %s\n"
// ErrorCouldNotRecogniseURL - // ErrorCouldNotRecogniseURL -
var ErrorCouldNotRecogniseURL = "Could not recognise URL format of string %s" var ErrorCouldNotRecogniseURL = "Could not recognise URL format of string %s"
// ErrorCouldNotGetUserUploads -
var ErrorCouldNotGetUserUploads = "Failed to get user uploads: %s\n"
// ErrorPathNotFound - // ErrorPathNotFound -
var ErrorPathNotFound = "File path %s not found." var ErrorPathNotFound = "File path %s not found."

View File

@@ -1,7 +1,7 @@
optStrings = { optStrings = {
selectors: { selectors: {
feedLoading: 'div.tiktok-loading.feed-loading', feedLoading: 'div.tiktok-loading.feed-loading',
modalArrowLeft: 'div.video-card-modal > div > img.arrow-right', modalArrowRight: 'div.video-card-modal > div > img.arrow-right',
modalClose: '.video-card-modal > div > div.close', modalClose: '.video-card-modal > div > div.close',
modalPlayer: 'div > div > main > div.video-card-modal > div > div.video-card-big > div.video-card-container > div > div > video', modalPlayer: 'div > div > main > div.video-card-modal > div > div.video-card-big > div.video-card-container > div > div > video',
modalShareInput: '.copy-link-container > input', modalShareInput: '.copy-link-container > input',
@@ -17,6 +17,7 @@ optStrings = {
classes: { classes: {
feedVideoItem: 'video-feed-item-wrapper', feedVideoItem: 'video-feed-item-wrapper',
modalCloseDisabled: 'disabled', modalCloseDisabled: 'disabled',
titleMessage: 'title',
}, },
tags: { tags: {
resultTag: 'video_urls', resultTag: 'video_urls',
@@ -25,11 +26,30 @@ optStrings = {
attributes: { attributes: {
src: "src", src: "src",
}, },
tiktokMessages: [
"Couldn't find this account",
"No videos yet",
"Video currently unavailable",
],
}; };
currentState = { currentState = {
preloadCount: 0, preloadCount: 0,
finished: false, finished: false,
limit: 0
};
checkForErrors = function() {
var titles = document.getElementsByClassName(optStrings.classes.titleMessage);
debugger;
if (titles && titles.length) {
var error = Array.from(titles).find(x => optStrings.tiktokMessages.includes(x.textContent)).textContent;
if (error) {
createVidUrlElement("ERR: " + error);
return true;
}
}
return false;
}; };
createVidUrlElement = function(outputObj) { createVidUrlElement = function(outputObj) {
@@ -37,7 +57,7 @@ createVidUrlElement = function(outputObj) {
urlSetElement.innerText = JSON.stringify(outputObj); urlSetElement.innerText = JSON.stringify(outputObj);
document.getElementsByTagName(optStrings.tags.resultParentTag)[0].appendChild(urlSetElement); document.getElementsByTagName(optStrings.tags.resultParentTag)[0].appendChild(urlSetElement);
currentState.finished = true; currentState.finished = true;
} };
buldVidUrlArray = function(finishCallback) { buldVidUrlArray = function(finishCallback) {
var feedItem = document.getElementsByClassName(optStrings.classes.feedVideoItem)[0]; var feedItem = document.getElementsByClassName(optStrings.classes.feedVideoItem)[0];
@@ -46,8 +66,14 @@ buldVidUrlArray = function(finishCallback) {
var videoArray = []; var videoArray = [];
var intervalID = window.setInterval(x => { var intervalID = window.setInterval(x => {
videoArray.push(getCurrentModalVideo()); videoArray.push(getCurrentModalVideo());
if(currentState.limit > 0) {
var arrowRight = document.querySelectorAll(optStrings.selectors.modalArrowLeft)[0]; if (videoArray.length >= currentState.limit) {
window.clearInterval(intervalID);
document.querySelector(optStrings.selectors.modalClose).click();
finishCallback(videoArray);
}
}
var arrowRight = document.querySelectorAll(optStrings.selectors.modalArrowRight)[0];
if (arrowRight.classList.contains(optStrings.classes.modalCloseDisabled)) { if (arrowRight.classList.contains(optStrings.classes.modalCloseDisabled)) {
window.clearInterval(intervalID); window.clearInterval(intervalID);
document.querySelector(optStrings.selectors.modalClose).click(); document.querySelector(optStrings.selectors.modalClose).click();
@@ -78,9 +104,10 @@ getCurrentModalVideo = function() {
link: soundHref, link: soundHref,
}, },
}; };
} };
getCurrentVideo = function() { getCurrentVideo = function() {
if(checkForErrors()) return;
var player = document.querySelector(optStrings.selectors.videoPlayer); var player = document.querySelector(optStrings.selectors.videoPlayer);
var vidUrl = player.getAttribute(optStrings.attributes.src); var vidUrl = player.getAttribute(optStrings.attributes.src);
var shareLink = document.querySelector(optStrings.selectors.videoShareInput).value; var shareLink = document.querySelector(optStrings.selectors.videoShareInput).value;
@@ -100,19 +127,32 @@ getCurrentVideo = function() {
link: soundHref, link: soundHref,
}, },
}; };
} };
scrollBottom = () => window.scrollTo(0, document.body.scrollHeight);
scrollWhileNew = function(finishCallback) { scrollWhileNew = function(finishCallback) {
var state = { count: 0 }; var state = { count: 0 };
var intervalID = window.setInterval(x => { var intervalID = window.setInterval(x => {
scrollBottom();
var oldCount = state.count; var oldCount = state.count;
state.count = document.getElementsByClassName(optStrings.classes.feedVideoItem).length; state.count = document.getElementsByClassName(optStrings.classes.feedVideoItem).length;
if(currentState.limit > 0) {
if (currentState.preloadCount >= currentState.limit || state.count >= currentState.limit) {
finishCallback(createVidUrlElement);
window.clearInterval(intervalID);
}
}
if(checkForErrors()) {
window.clearInterval(intervalID);
return;
} else if (state.count == 0) {
return;
}
if (oldCount !== state.count) { if (oldCount !== state.count) {
currentState.preloadCount = state.count; currentState.preloadCount = state.count;
window.scrollTo(0, document.body.scrollHeight);
} else { } else {
if (document.querySelector(optStrings.selectors.feedLoading)) { if (document.querySelector(optStrings.selectors.feedLoading)) {
window.scrollTo(0, document.body.scrollHeight);
return; return;
} }
window.clearInterval(intervalID); window.clearInterval(intervalID);
@@ -121,7 +161,8 @@ scrollWhileNew = function(finishCallback) {
}, 1000); }, 1000);
}; };
bootstrapIteratingVideos = function() { bootstrapIteratingVideos = function(limit) {
currentState.limit = limit;
scrollWhileNew(buldVidUrlArray); scrollWhileNew(buldVidUrlArray);
return 'bootstrapIteratingVideos'; return 'bootstrapIteratingVideos';
}; };
@@ -130,7 +171,7 @@ bootstrapGetCurrentVideo = function() {
var video = getCurrentVideo(); var video = getCurrentVideo();
createVidUrlElement(video); createVidUrlElement(video);
return 'bootstrapGetCurrentVideo'; return 'bootstrapGetCurrentVideo';
} };
init = () => { init = () => {
const newProto = navigator.__proto__; const newProto = navigator.__proto__;

54
utils/archive.go Normal file
View File

@@ -0,0 +1,54 @@
package utils
import (
models "../models"
config "../models/config"
fileio "./fileio"
log "./log"
)
// IsItemInArchive - Checks if the item is already archived
func IsItemInArchive(upload models.Upload) bool {
if len(RemoveArchivedItems([]models.Upload{upload})) == 0 {
return true
}
return false
}
// RemoveArchivedItems - Returns items slice without archived items
func RemoveArchivedItems(uploads []models.Upload) []models.Upload {
archiveFilePath := config.Config.ArchiveFilePath
if archiveFilePath == "" || !fileio.CheckIfExists(archiveFilePath) {
return uploads
}
removeArchivedItemsDelegate := func(archivedItem string) {
for i, upload := range uploads {
if upload.GetUploadID() == archivedItem {
uploads = append(uploads[:i], uploads[i+1:]...)
}
}
}
lenBeforeRemoval := len(uploads)
fileio.ReadFileLineByLine(archiveFilePath, removeArchivedItemsDelegate)
removedCount := lenBeforeRemoval - len(uploads)
if removedCount > 0 {
log.Logf("%d items, found in archive. Skipping...\n", removedCount)
}
return uploads
}
// AddItemToArchive - Adds item to archived list
func AddItemToArchive(uploadID string) {
archiveFilePath := config.Config.ArchiveFilePath
if archiveFilePath == "" {
return
}
fileio.AppendToFile(uploadID, archiveFilePath)
}

View File

@@ -4,21 +4,23 @@ import (
"io" "io"
"net/http" "net/http"
"os" "os"
checkErr "./checkErr"
) )
// DownloadFile - Downloads content from `url` and stores it in `outputPath` // DownloadFile - Downloads content from `url` and stores it in `outputPath`
func DownloadFile(outputPath string, url string) { func DownloadFile(outputPath string, url string) {
// Get the data // Get the data
resp, err := http.Get(url) resp, err := http.Get(url)
CheckErr(err) checkErr.CheckErr(err)
defer resp.Body.Close() defer resp.Body.Close()
// Create the file // Create the file
out, err := os.Create(outputPath) out, err := os.Create(outputPath)
CheckErr(err) checkErr.CheckErr(err)
defer out.Close() defer out.Close()
// Write the body to file // Write the body to file
_, err = io.Copy(out, resp.Body) _, err = io.Copy(out, resp.Body)
CheckErr(err) checkErr.CheckErr(err)
} }

View File

@@ -4,6 +4,8 @@ import (
"bufio" "bufio"
"io/ioutil" "io/ioutil"
"os" "os"
checkErr "../checkErr"
) )
type delegateString func(string) type delegateString func(string)
@@ -37,7 +39,7 @@ func ReadFileToString(path string) string {
// ReadFileLineByLine - Reads file line by line and calls delegate // ReadFileLineByLine - Reads file line by line and calls delegate
func ReadFileLineByLine(path string, delegate delegateString) { func ReadFileLineByLine(path string, delegate delegateString) {
file, err := os.Open(path) file, err := os.Open(path)
CheckErr(err) checkErr.CheckErr(err)
defer file.Close() defer file.Close()
scanner := bufio.NewScanner(file) scanner := bufio.NewScanner(file)
@@ -49,3 +51,14 @@ func ReadFileLineByLine(path string, delegate delegateString) {
panic(err) panic(err)
} }
} }
// AppendToFile - Appends line to file
func AppendToFile(str string, filePath string) {
f, err := os.OpenFile(filePath, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644)
checkErr.CheckErr(err)
defer f.Close()
if _, err := f.WriteString(str + "\n"); err != nil {
checkErr.CheckErr(err)
}
}

16
utils/getHashtag.go Normal file
View File

@@ -0,0 +1,16 @@
package utils
import (
res "../resources"
"fmt"
"strings"
)
// GetHashtagFromURL - Get's tag name from passed url
func GetHashtagFromURL(str string) string {
if match := strings.Contains(str, "/tag/"); match {
return strings.Split(str, "/tag/")[1]
}
panic(fmt.Sprintf(res.ErrorCouldNotRecogniseURL, str))
}

View File

@@ -1,8 +1,10 @@
package utils package utils
import ( import (
config "../models/config"
"fmt" "fmt"
"os"
config "../../models/config"
) )
// Log - Write to std out // Log - Write to std out
@@ -23,3 +25,8 @@ func Logf(format string, a ...interface{}) {
func LogFatal(format string, a ...interface{}) { func LogFatal(format string, a ...interface{}) {
panic(fmt.Sprintf(format, a...)) panic(fmt.Sprintf(format, a...))
} }
// LogErr - Write error
func LogErr(format string, a ...interface{}) {
fmt.Fprintf(os.Stderr, format, a...)
}

View File

@@ -2,11 +2,13 @@ package utils
import ( import (
"io/ioutil" "io/ioutil"
checkErr "./checkErr"
) )
// ReadFileAsString - Returns contents of given file // ReadFileAsString - Returns contents of given file
func ReadFileAsString(fileName string) string { func ReadFileAsString(fileName string) string {
content, err := ioutil.ReadFile(fileName) content, err := ioutil.ReadFile(fileName)
CheckErr(err) checkErr.CheckErr(err)
return string(content) return string(content)
} }

View File

@@ -2,7 +2,8 @@ package workflows
import ( import (
res "../resources" res "../resources"
utils "../utils" fileio "../utils/fileio"
log "../utils/log"
) )
// CanUseDownloadBatchFile - Check's if DownloadBatchFile can be used // CanUseDownloadBatchFile - Check's if DownloadBatchFile can be used
@@ -12,11 +13,11 @@ func CanUseDownloadBatchFile(batchFilePath string) bool {
// DownloadBatchFile - Download items from batch file // DownloadBatchFile - Download items from batch file
func DownloadBatchFile(batchFilePath string) { func DownloadBatchFile(batchFilePath string) {
if !utils.CheckIfExists(batchFilePath) { if !fileio.CheckIfExists(batchFilePath) {
utils.LogFatal(res.ErrorPathNotFound, batchFilePath) log.LogFatal(res.ErrorPathNotFound, batchFilePath)
} }
utils.ReadFileLineByLine(batchFilePath, downloadItem) fileio.ReadFileLineByLine(batchFilePath, downloadItem)
} }
func downloadItem(batchItem string) { func downloadItem(batchItem string) {

View File

@@ -0,0 +1,52 @@
package workflows
import (
"fmt"
"strings"
client "../client"
config "../models/config"
res "../resources"
utils "../utils"
fileio "../utils/fileio"
log "../utils/log"
)
// CanUseDownloadHashtag - Tests if this workflow can be used for the
// parameter, i.e. the URL points to a hashtag page (contains a "/tag/"
// path segment).
func CanUseDownloadHashtag(url string) bool {
	// Return the predicate result directly instead of via a temp variable.
	return strings.Contains(url, "/tag/")
}
// DownloadHashtag - Download videos marked with given hashtag.
// Scrapes the hashtag page, drops items already recorded in the archive,
// then downloads each remaining upload into <OutputPath>/<hashtag>.
func DownloadHashtag(url string) {
	items, err := client.GetHashtagUploads(url)
	if err != nil {
		log.LogErr(res.ErrorCouldNotGetUserUploads, err.Error())
		return
	}
	items = utils.RemoveArchivedItems(items)
	// One sub-directory per hashtag under the configured output path.
	targetDir := fmt.Sprintf("%s/%s", config.Config.OutputPath, utils.GetHashtagFromURL(url))
	fileio.InitOutputDirectory(targetDir)
	total := len(items)
	for i, item := range items {
		downloadVideo(item, targetDir)
		log.Logf("\r[%d/%d] Downloaded", i+1, total)
	}
	log.Log()
}
// GetHashtagJSON - Prints scraped info from hashtag as raw JSON to stdout
// instead of downloading the videos (used by the JSON-only mode).
func GetHashtagJSON(url string) {
	raw, err := client.GetHashtagUploadsJSON(url)
	if err == nil {
		fmt.Printf("%s", raw)
		return
	}
	log.LogErr(res.ErrorCouldNotGetUserUploads, err.Error())
}

View File

@@ -1,11 +1,15 @@
package workflows package workflows
import ( import (
client "../client"
config "../models/config"
utils "../utils"
"fmt" "fmt"
"regexp" "regexp"
client "../client"
config "../models/config"
res "../resources"
utils "../utils"
fileio "../utils/fileio"
log "../utils/log"
) )
// CanUseDownloadMusic - Check's if DownloadMusic can be used for parameter // CanUseDownloadMusic - Check's if DownloadMusic can be used for parameter
@@ -16,16 +20,32 @@ func CanUseDownloadMusic(url string) bool {
// DownloadMusic - Download all videos by given music // DownloadMusic - Download all videos by given music
func DownloadMusic(url string) { func DownloadMusic(url string) {
uploads := client.GetMusicUploads(url) uploads, err := client.GetMusicUploads(url)
if err != nil {
log.LogErr(res.ErrorCouldNotGetUserUploads, err.Error())
return
}
uploads = utils.RemoveArchivedItems(uploads)
uploadCount := len(uploads) uploadCount := len(uploads)
for index, upload := range uploads { for index, upload := range uploads {
username := utils.GetUsernameFromString(upload.Uploader) username := utils.GetUsernameFromString(upload.Uploader)
downloadDir := fmt.Sprintf("%s/%s", config.Config.OutputPath, username) downloadDir := fmt.Sprintf("%s/%s", config.Config.OutputPath, username)
utils.InitOutputDirectory(downloadDir) fileio.InitOutputDirectory(downloadDir)
downloadVideo(upload, downloadDir) downloadVideo(upload, downloadDir)
utils.Logf("\r[%d/%d] Downloaded", index+1, uploadCount) log.Logf("\r[%d/%d] Downloaded", index+1, uploadCount)
} }
utils.Log() log.Log()
}
// GetMusicJSON - Prints scraped info from music
func GetMusicJSON(url string) {
uploads, err := client.GetMusicUploadsJSON(url)
if err != nil {
log.LogErr(res.ErrorCouldNotGetUserUploads, err.Error())
return
}
fmt.Printf("%s", uploads)
} }

View File

@@ -0,0 +1,27 @@
package workflows
import (
client "../client"
res "../resources"
log "../utils/log"
"regexp"
)
// shareLinkPattern matches TikTok share URLs of the form vm.tiktok.com/<id>.
// The dots are escaped so that unrelated strings (e.g. "vmxtiktokxcom/x")
// do not match, and the pattern is compiled once at package init rather
// than on every call.
var shareLinkPattern = regexp.MustCompile(`vm\.tiktok\.com/.+`)

// CanUseDownloadShareLink - Checks if DownloadShareLink can be used for
// the parameter, i.e. the URL is a vm.tiktok.com share link.
func CanUseDownloadShareLink(url string) bool {
	return shareLinkPattern.MatchString(url)
}
// DownloadShareLink - Download item by share link
func DownloadShareLink(url string) {
log.Logf("Resolving share link: %s\n", url)
finalURL, err := client.GetRedirectUrl(url)
if err != nil {
log.LogErr(res.ErrorCouldNotGetUserUploads, err.Error())
return
}
StartWorkflowByParameter(finalURL)
}

View File

@@ -1,30 +1,53 @@
package workflows package workflows
import ( import (
"fmt"
"regexp"
"strings"
client "../client" client "../client"
config "../models/config" config "../models/config"
res "../resources"
utils "../utils" utils "../utils"
"fmt" fileio "../utils/fileio"
"strings" log "../utils/log"
) )
// CanUseDownloadUser - Test's if this workflow can be used for parameter // CanUseDownloadUser - Test's if this workflow can be used for parameter
func CanUseDownloadUser(url string) bool { func CanUseDownloadUser(url string) bool {
match := strings.Contains(url, "/") isURL := strings.Contains(url, "/")
return !match match, _ := regexp.MatchString(".+com\\/@[^\\/]+", url)
return !isURL || match
} }
// DownloadUser - Download all user's videos // DownloadUser - Download all user's videos
func DownloadUser(username string) { func DownloadUser(username string) {
uploads := client.GetUserUploads(username) uploads, err := client.GetUserUploads(username)
if err != nil {
log.LogErr(res.ErrorCouldNotGetUserUploads, err.Error())
return
}
uploads = utils.RemoveArchivedItems(uploads)
uploadCount := len(uploads) uploadCount := len(uploads)
downloadDir := fmt.Sprintf("%s/%s", config.Config.OutputPath, username) downloadDir := fmt.Sprintf("%s/%s", config.Config.OutputPath, username)
utils.InitOutputDirectory(downloadDir) fileio.InitOutputDirectory(downloadDir)
for index, upload := range uploads { for index, upload := range uploads {
downloadVideo(upload, downloadDir) downloadVideo(upload, downloadDir)
utils.Logf("\r[%d/%d] Downloaded", index+1, uploadCount) log.Logf("\r[%d/%d] Downloaded", index+1, uploadCount)
} }
utils.Log() log.Log()
}
// GetUserVideosJSON - Prints scraped info from user
func GetUserVideosJSON(username string) {
uploads, err := client.GetUserUploadsJSON(username)
if err != nil {
log.LogErr(res.ErrorCouldNotGetUserUploads, err.Error())
return
}
fmt.Printf("%s", uploads)
} }

View File

@@ -1,12 +1,16 @@
package workflows package workflows
import ( import (
"fmt"
"regexp"
client "../client" client "../client"
models "../models" models "../models"
config "../models/config" config "../models/config"
res "../resources"
utils "../utils" utils "../utils"
"fmt" fileio "../utils/fileio"
"regexp" log "../utils/log"
) )
// CanUseDownloadSingleVideo - Check's if DownloadSingleVideo can be used for parameter // CanUseDownloadSingleVideo - Check's if DownloadSingleVideo can be used for parameter
@@ -18,12 +22,20 @@ func CanUseDownloadSingleVideo(url string) bool {
// DownloadSingleVideo - Downloads single video // DownloadSingleVideo - Downloads single video
func DownloadSingleVideo(url string) { func DownloadSingleVideo(url string) {
username := utils.GetUsernameFromString(url) username := utils.GetUsernameFromString(url)
upload := client.GetVideoDetails(url) upload, err := client.GetVideoDetails(url)
if err != nil {
log.LogErr(res.ErrorCouldNotGetUserUploads, err.Error())
return
}
if utils.IsItemInArchive(upload) {
return
}
downloadDir := fmt.Sprintf("%s/%s", config.Config.OutputPath, username) downloadDir := fmt.Sprintf("%s/%s", config.Config.OutputPath, username)
utils.InitOutputDirectory(downloadDir) fileio.InitOutputDirectory(downloadDir)
downloadVideo(upload, downloadDir) downloadVideo(upload, downloadDir)
utils.Log("[1/1] Downloaded\n") log.Log("[1/1] Downloaded\n")
} }
// DownloadVideo - Downloads one video // DownloadVideo - Downloads one video
@@ -31,7 +43,7 @@ func downloadVideo(upload models.Upload, downloadDir string) {
uploadID := upload.GetUploadID() uploadID := upload.GetUploadID()
downloadPath := fmt.Sprintf("%s/%s.mp4", downloadDir, uploadID) downloadPath := fmt.Sprintf("%s/%s.mp4", downloadDir, uploadID)
if utils.CheckIfExists(downloadPath) { if fileio.CheckIfExists(downloadPath) {
return return
} }
@@ -41,4 +53,6 @@ func downloadVideo(upload models.Upload, downloadDir string) {
metadataPath := fmt.Sprintf("%s/%s.json", downloadDir, uploadID) metadataPath := fmt.Sprintf("%s/%s.json", downloadDir, uploadID)
upload.WriteToFile(metadataPath) upload.WriteToFile(metadataPath)
} }
utils.AddItemToArchive(upload.GetUploadID())
} }

View File

@@ -1,8 +1,10 @@
package workflows package workflows
import ( import (
config "../models/config"
res "../resources" res "../resources"
utils "../utils" utils "../utils"
log "../utils/log"
) )
// StartWorkflowByParameter - Start needed workflow by given parameter // StartWorkflowByParameter - Start needed workflow by given parameter
@@ -10,7 +12,11 @@ func StartWorkflowByParameter(url string) {
// Music // Music
if CanUseDownloadMusic(url) { if CanUseDownloadMusic(url) {
DownloadMusic(url) if config.Config.JSONOnly {
GetMusicJSON(url)
} else {
DownloadMusic(url)
}
return return
} }
@@ -22,9 +28,30 @@ func StartWorkflowByParameter(url string) {
// Tiktok user // Tiktok user
if CanUseDownloadUser(url) { if CanUseDownloadUser(url) {
DownloadUser(utils.GetUsernameFromString(url)) if config.Config.JSONOnly {
GetUserVideosJSON(utils.GetUsernameFromString(url))
} else {
DownloadUser(utils.GetUsernameFromString(url))
}
return return
} }
utils.LogFatal(res.ErrorCouldNotRecogniseURL, url) // Tiktok hashtag
if CanUseDownloadHashtag(url) {
if config.Config.JSONOnly {
GetHashtagJSON(url)
} else {
DownloadHashtag(url)
}
return
}
// Share URL
if CanUseDownloadShareLink(url) {
DownloadShareLink(url)
return
}
log.LogFatal(res.ErrorCouldNotRecogniseURL, url)
} }