Mirror of https://github.com/ytdl-org/youtube-dl (synced 2025-07-18 01:14:14 +09:00)

Compare commits: 2259e94b95 ... 9dfc2a2cd3 (16 commits)
Commits in this range:
9dfc2a2cd3, 420d53387c, 32f89de92b, 283dca56fe, 422b1b31cf, 1dc27e1c3b, af049e309b, 94849bc997, 974c7d7f34, 8738407d77, cecaa18b80, 98464190b0, 6a3ba7df8d, 9058a0c9c4, 1c846f474f, a2eb11b428
.github/workflows/build.yml (new file, vendored, 172 lines)

@@ -0,0 +1,172 @@
name: Build Artifacts

on:
  workflow_call:
    inputs:
      version:
        required: true
        type: string
      unix:
        default: true
        type: boolean
      windows32:
        default: true
        type: boolean

  workflow_dispatch:
    inputs:
      version:
        description: |
          VERSION: yyyy.mm.dd[.rev] or rev
        required: true
        type: string
      unix:
        description: youtube-dl, youtube-dl-py3, youtube-dl.tar.gz
        default: true
        type: boolean
      windows32:
        description: youtube-dl.exe
        default: true
        type: boolean

permissions:
  contents: read

jobs:
  unix:
    if: inputs.unix
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0  # Needed for changelog

      - uses: actions/setup-python@v5
        with:
          python-version: "3.9"

      - name: Install Requirements
        run: |
          sudo apt -y install zip pandoc man sed

      - name: Prepare
        run: |
          python devscripts/update_version.py "${{ inputs.version }}"
          python devscripts/changelog.py --update
          python devscripts/make_lazy_extractors.py youtube_dl/extractor/lazy_extractors.py

      - name: Build Unix platform-independent binary
        run: |
          make all tar

      - name: Upload artifacts
        uses: actions/upload-artifact@v4
        with:
          name: build-bin-${{ github.job }}
          path: |
            youtube-dl
            youtube-dl-py3
            youtube-dl.tar.gz
          compression-level: 0

  windows32:
    if: inputs.windows32
    runs-on: windows-2022
    env:
      PYCRYPTO: pycrypto-2.6.1-cp34-none-win32
      # Workaround for Python 3.4/5 PyPi certificate verification failures - May 2024
      PIP_TRUSTED_HOST: "pypi.python.org pypi.org files.pythonhosted.org"

    steps:
      - uses: actions/checkout@v4

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          # required for Windows XP support
          python-version: "3.4"
          architecture: x86

      - name: Install packages
        # https://pip.pypa.io/en/stable/news/#v19-2
        # https://setuptools.pypa.io/en/latest/history.html#v44-0-0
        # https://wheel.readthedocs.io/en/stable/news.html
        # https://pypi.org/project/py2exe/0.9.2.2
        shell: bash
        run: |
          python -m pip install --upgrade \
            "pip<19.2" \
            "setuptools<44" \
            "wheel<0.34.0" \
            "py2exe==0.9.2.2" \
          ;

      - name: PyCrypto cache
        id: cache_pycrypto
        uses: actions/cache@v4
        with:
          key: ${{ env.PYCRYPTO }}
          path: ./${{ env.PYCRYPTO }}

      - name: PyCrypto download
        if: |
          steps.cache_pycrypto.outputs.cache-hit != 'true'
        shell: bash
        run: |
          mkdir -p "${PYCRYPTO}"
          cd "${PYCRYPTO}"
          curl -L -O "https://web.archive.org/web/20200627032153/http://www.voidspace.org.uk/python/pycrypto-2.6.1/${PYCRYPTO}.whl"

      - name: PyCrypto install
        shell: bash
        run: |
          python -m pip install "./${PYCRYPTO}/${PYCRYPTO}.whl"

      - name: Prepare
        run: |
          python devscripts/update_version.py "${{ inputs.version }}"
          python devscripts/make_lazy_extractors.py youtube_dl/extractor/lazy_extractors.py

      - name: Build binary
        run: python setup.py py2exe

      - name: Upload artifacts
        uses: actions/upload-artifact@v4
        with:
          name: build-bin-${{ github.job }}
          path: |
            youtube-dl.exe
          compression-level: 0

  meta_files:
    if: always() && !cancelled()
    needs:
      - unix
      - windows32
    runs-on: ubuntu-latest
    steps:
      - uses: actions/download-artifact@v4
        with:
          path: artifact
          pattern: build-bin-*
          merge-multiple: true

      - name: Make SHA2-SUMS files
        run: |
          cd ./artifact/
          # make sure SHA sums are also printed to stdout
          sha256sum -- * | tee ../SHA2-256SUMS
          sha512sum -- * | tee ../SHA2-512SUMS
          # also print as permanent annotations to the summary page
          while read -r shasum; do
            echo "::notice title=${shasum##* }::sha256: ${shasum% *}"
          done < ../SHA2-256SUMS

      - name: Upload artifacts
        uses: actions/upload-artifact@v4
        with:
          name: build-${{ github.job }}
          path: |
            SHA*SUMS*
          compression-level: 0
          overwrite: true
.github/workflows/release.yml (new file, vendored, 147 lines)

@@ -0,0 +1,147 @@
name: Release

on:
  workflow_dispatch:
    inputs:
      version:
        description: |
          VERSION: yyyy.mm.dd[.rev] or rev
          (default: auto-generated)
        required: false
        default: ""
        type: string
      prerelease:
        description: Pre-release
        default: false
        type: boolean

jobs:
  prepare:
    permissions:
      contents: write
    runs-on: ubuntu-latest
    outputs:
      version: ${{ steps.setup_variables.outputs.version }}
      head_sha: ${{ steps.get_target.outputs.head_sha }}

    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - uses: actions/setup-python@v5
        with:
          python-version: "3.10"

      - name: Setup variables
        id: setup_variables
        run: |
          revision="${{ (inputs.prerelease || !vars.PUSH_VERSION_COMMIT) && '$(date -u +"%H%M%S")' || '' }}"
          version="$(
            python devscripts/update_version.py \
              ${{ inputs.version || '"${revision}"' }} )"
          echo "::group::Output variables"
          cat << EOF | tee -a "$GITHUB_OUTPUT"
          version=${version}
          EOF
          echo "::endgroup::"

      - name: Update documentation
        env:
          version: ${{ steps.setup_variables.outputs.version }}
          target_repo: ${{ steps.setup_variables.outputs.target_repo }}
        if: |
          !inputs.prerelease
        run: |
          python devscripts/changelog.py --update
          make README.md
          make issuetemplates
          make supportedsites

      - name: Push to release
        id: push_release
        env:
          version: ${{ steps.setup_variables.outputs.version }}
          creator: ${{ github.event.sender.login }}
        if: |
          !inputs.prerelease
        run: |
          git config --global user.name "github-actions[bot]"
          git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com"
          git add -u
          git commit -m "Release ${version}" \
            -m "Created by: ${creator}" \
            -m ":ci skip all"
          git push origin --force master:release

      - name: Get target commitish
        id: get_target
        run: |
          echo "head_sha=$(git rev-parse HEAD)" >> "$GITHUB_OUTPUT"

      - name: Update master
        env:
          target_repo: ${{ steps.setup_variables.outputs.target_repo }}
        if: |
          vars.PUSH_VERSION_COMMIT != '' && !inputs.prerelease
        run: |
          git push origin ${{ github.event.ref }}

  build:
    needs: prepare
    uses: ./.github/workflows/build.yml
    with:
      version: ${{ needs.prepare.outputs.version }}
    permissions:
      contents: read

  publish:
    needs: [prepare, build]
    permissions:
      contents: write
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - uses: actions/download-artifact@v4
        with:
          path: artifact
          pattern: build-*
          merge-multiple: true

      - uses: actions/setup-python@v5
        with:
          python-version: "3.10"

      - name: Generate release notes
        run: |
          cat >> ./RELEASE_NOTES << EOF
          <details><summary><h3>Changelog</h3></summary>

          $(python devscripts/changelog.py)

          </details>
          EOF
          cat > ./PRERELEASE_NOTES << EOF
          **This is a pre-release build**
          ---

          $(cat ./RELEASE_NOTES)
          EOF

      - name: Publish release
        env:
          GH_TOKEN: ${{ github.token }}
          version: ${{ needs.prepare.outputs.version }}
          head_sha: ${{ needs.prepare.outputs.head_sha }}
        run: |
          gh release create \
            --notes-file ${{ inputs.prerelease && 'PRERELEASE_NOTES' || 'RELEASE_NOTES' }} \
            --target ${{ env.head_sha }} \
            --title "youtube-dl ${version}" \
            ${{ inputs.prerelease && '--prerelease' || '' }} \
            "${version}" \
            artifact/*
Makefile (15 lines changed)

@@ -1,7 +1,7 @@
all: youtube-dl README.md CONTRIBUTING.md README.txt youtube-dl.1 youtube-dl.bash-completion youtube-dl.zsh youtube-dl.fish supportedsites
all: youtube-dl youtube-dl-py3 README.md CONTRIBUTING.md README.txt youtube-dl.1 youtube-dl.bash-completion youtube-dl.zsh youtube-dl.fish supportedsites

clean:
	rm -rf youtube-dl.1.temp.md youtube-dl.1 youtube-dl.bash-completion README.txt MANIFEST build/ dist/ .coverage cover/ youtube-dl.tar.gz youtube-dl.zsh youtube-dl.fish youtube_dl/extractor/lazy_extractors.py *.dump *.part* *.ytdl *.info.json *.mp4 *.m4a *.flv *.mp3 *.avi *.mkv *.webm *.3gp *.wav *.ape *.swf *.jpg *.png CONTRIBUTING.md.tmp youtube-dl youtube-dl.exe
	rm -rf youtube-dl.1.temp.md youtube-dl.1 youtube-dl.bash-completion README.txt MANIFEST build/ dist/ .coverage cover/ youtube-dl.tar.gz youtube-dl.zsh youtube-dl.fish youtube_dl/extractor/lazy_extractors.py *.dump *.part* *.ytdl *.info.json *.mp4 *.m4a *.flv *.mp3 *.avi *.mkv *.webm *.3gp *.wav *.ape *.swf *.jpg *.png CONTRIBUTING.md.tmp youtube-dl youtube-dl-py3 youtube-dl.zip youtube-dl.exe
	find . -name "*.pyc" -delete
	find . -name "*.class" -delete

@@ -10,6 +10,7 @@ BINDIR ?= $(PREFIX)/bin
MANDIR ?= $(PREFIX)/man
SHAREDIR ?= $(PREFIX)/share
PYTHON ?= /usr/bin/env python
PYTHON3 ?= /usr/bin/env python3

# set SYSCONFDIR to /etc if PREFIX=/usr or PREFIX=/usr/local
SYSCONFDIR = $(shell if [ $(PREFIX) = /usr -o $(PREFIX) = /usr/local ]; then echo /etc; else echo $(PREFIX)/etc; fi)

@@ -57,7 +58,7 @@ tar: youtube-dl.tar.gz

pypi-files: youtube-dl.bash-completion README.txt youtube-dl.1 youtube-dl.fish

youtube-dl: youtube_dl/*.py youtube_dl/*/*.py
youtube-dl.zip: youtube_dl/*.py youtube_dl/*/*.py
	mkdir -p zip
	for d in youtube_dl youtube_dl/downloader youtube_dl/extractor youtube_dl/postprocessor ; do \
		mkdir -p zip/$$d ;\

@@ -67,11 +68,17 @@ youtube-dl: youtube_dl/*.py youtube_dl/*/*.py
	mv zip/youtube_dl/__main__.py zip/
	cd zip ; zip -q ../youtube-dl youtube_dl/*.py youtube_dl/*/*.py __main__.py
	rm -rf zip

youtube-dl: youtube-dl.zip
	echo '#!$(PYTHON)' > youtube-dl
	cat youtube-dl.zip >> youtube-dl
	rm youtube-dl.zip
	chmod a+x youtube-dl

youtube-dl-py3: youtube-dl.zip
	echo '#!$(PYTHON3)' > youtube-dl-py3
	cat youtube-dl.zip >> youtube-dl-py3
	chmod a+x youtube-dl-py3

README.md: youtube_dl/*.py youtube_dl/*/*.py
	COLUMNS=80 $(PYTHON) youtube_dl/__main__.py --help | $(PYTHON) devscripts/make_readme.py
devscripts/buildserver.py (deleted, 433 lines)

@@ -1,433 +0,0 @@
#!/usr/bin/python3

import argparse
import ctypes
import functools
import shutil
import subprocess
import sys
import tempfile
import threading
import traceback
import os.path

sys.path.insert(0, os.path.dirname(os.path.dirname((os.path.abspath(__file__)))))
from youtube_dl.compat import (
    compat_input,
    compat_http_server,
    compat_str,
    compat_urlparse,
)

# These are not used outside of buildserver.py thus not in compat.py

try:
    import winreg as compat_winreg
except ImportError:  # Python 2
    import _winreg as compat_winreg

try:
    import socketserver as compat_socketserver
except ImportError:  # Python 2
    import SocketServer as compat_socketserver


class BuildHTTPServer(compat_socketserver.ThreadingMixIn, compat_http_server.HTTPServer):
    allow_reuse_address = True


advapi32 = ctypes.windll.advapi32

SC_MANAGER_ALL_ACCESS = 0xf003f
SC_MANAGER_CREATE_SERVICE = 0x02
SERVICE_WIN32_OWN_PROCESS = 0x10
SERVICE_AUTO_START = 0x2
SERVICE_ERROR_NORMAL = 0x1
DELETE = 0x00010000
SERVICE_STATUS_START_PENDING = 0x00000002
SERVICE_STATUS_RUNNING = 0x00000004
SERVICE_ACCEPT_STOP = 0x1

SVCNAME = 'youtubedl_builder'

LPTSTR = ctypes.c_wchar_p
START_CALLBACK = ctypes.WINFUNCTYPE(None, ctypes.c_int, ctypes.POINTER(LPTSTR))


class SERVICE_TABLE_ENTRY(ctypes.Structure):
    _fields_ = [
        ('lpServiceName', LPTSTR),
        ('lpServiceProc', START_CALLBACK)
    ]


HandlerEx = ctypes.WINFUNCTYPE(
    ctypes.c_int,     # return
    ctypes.c_int,     # dwControl
    ctypes.c_int,     # dwEventType
    ctypes.c_void_p,  # lpEventData,
    ctypes.c_void_p,  # lpContext,
)


def _ctypes_array(c_type, py_array):
    ar = (c_type * len(py_array))()
    ar[:] = py_array
    return ar


def win_OpenSCManager():
    res = advapi32.OpenSCManagerW(None, None, SC_MANAGER_ALL_ACCESS)
    if not res:
        raise Exception('Opening service manager failed - '
                        'are you running this as administrator?')
    return res


def win_install_service(service_name, cmdline):
    manager = win_OpenSCManager()
    try:
        h = advapi32.CreateServiceW(
            manager, service_name, None,
            SC_MANAGER_CREATE_SERVICE, SERVICE_WIN32_OWN_PROCESS,
            SERVICE_AUTO_START, SERVICE_ERROR_NORMAL,
            cmdline, None, None, None, None, None)
        if not h:
            raise OSError('Service creation failed: %s' % ctypes.FormatError())

        advapi32.CloseServiceHandle(h)
    finally:
        advapi32.CloseServiceHandle(manager)


def win_uninstall_service(service_name):
    manager = win_OpenSCManager()
    try:
        h = advapi32.OpenServiceW(manager, service_name, DELETE)
        if not h:
            raise OSError('Could not find service %s: %s' % (
                service_name, ctypes.FormatError()))

        try:
            if not advapi32.DeleteService(h):
                raise OSError('Deletion failed: %s' % ctypes.FormatError())
        finally:
            advapi32.CloseServiceHandle(h)
    finally:
        advapi32.CloseServiceHandle(manager)


def win_service_report_event(service_name, msg, is_error=True):
    with open('C:/sshkeys/log', 'a', encoding='utf-8') as f:
        f.write(msg + '\n')

    event_log = advapi32.RegisterEventSourceW(None, service_name)
    if not event_log:
        raise OSError('Could not report event: %s' % ctypes.FormatError())

    try:
        type_id = 0x0001 if is_error else 0x0004
        event_id = 0xc0000000 if is_error else 0x40000000
        lines = _ctypes_array(LPTSTR, [msg])

        if not advapi32.ReportEventW(
                event_log, type_id, 0, event_id, None, len(lines), 0,
                lines, None):
            raise OSError('Event reporting failed: %s' % ctypes.FormatError())
    finally:
        advapi32.DeregisterEventSource(event_log)


def win_service_handler(stop_event, *args):
    try:
        raise ValueError('Handler called with args ' + repr(args))
        TODO
    except Exception as e:
        tb = traceback.format_exc()
        msg = str(e) + '\n' + tb
        win_service_report_event(service_name, msg, is_error=True)
        raise


def win_service_set_status(handle, status_code):
    svcStatus = SERVICE_STATUS()
    svcStatus.dwServiceType = SERVICE_WIN32_OWN_PROCESS
    svcStatus.dwCurrentState = status_code
    svcStatus.dwControlsAccepted = SERVICE_ACCEPT_STOP

    svcStatus.dwServiceSpecificExitCode = 0

    if not advapi32.SetServiceStatus(handle, ctypes.byref(svcStatus)):
        raise OSError('SetServiceStatus failed: %r' % ctypes.FormatError())


def win_service_main(service_name, real_main, argc, argv_raw):
    try:
        # args = [argv_raw[i].value for i in range(argc)]
        stop_event = threading.Event()
        handler = HandlerEx(functools.partial(stop_event, win_service_handler))
        h = advapi32.RegisterServiceCtrlHandlerExW(service_name, handler, None)
        if not h:
            raise OSError('Handler registration failed: %s' %
                          ctypes.FormatError())

        TODO
    except Exception as e:
        tb = traceback.format_exc()
        msg = str(e) + '\n' + tb
        win_service_report_event(service_name, msg, is_error=True)
        raise


def win_service_start(service_name, real_main):
    try:
        cb = START_CALLBACK(
            functools.partial(win_service_main, service_name, real_main))
        dispatch_table = _ctypes_array(SERVICE_TABLE_ENTRY, [
            SERVICE_TABLE_ENTRY(
                service_name,
                cb
            ),
            SERVICE_TABLE_ENTRY(None, ctypes.cast(None, START_CALLBACK))
        ])

        if not advapi32.StartServiceCtrlDispatcherW(dispatch_table):
            raise OSError('ctypes start failed: %s' % ctypes.FormatError())
    except Exception as e:
        tb = traceback.format_exc()
        msg = str(e) + '\n' + tb
        win_service_report_event(service_name, msg, is_error=True)
        raise


def main(args=None):
    parser = argparse.ArgumentParser()
    parser.add_argument('-i', '--install',
                        action='store_const', dest='action', const='install',
                        help='Launch at Windows startup')
    parser.add_argument('-u', '--uninstall',
                        action='store_const', dest='action', const='uninstall',
                        help='Remove Windows service')
    parser.add_argument('-s', '--service',
                        action='store_const', dest='action', const='service',
                        help='Run as a Windows service')
    parser.add_argument('-b', '--bind', metavar='<host:port>',
                        action='store', default='0.0.0.0:8142',
                        help='Bind to host:port (default %default)')
    options = parser.parse_args(args=args)

    if options.action == 'install':
        fn = os.path.abspath(__file__).replace('v:', '\\\\vboxsrv\\vbox')
        cmdline = '%s %s -s -b %s' % (sys.executable, fn, options.bind)
        win_install_service(SVCNAME, cmdline)
        return

    if options.action == 'uninstall':
        win_uninstall_service(SVCNAME)
        return

    if options.action == 'service':
        win_service_start(SVCNAME, main)
        return

    host, port_str = options.bind.split(':')
    port = int(port_str)

    print('Listening on %s:%d' % (host, port))
    srv = BuildHTTPServer((host, port), BuildHTTPRequestHandler)
    thr = threading.Thread(target=srv.serve_forever)
    thr.start()
    compat_input('Press ENTER to shut down')
    srv.shutdown()
    thr.join()


def rmtree(path):
    for name in os.listdir(path):
        fname = os.path.join(path, name)
        if os.path.isdir(fname):
            rmtree(fname)
        else:
            os.chmod(fname, 0o666)
            os.remove(fname)
    os.rmdir(path)


class BuildError(Exception):
    def __init__(self, output, code=500):
        self.output = output
        self.code = code

    def __str__(self):
        return self.output


class HTTPError(BuildError):
    pass


class PythonBuilder(object):
    def __init__(self, **kwargs):
        python_version = kwargs.pop('python', '3.4')
        python_path = None
        for node in ('Wow6432Node\\', ''):
            try:
                key = compat_winreg.OpenKey(
                    compat_winreg.HKEY_LOCAL_MACHINE,
                    r'SOFTWARE\%sPython\PythonCore\%s\InstallPath' % (node, python_version))
                try:
                    python_path, _ = compat_winreg.QueryValueEx(key, '')
                finally:
                    compat_winreg.CloseKey(key)
                break
            except Exception:
                pass

        if not python_path:
            raise BuildError('No such Python version: %s' % python_version)

        self.pythonPath = python_path

        super(PythonBuilder, self).__init__(**kwargs)


class GITInfoBuilder(object):
    def __init__(self, **kwargs):
        try:
            self.user, self.repoName = kwargs['path'][:2]
            self.rev = kwargs.pop('rev')
        except ValueError:
            raise BuildError('Invalid path')
        except KeyError as e:
            raise BuildError('Missing mandatory parameter "%s"' % e.args[0])

        path = os.path.join(os.environ['APPDATA'], 'Build archive', self.repoName, self.user)
        if not os.path.exists(path):
            os.makedirs(path)
        self.basePath = tempfile.mkdtemp(dir=path)
        self.buildPath = os.path.join(self.basePath, 'build')

        super(GITInfoBuilder, self).__init__(**kwargs)


class GITBuilder(GITInfoBuilder):
    def build(self):
        try:
            subprocess.check_output(['git', 'clone', 'git://github.com/%s/%s.git' % (self.user, self.repoName), self.buildPath])
            subprocess.check_output(['git', 'checkout', self.rev], cwd=self.buildPath)
        except subprocess.CalledProcessError as e:
            raise BuildError(e.output)

        super(GITBuilder, self).build()


class YoutubeDLBuilder(object):
    authorizedUsers = ['fraca7', 'phihag', 'rg3', 'FiloSottile', 'ytdl-org']

    def __init__(self, **kwargs):
        if self.repoName != 'youtube-dl':
            raise BuildError('Invalid repository "%s"' % self.repoName)
        if self.user not in self.authorizedUsers:
            raise HTTPError('Unauthorized user "%s"' % self.user, 401)

        super(YoutubeDLBuilder, self).__init__(**kwargs)

    def build(self):
        try:
            proc = subprocess.Popen([os.path.join(self.pythonPath, 'python.exe'), 'setup.py', 'py2exe'], stdin=subprocess.PIPE, cwd=self.buildPath)
            proc.wait()
            #subprocess.check_output([os.path.join(self.pythonPath, 'python.exe'), 'setup.py', 'py2exe'],
            #                        cwd=self.buildPath)
        except subprocess.CalledProcessError as e:
            raise BuildError(e.output)

        super(YoutubeDLBuilder, self).build()


class DownloadBuilder(object):
    def __init__(self, **kwargs):
        self.handler = kwargs.pop('handler')
        self.srcPath = os.path.join(self.buildPath, *tuple(kwargs['path'][2:]))
        self.srcPath = os.path.abspath(os.path.normpath(self.srcPath))
        if not self.srcPath.startswith(self.buildPath):
            raise HTTPError(self.srcPath, 401)

        super(DownloadBuilder, self).__init__(**kwargs)

    def build(self):
        if not os.path.exists(self.srcPath):
            raise HTTPError('No such file', 404)
        if os.path.isdir(self.srcPath):
            raise HTTPError('Is a directory: %s' % self.srcPath, 401)

        self.handler.send_response(200)
        self.handler.send_header('Content-Type', 'application/octet-stream')
        self.handler.send_header('Content-Disposition', 'attachment; filename=%s' % os.path.split(self.srcPath)[-1])
        self.handler.send_header('Content-Length', str(os.stat(self.srcPath).st_size))
        self.handler.end_headers()

        with open(self.srcPath, 'rb') as src:
            shutil.copyfileobj(src, self.handler.wfile)

        super(DownloadBuilder, self).build()


class CleanupTempDir(object):
    def build(self):
        try:
            rmtree(self.basePath)
        except Exception as e:
            print('WARNING deleting "%s": %s' % (self.basePath, e))

        super(CleanupTempDir, self).build()


class Null(object):
    def __init__(self, **kwargs):
        pass

    def start(self):
        pass

    def close(self):
        pass

    def build(self):
        pass


class Builder(PythonBuilder, GITBuilder, YoutubeDLBuilder, DownloadBuilder, CleanupTempDir, Null):
    pass


class BuildHTTPRequestHandler(compat_http_server.BaseHTTPRequestHandler):
    actionDict = {'build': Builder, 'download': Builder}  # They're the same, no more caching.

    def do_GET(self):
        path = compat_urlparse.urlparse(self.path)
        paramDict = dict([(key, value[0]) for key, value in compat_urlparse.parse_qs(path.query).items()])
        action, _, path = path.path.strip('/').partition('/')
        if path:
            path = path.split('/')
            if action in self.actionDict:
                try:
                    builder = self.actionDict[action](path=path, handler=self, **paramDict)
                    builder.start()
                    try:
                        builder.build()
                    finally:
                        builder.close()
                except BuildError as e:
                    self.send_response(e.code)
                    msg = compat_str(e).encode('UTF-8')
                    self.send_header('Content-Type', 'text/plain; charset=UTF-8')
                    self.send_header('Content-Length', len(msg))
                    self.end_headers()
                    self.wfile.write(msg)
            else:
                self.send_response(500, 'Unknown build method "%s"' % action)
        else:
            self.send_response(500, 'Malformed URL')


if __name__ == '__main__':
    main()
devscripts/changelog.py (new executable file, 100 lines)

@@ -0,0 +1,100 @@
#!/usr/bin/env python
from __future__ import unicode_literals

import os
import subprocess
import sys


def run(args):
    process = subprocess.Popen(args, stdout=subprocess.PIPE, universal_newlines=True)
    return process.communicate()[0].strip()


def is_core(short):
    prefix = None
    if ']' in short:
        prefix = short.partition(']')[0][1:]
    elif ': ' in short:
        prefix = short.partition(': ')[0]

    if not prefix or ' ' in prefix:
        return True

    prefix = prefix.partition(':')[0].lower()
    if prefix.startswith('extractor/'):
        prefix = prefix[len('extractor/'):]
    if prefix.endswith('ie'):
        prefix = prefix[:-len('ie')]
    return not os.path.exists('youtube_dl/extractor/%s.py' % prefix)


def format_line(markdown, short, sha):
    if not markdown:
        return '* ' + short

    return '* [%s](https://github.com/ytdl-org/youtube-dl/commit/%s)' % (short, sha)


def generate_changelog(markdown):
    most_recent_tag = run([
        'git', 'tag', '--list', '--sort=-v:refname',
        '????.??.??', '????.??.??.?',
    ]).split('\n')[0]
    lines = run([
        'git', 'log',
        '--format=format:%H%n%s', '--no-merges', '-z',
        most_recent_tag + '..HEAD',
    ]).split('\x00')

    core = []
    extractor = []
    for line in lines:
        if not line:
            continue
        sha, short = line.split('\n')

        if ' * ' in short:
            short = short.partition(' * ')[0]

        target = core if is_core(short) else extractor
        target.append((sha, short))

    result = []
    if core:
        result.append('#### Core' if markdown else 'Core')
        for sha, short in core:
            result.append(format_line(markdown, short, sha))
        result.append('')

    if extractor:
        result.append('#### Extractor' if markdown else 'Extractor')
        for sha, short in extractor:
            result.append(format_line(markdown, short, sha))
        result.append('')

    return '\n'.join(result)


def read_version():
    with open('youtube_dl/version.py', 'r') as f:
        exec(compile(f.read(), 'youtube_dl/version.py', 'exec'))

    return locals()['__version__']


update_in_place = len(sys.argv) > 1 and sys.argv[1] == '--update'
changelog = generate_changelog(not update_in_place)

if not update_in_place:
    print(changelog)
    sys.exit()

with open('ChangeLog', 'rb') as file:
    data = file.read()

with open('ChangeLog', 'wb') as file:
    file.write(('version %s\n\n' % read_version()).encode('utf-8'))
    file.write(changelog.encode('utf-8'))
    file.write('\n\n'.encode('utf-8'))
    file.write(data)
devscripts/create-github-release.py (deleted, 111 lines)

@@ -1,111 +0,0 @@
#!/usr/bin/env python
from __future__ import unicode_literals

import json
import mimetypes
import netrc
import optparse
import os
import re
import sys

dirn = os.path.dirname

sys.path.insert(0, dirn(dirn(os.path.abspath(__file__))))

from youtube_dl.compat import (
    compat_basestring,
    compat_getpass,
    compat_print,
    compat_urllib_request,
)
from youtube_dl.utils import (
    make_HTTPS_handler,
    sanitized_Request,
)
from utils import read_file


class GitHubReleaser(object):
    _API_URL = 'https://api.github.com/repos/ytdl-org/youtube-dl/releases'
    _UPLOADS_URL = 'https://uploads.github.com/repos/ytdl-org/youtube-dl/releases/%s/assets?name=%s'
    _NETRC_MACHINE = 'github.com'

    def __init__(self, debuglevel=0):
        self._init_github_account()
        https_handler = make_HTTPS_handler({}, debuglevel=debuglevel)
        self._opener = compat_urllib_request.build_opener(https_handler)

    def _init_github_account(self):
        try:
            info = netrc.netrc().authenticators(self._NETRC_MACHINE)
            if info is not None:
                self._token = info[2]
                compat_print('Using GitHub credentials found in .netrc...')
                return
            else:
                compat_print('No GitHub credentials found in .netrc')
        except (IOError, netrc.NetrcParseError):
            compat_print('Unable to parse .netrc')
        self._token = compat_getpass(
            'Type your GitHub PAT (personal access token) and press [Return]: ')

    def _call(self, req):
        if isinstance(req, compat_basestring):
            req = sanitized_Request(req)
        req.add_header('Authorization', 'token %s' % self._token)
        response = self._opener.open(req).read().decode('utf-8')
        return json.loads(response)

    def list_releases(self):
        return self._call(self._API_URL)

    def create_release(self, tag_name, name=None, body='', draft=False, prerelease=False):
        data = {
            'tag_name': tag_name,
            'target_commitish': 'master',
            'name': name,
            'body': body,
            'draft': draft,
            'prerelease': prerelease,
        }
        req = sanitized_Request(self._API_URL, json.dumps(data).encode('utf-8'))
        return self._call(req)

    def create_asset(self, release_id, asset):
        asset_name = os.path.basename(asset)
        url = self._UPLOADS_URL % (release_id, asset_name)
        # Our files are small enough to be loaded directly into memory.
        data = open(asset, 'rb').read()
        req = sanitized_Request(url, data)
        mime_type, _ = mimetypes.guess_type(asset_name)
        req.add_header('Content-Type', mime_type or 'application/octet-stream')
        return self._call(req)


def main():
    parser = optparse.OptionParser(usage='%prog CHANGELOG VERSION BUILDPATH')
    options, args = parser.parse_args()
    if len(args) != 3:
        parser.error('Expected a version and a build directory')

    changelog_file, version, build_path = args

    changelog = read_file(changelog_file)

    mobj = re.search(r'(?s)version %s\n{2}(.+?)\n{3}' % version, changelog)
    body = mobj.group(1) if mobj else ''

    releaser = GitHubReleaser()

    new_release = releaser.create_release(
        version, name='youtube-dl %s' % version, body=body)
    release_id = new_release['id']

    for asset in os.listdir(build_path):
        compat_print('Uploading %s...' % asset)
        releaser.create_asset(release_id, os.path.join(build_path, asset))


if __name__ == '__main__':
    main()
devscripts/release.sh (deleted, 141 lines)

@@ -1,141 +0,0 @@
#!/bin/bash

# IMPORTANT: the following assumptions are made
# * the GH repo is on the origin remote
# * the gh-pages branch is named so locally
# * the git config user.signingkey is properly set

# You will need
# pip install coverage nose rsa wheel

# TODO
# release notes
# make hash on local files

set -e

skip_tests=true
gpg_sign_commits=""
buildserver='localhost:8142'

while true
do
case "$1" in
    --run-tests)
        skip_tests=false
        shift
    ;;
    --gpg-sign-commits|-S)
        gpg_sign_commits="-S"
        shift
    ;;
    --buildserver)
        buildserver="$2"
        shift 2
    ;;
    --*)
        echo "ERROR: unknown option $1"
        exit 1
    ;;
    *)
        break
    ;;
esac
done

if [ -z "$1" ]; then echo "ERROR: specify version number like this: $0 1994.09.06"; exit 1; fi
version="$1"
major_version=$(echo "$version" | sed -n 's#^\([0-9]*\.[0-9]*\.[0-9]*\).*#\1#p')
if test "$major_version" '!=' "$(date '+%Y.%m.%d')"; then
    echo "$version does not start with today's date!"
    exit 1
fi

if [ ! -z "`git tag | grep "$version"`" ]; then echo 'ERROR: version already present'; exit 1; fi
if [ ! -z "`git status --porcelain | grep -v CHANGELOG`" ]; then echo 'ERROR: the working directory is not clean; commit or stash changes'; exit 1; fi
useless_files=$(find youtube_dl -type f -not -name '*.py')
if [ ! -z "$useless_files" ]; then echo "ERROR: Non-.py files in youtube_dl: $useless_files"; exit 1; fi
if [ ! -f "updates_key.pem" ]; then echo 'ERROR: updates_key.pem missing'; exit 1; fi
if ! type pandoc >/dev/null 2>/dev/null; then echo 'ERROR: pandoc is missing'; exit 1; fi
if ! python3 -c 'import rsa' 2>/dev/null; then echo 'ERROR: python3-rsa is missing'; exit 1; fi
if ! python3 -c 'import wheel' 2>/dev/null; then echo 'ERROR: wheel is missing'; exit 1; fi

read -p "Is ChangeLog up to date? (y/n) " -n 1
if [[ ! $REPLY =~ ^[Yy]$ ]]; then exit 1; fi

/bin/echo -e "\n### First of all, testing..."
make clean
if $skip_tests ; then
    echo 'SKIPPING TESTS'
else
    nosetests --verbose --with-coverage --cover-package=youtube_dl --cover-html test --stop || exit 1
fi

/bin/echo -e "\n### Changing version in version.py..."
sed -i "s/__version__ = '.*'/__version__ = '$version'/" youtube_dl/version.py

/bin/echo -e "\n### Changing version in ChangeLog..."
sed -i "s/<unreleased>/$version/" ChangeLog

/bin/echo -e "\n### Committing documentation, templates and youtube_dl/version.py..."
make README.md CONTRIBUTING.md issuetemplates supportedsites
git add README.md CONTRIBUTING.md .github/ISSUE_TEMPLATE/1_broken_site.md .github/ISSUE_TEMPLATE/2_site_support_request.md .github/ISSUE_TEMPLATE/3_site_feature_request.md .github/ISSUE_TEMPLATE/4_bug_report.md .github/ISSUE_TEMPLATE/5_feature_request.md .github/ISSUE_TEMPLATE/6_question.md docs/supportedsites.md youtube_dl/version.py ChangeLog
git commit $gpg_sign_commits -m "release $version"

/bin/echo -e "\n### Now tagging, signing and pushing..."
git tag -s -m "Release $version" "$version"
git show "$version"
read -p "Is it good, can I push? (y/n) " -n 1
if [[ ! $REPLY =~ ^[Yy]$ ]]; then exit 1; fi
echo
MASTER=$(git rev-parse --abbrev-ref HEAD)
git push origin $MASTER:master
git push origin "$version"

/bin/echo -e "\n### OK, now it is time to build the binaries..."
REV=$(git rev-parse HEAD)
make youtube-dl youtube-dl.tar.gz
read -p "VM running? (y/n) " -n 1
wget "http://$buildserver/build/ytdl-org/youtube-dl/youtube-dl.exe?rev=$REV" -O youtube-dl.exe
mkdir -p "build/$version"
mv youtube-dl youtube-dl.exe "build/$version"
mv youtube-dl.tar.gz "build/$version/youtube-dl-$version.tar.gz"
RELEASE_FILES="youtube-dl youtube-dl.exe youtube-dl-$version.tar.gz"
(cd build/$version/ && md5sum $RELEASE_FILES > MD5SUMS)
(cd build/$version/ && sha1sum $RELEASE_FILES > SHA1SUMS)
(cd build/$version/ && sha256sum $RELEASE_FILES > SHA2-256SUMS)
(cd build/$version/ && sha512sum $RELEASE_FILES > SHA2-512SUMS)

/bin/echo -e "\n### Signing and uploading the new binaries to GitHub..."
for f in $RELEASE_FILES; do gpg --passphrase-repeat 5 --detach-sig "build/$version/$f"; done

ROOT=$(pwd)
python devscripts/create-github-release.py ChangeLog $version "$ROOT/build/$version"

ssh ytdl@yt-dl.org "sh html/update_latest.sh $version"

/bin/echo -e "\n### Now switching to gh-pages..."
git clone --branch gh-pages --single-branch . build/gh-pages
(
    set -e
    ORIGIN_URL=$(git config --get remote.origin.url)
    cd build/gh-pages
    "$ROOT/devscripts/gh-pages/add-version.py" $version
    "$ROOT/devscripts/gh-pages/update-feed.py"
    "$ROOT/devscripts/gh-pages/sign-versions.py" < "$ROOT/updates_key.pem"
    "$ROOT/devscripts/gh-pages/generate-download.py"
    "$ROOT/devscripts/gh-pages/update-copyright.py"
    "$ROOT/devscripts/gh-pages/update-sites.py"
    git add *.html *.html.in update
    git commit $gpg_sign_commits -m "release $version"
    git push "$ROOT" gh-pages
    git push "$ORIGIN_URL" gh-pages
)
rm -rf build

make pypi-files
echo "Uploading to PyPi ..."
python setup.py sdist bdist_wheel upload
make clean

/bin/echo -e "\n### DONE!"
devscripts/update_version.py (new executable file, 46 lines)

@@ -0,0 +1,46 @@
#!/usr/bin/env python
from __future__ import unicode_literals

import datetime as dt
import sys


VERSION_FILE_FORMAT = '''\
# Autogenerated by devscripts/update_version.py
from __future__ import unicode_literals

__version__ = {!r}
'''


def split_version(version):
    if '.' not in version:
        return None, version

    version_list = version.split('.')
    version = '.'.join(version_list[:3])
    revision = version_list[3] if len(version_list) > 3 else None

    return version, revision


with open('youtube_dl/version.py', 'r') as f:
    exec(compile(f.read(), 'youtube_dl/version.py', 'exec'))

old_ver, old_rev = split_version(locals()['__version__'])
ver, rev = split_version(sys.argv[1]) if len(sys.argv) > 1 else (None, None)

if not ver:
    ver = (
        dt.datetime.now(dt.timezone.utc) if sys.version_info >= (3,)
        else dt.datetime.utcnow()).strftime('%Y.%m.%d')
if not rev and old_ver == ver:
    rev = str(int(old_rev or 0) + 1)

if rev:
    ver = ver + '.' + rev

with open('youtube_dl/version.py', 'w') as f:
    f.write(VERSION_FILE_FORMAT.format(ver))

print(ver)
devscripts/wine-py2exe.sh (deleted, 56 lines)

@@ -1,56 +0,0 @@
#!/bin/bash

# Run with as parameter a setup.py that works in the current directory
# e.g. no os.chdir()
# It will run twice, the first time will crash

set -e

SCRIPT_DIR="$( cd "$( dirname "$0" )" && pwd )"

if [ ! -d wine-py2exe ]; then

    sudo apt-get install wine1.3 axel bsdiff

    mkdir wine-py2exe
    cd wine-py2exe
    export WINEPREFIX=`pwd`

    axel -a "http://www.python.org/ftp/python/2.7/python-2.7.msi"
    axel -a "http://downloads.sourceforge.net/project/py2exe/py2exe/0.6.9/py2exe-0.6.9.win32-py2.7.exe"
    #axel -a "http://winetricks.org/winetricks"

    # http://appdb.winehq.org/objectManager.php?sClass=version&iId=21957
    echo "Follow python setup on screen"
    wine msiexec /i python-2.7.msi

    echo "Follow py2exe setup on screen"
    wine py2exe-0.6.9.win32-py2.7.exe

    #echo "Follow Microsoft Visual C++ 2008 Redistributable Package setup on screen"
    #bash winetricks vcrun2008

    rm py2exe-0.6.9.win32-py2.7.exe
    rm python-2.7.msi
    #rm winetricks

    # http://bugs.winehq.org/show_bug.cgi?id=3591

    mv drive_c/Python27/Lib/site-packages/py2exe/run.exe drive_c/Python27/Lib/site-packages/py2exe/run.exe.backup
    bspatch drive_c/Python27/Lib/site-packages/py2exe/run.exe.backup drive_c/Python27/Lib/site-packages/py2exe/run.exe "$SCRIPT_DIR/SizeOfImage.patch"
    mv drive_c/Python27/Lib/site-packages/py2exe/run_w.exe drive_c/Python27/Lib/site-packages/py2exe/run_w.exe.backup
    bspatch drive_c/Python27/Lib/site-packages/py2exe/run_w.exe.backup drive_c/Python27/Lib/site-packages/py2exe/run_w.exe "$SCRIPT_DIR/SizeOfImage_w.patch"

    cd -

else

    export WINEPREFIX="$( cd wine-py2exe && pwd )"

fi

wine "C:\\Python27\\python.exe" "$1" py2exe > "py2exe.log" 2>&1 || true
echo '# Copying python27.dll' >> "py2exe.log"
cp "$WINEPREFIX/drive_c/windows/system32/python27.dll" build/bdist.win32/winexe/bundle-2.7/
wine "C:\\Python27\\python.exe" "$1" py2exe >> "py2exe.log" 2>&1
test/test_jsinterp.py

@@ -11,6 +11,7 @@ sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

import math
import re
import time

from youtube_dl.compat import compat_str as str
from youtube_dl.jsinterp import JS_Undefined, JSInterpreter
@@ -208,6 +209,34 @@ class TestJSInterpreter(unittest.TestCase):
        self._test(jsi, 86000, args=['12/31/1969 18:01:26 MDT'])
        # epoch 0
        self._test(jsi, 0, args=['1 January 1970 00:00:00 UTC'])
        # undefined
        self._test(jsi, NaN, args=[JS_Undefined])
        # y,m,d, ... - may fail with older dates lacking DST data
        jsi = JSInterpreter(
            'function f() { return new Date(%s); }'
            % ('2024, 5, 29, 2, 52, 12, 42',))
        self._test(jsi, (
            1719625932042  # UK value
            + (
                + 3600  # back to GMT
                + (time.altzone if time.daylight  # host's DST
                   else time.timezone)
            ) * 1000))
        # no arg
        self.assertAlmostEqual(JSInterpreter(
            'function f() { return new Date() - 0; }').call_function('f'),
            time.time() * 1000, delta=100)
        # Date.now()
        self.assertAlmostEqual(JSInterpreter(
            'function f() { return Date.now(); }').call_function('f'),
            time.time() * 1000, delta=100)
        # Date.parse()
        jsi = JSInterpreter('function f(dt) { return Date.parse(dt); }')
        self._test(jsi, 0, args=['1 January 1970 00:00:00 UTC'])
        # Date.UTC()
        jsi = JSInterpreter('function f() { return Date.UTC(%s); }'
                            % ('1970, 0, 1, 0, 0, 0, 0',))
        self._test(jsi, 0)

    def test_call(self):
        jsi = JSInterpreter('''
@@ -463,6 +492,14 @@ class TestJSInterpreter(unittest.TestCase):
        self._test('function f(){return NaN << 42}', 0)
        self._test('function f(){return "21.9" << 1}', 42)
        self._test('function f(){return 21 << 4294967297}', 42)
        self._test('function f(){return true << "5";}', 32)
        self._test('function f(){return true << true;}', 2)
        self._test('function f(){return "19" & "21.9";}', 17)
        self._test('function f(){return "19" & false;}', 0)
        self._test('function f(){return "11.0" >> "2.1";}', 2)
        self._test('function f(){return 5 ^ 9;}', 12)
        self._test('function f(){return 0.0 << NaN}', 0)
        self._test('function f(){return null << undefined}', 0)

    def test_negative(self):
        self._test('function f(){return 2 * -2.0 ;}', -4)
test/test_youtube_signature.py

@@ -223,6 +223,18 @@ _NSIG_TESTS = [
        'https://www.youtube.com/s/player/9c6dfc4a/player_ias.vflset/en_US/base.js',
        'jbu7ylIosQHyJyJV', 'uwI0ESiynAmhNg',
    ),
    (
        'https://www.youtube.com/s/player/f6e09c70/player_ias.vflset/en_US/base.js',
        'W9HJZKktxuYoDTqW', 'jHbbkcaxm54',
    ),
    (
        'https://www.youtube.com/s/player/f6e09c70/player_ias_tce.vflset/en_US/base.js',
        'W9HJZKktxuYoDTqW', 'jHbbkcaxm54',
    ),
    (
        'https://www.youtube.com/s/player/91201489/player_ias_tce.vflset/en_US/base.js',
        'W9HJZKktxuYoDTqW', 'U48vOZHaeYS6vO',
    ),
]
@@ -284,7 +296,7 @@ def t_factory(name, sig_func, url_pattern):


def signature(jscode, sig_input):
    func = YoutubeIE(FakeYDL())._parse_sig_js(jscode)
    func = YoutubeIE(FakeYDL({'cachedir': False}))._parse_sig_js(jscode)
    src_sig = (
        compat_str(string.printable[:sig_input])
        if isinstance(sig_input, int) else sig_input)
@@ -292,9 +304,10 @@ def signature(jscode, sig_input):


def n_sig(jscode, sig_input):
    funcname = YoutubeIE(FakeYDL())._extract_n_function_name(jscode)
    return JSInterpreter(jscode).call_function(
        funcname, sig_input, _ytdl_do_not_return=sig_input)
    ie = YoutubeIE(FakeYDL({'cachedir': False}))
    jsi = JSInterpreter(jscode)
    jsi, _, func_code = ie._extract_n_function_code_jsi(sig_input, jsi)
    return ie._extract_n_function_from_code(jsi, func_code)(sig_input)


make_sig_test = t_factory(
youtube_dl/__init__.py

@@ -18,7 +18,7 @@ from .compat import (
    compat_getpass,
    compat_register_utf8,
    compat_shlex_split,
    workaround_optparse_bug9161,
    _workaround_optparse_bug9161,
)
from .utils import (
    _UnsafeExtensionError,
@@ -50,7 +50,7 @@ def _real_main(argv=None):
    # Compatibility fix for Windows
    compat_register_utf8()

    workaround_optparse_bug9161()
    _workaround_optparse_bug9161()

    setproctitle('youtube-dl')
@ -16,7 +16,6 @@ import os
|
||||
import platform
|
||||
import re
|
||||
import shlex
|
||||
import shutil
|
||||
import socket
|
||||
import struct
|
||||
import subprocess
|
||||
@ -24,11 +23,15 @@ import sys
|
||||
import types
|
||||
import xml.etree.ElementTree
|
||||
|
||||
_IDENTITY = lambda x: x
|
||||
|
||||
# naming convention
|
||||
# 'compat_' + Python3_name.replace('.', '_')
|
||||
# other aliases exist for convenience and/or legacy
|
||||
# wrap disposable test values in type() to reclaim storage
|
||||
|
||||
# deal with critical unicode/str things first
|
||||
# deal with critical unicode/str things first:
|
||||
# compat_str, compat_basestring, compat_chr
|
||||
try:
|
||||
# Python 2
|
||||
compat_str, compat_basestring, compat_chr = (
|
||||
@ -39,18 +42,23 @@ except NameError:
|
||||
str, (str, bytes), chr
|
||||
)
|
||||
|
||||
# casefold
|
||||
|
||||
# compat_casefold
|
||||
try:
|
||||
compat_str.casefold
|
||||
compat_casefold = lambda s: s.casefold()
|
||||
except AttributeError:
|
||||
from .casefold import _casefold as compat_casefold
|
||||
|
||||
|
||||
# compat_collections_abc
|
||||
try:
|
||||
import collections.abc as compat_collections_abc
|
||||
except ImportError:
|
||||
import collections as compat_collections_abc
|
||||
|
||||
|
||||
# compat_urllib_request
|
||||
try:
|
||||
import urllib.request as compat_urllib_request
|
||||
except ImportError: # Python 2
|
||||
@ -79,11 +87,15 @@ except TypeError:
|
||||
_add_init_method_arg(compat_urllib_request.Request)
|
||||
del _add_init_method_arg
|
||||
|
||||
|
||||
# compat_urllib_error
|
||||
try:
|
||||
import urllib.error as compat_urllib_error
|
||||
except ImportError: # Python 2
|
||||
import urllib2 as compat_urllib_error
|
||||
|
||||
|
||||
# compat_urllib_parse
|
||||
try:
|
||||
import urllib.parse as compat_urllib_parse
|
||||
except ImportError: # Python 2
|
||||
@ -98,17 +110,23 @@ except ImportError: # Python 2
|
||||
compat_urlparse = compat_urllib_parse
|
||||
compat_urllib_parse_urlparse = compat_urllib_parse.urlparse
|
||||
|
||||
|
||||
# compat_urllib_response
|
||||
try:
|
||||
import urllib.response as compat_urllib_response
|
||||
except ImportError: # Python 2
|
||||
import urllib as compat_urllib_response
|
||||
|
||||
|
||||
# compat_urllib_response.addinfourl
|
||||
try:
|
||||
compat_urllib_response.addinfourl.status
|
||||
except AttributeError:
|
||||
# .getcode() is deprecated in Py 3.
|
||||
compat_urllib_response.addinfourl.status = property(lambda self: self.getcode())
|
||||
|
||||
|
||||
# compat_http_cookiejar
|
||||
try:
|
||||
import http.cookiejar as compat_cookiejar
|
||||
except ImportError: # Python 2
|
||||
@ -127,12 +145,16 @@ else:
|
||||
compat_cookiejar_Cookie = compat_cookiejar.Cookie
|
||||
compat_http_cookiejar_Cookie = compat_cookiejar_Cookie
|
||||
|
||||
|
||||
# compat_http_cookies
|
||||
try:
|
||||
import http.cookies as compat_cookies
|
||||
except ImportError: # Python 2
|
||||
import Cookie as compat_cookies
|
||||
compat_http_cookies = compat_cookies
|
||||
|
||||
|
||||
# compat_http_cookies_SimpleCookie
|
||||
if sys.version_info[0] == 2 or sys.version_info < (3, 3):
|
||||
class compat_cookies_SimpleCookie(compat_cookies.SimpleCookie):
|
||||
def load(self, rawdata):
|
||||
@ -155,11 +177,15 @@ else:
|
||||
compat_cookies_SimpleCookie = compat_cookies.SimpleCookie
|
||||
compat_http_cookies_SimpleCookie = compat_cookies_SimpleCookie
|
||||
|
||||
|
||||
# compat_html_entities, probably useless now
|
||||
try:
|
||||
import html.entities as compat_html_entities
|
||||
except ImportError: # Python 2
|
||||
import htmlentitydefs as compat_html_entities
|
||||
|
||||
|
||||
# compat_html_entities_html5
|
||||
try: # Python >= 3.3
|
||||
compat_html_entities_html5 = compat_html_entities.html5
|
||||
except AttributeError:
|
||||
@ -2408,18 +2434,24 @@ except AttributeError:
|
||||
# Py < 3.1
|
||||
compat_http_client.HTTPResponse.getcode = lambda self: self.status
|
||||
|
||||
|
||||
# compat_urllib_HTTPError
|
||||
try:
|
||||
from urllib.error import HTTPError as compat_HTTPError
|
||||
except ImportError: # Python 2
|
||||
from urllib2 import HTTPError as compat_HTTPError
|
||||
compat_urllib_HTTPError = compat_HTTPError
|
||||
|
||||
|
||||
# compat_urllib_request_urlretrieve
|
||||
try:
|
||||
from urllib.request import urlretrieve as compat_urlretrieve
|
||||
except ImportError: # Python 2
|
||||
from urllib import urlretrieve as compat_urlretrieve
|
||||
compat_urllib_request_urlretrieve = compat_urlretrieve
|
||||
|
||||
|
||||
# compat_html_parser_HTMLParser, compat_html_parser_HTMLParseError
|
||||
try:
|
||||
from HTMLParser import (
|
||||
HTMLParser as compat_HTMLParser,
|
||||
@ -2432,22 +2464,33 @@ except ImportError: # Python 3
|
||||
# HTMLParseError was deprecated in Python 3.3 and removed in
|
||||
# Python 3.5. Introducing dummy exception for Python >3.5 for compatible
|
||||
# and uniform cross-version exception handling
|
||||
|
||||
class compat_HTMLParseError(Exception):
|
||||
pass
|
||||
|
||||
compat_html_parser_HTMLParser = compat_HTMLParser
|
||||
compat_html_parser_HTMLParseError = compat_HTMLParseError
|
||||
|
||||
|
||||
# compat_subprocess_get_DEVNULL
|
||||
try:
|
||||
_DEVNULL = subprocess.DEVNULL
|
||||
compat_subprocess_get_DEVNULL = lambda: _DEVNULL
|
||||
except AttributeError:
|
||||
compat_subprocess_get_DEVNULL = lambda: open(os.path.devnull, 'w')
|
||||
|
||||
|
||||
# compat_http_server
|
||||
try:
|
||||
import http.server as compat_http_server
|
||||
except ImportError:
|
||||
import BaseHTTPServer as compat_http_server
|
||||
|
||||
|
||||
# compat_urllib_parse_unquote_to_bytes,
|
||||
# compat_urllib_parse_unquote, compat_urllib_parse_unquote_plus,
# compat_urllib_parse_urlencode,
# compat_urllib_parse_parse_qs
try:
from urllib.parse import unquote_to_bytes as compat_urllib_parse_unquote_to_bytes
from urllib.parse import unquote as compat_urllib_parse_unquote
@@ -2598,6 +2641,8 @@ except ImportError: # Python 2

compat_urllib_parse_parse_qs = compat_parse_qs


# compat_urllib_request_DataHandler
try:
from urllib.request import DataHandler as compat_urllib_request_DataHandler
except ImportError: # Python < 3.4
@@ -2632,16 +2677,20 @@ except ImportError: # Python < 3.4

return compat_urllib_response.addinfourl(io.BytesIO(data), headers, url)


# compat_xml_etree_ElementTree_ParseError
try:
from xml.etree.ElementTree import ParseError as compat_xml_parse_error
except ImportError: # Python 2.6
from xml.parsers.expat import ExpatError as compat_xml_parse_error
compat_xml_etree_ElementTree_ParseError = compat_xml_parse_error

etree = xml.etree.ElementTree

# compat_xml_etree_ElementTree_Element
_etree = xml.etree.ElementTree


class _TreeBuilder(etree.TreeBuilder):
class _TreeBuilder(_etree.TreeBuilder):
def doctype(self, name, pubid, system):
pass

@@ -2650,7 +2699,7 @@ try:
# xml.etree.ElementTree.Element is a method in Python <=2.6 and
# the following will crash with:
#  TypeError: isinstance() arg 2 must be a class, type, or tuple of classes and types
isinstance(None, etree.Element)
isinstance(None, _etree.Element)
from xml.etree.ElementTree import Element as compat_etree_Element
except TypeError: # Python <=2.6
from xml.etree.ElementTree import _ElementInterface as compat_etree_Element
@@ -2658,12 +2707,12 @@ compat_xml_etree_ElementTree_Element = compat_etree_Element

if sys.version_info[0] >= 3:
def compat_etree_fromstring(text):
return etree.XML(text, parser=etree.XMLParser(target=_TreeBuilder()))
return _etree.XML(text, parser=_etree.XMLParser(target=_TreeBuilder()))
else:
# python 2.x tries to encode unicode strings with ascii (see the
# XMLParser._fixtext method)
try:
_etree_iter = etree.Element.iter
_etree_iter = _etree.Element.iter
except AttributeError: # Python <=2.6
def _etree_iter(root):
for el in root.findall('*'):
@@ -2675,27 +2724,29 @@ else:
# 2.7 source
def _XML(text, parser=None):
if not parser:
parser = etree.XMLParser(target=_TreeBuilder())
parser = _etree.XMLParser(target=_TreeBuilder())
parser.feed(text)
return parser.close()

def _element_factory(*args, **kwargs):
el = etree.Element(*args, **kwargs)
el = _etree.Element(*args, **kwargs)
for k, v in el.items():
if isinstance(v, bytes):
el.set(k, v.decode('utf-8'))
return el

def compat_etree_fromstring(text):
doc = _XML(text, parser=etree.XMLParser(target=_TreeBuilder(element_factory=_element_factory)))
doc = _XML(text, parser=_etree.XMLParser(target=_TreeBuilder(element_factory=_element_factory)))
for el in _etree_iter(doc):
if el.text is not None and isinstance(el.text, bytes):
el.text = el.text.decode('utf-8')
return doc

if hasattr(etree, 'register_namespace'):
compat_etree_register_namespace = etree.register_namespace
else:

# compat_xml_etree_register_namespace
try:
compat_etree_register_namespace = _etree.register_namespace
except AttributeError:
def compat_etree_register_namespace(prefix, uri):
"""Register a namespace prefix.
The registry is global, and any existing mapping for either the
@@ -2704,14 +2755,16 @@ else:
attributes in this namespace will be serialized with prefix if possible.
ValueError is raised if prefix is reserved or is invalid.
"""
if re.match(r"ns\d+$", prefix):
raise ValueError("Prefix format reserved for internal use")
for k, v in list(etree._namespace_map.items()):
if re.match(r'ns\d+$', prefix):
raise ValueError('Prefix format reserved for internal use')
for k, v in list(_etree._namespace_map.items()):
if k == uri or v == prefix:
del etree._namespace_map[k]
etree._namespace_map[uri] = prefix
del _etree._namespace_map[k]
_etree._namespace_map[uri] = prefix
compat_xml_etree_register_namespace = compat_etree_register_namespace

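A minimal usage sketch of the etree shims touched in this hunk (assuming the names are imported from youtube_dl.compat, as the __all__ list further down suggests; the sample XML and namespace URI are invented):

    # compat_etree_fromstring() accepts bytes on both Python 2 and 3 and yields
    # text (unicode) attribute values and element text, thanks to the
    # _TreeBuilder/_element_factory plumbing above.
    from youtube_dl.compat import (
        compat_etree_fromstring,
        compat_etree_register_namespace,
    )

    doc = compat_etree_fromstring(b'<root foo="bar">baz</root>')
    assert doc.attrib['foo'] == 'bar' and doc.text == 'baz'

    # compat_etree_register_namespace() registers a serialization prefix, falling
    # back to xml.etree's private _namespace_map where register_namespace() is missing.
    compat_etree_register_namespace('ex', 'urn:example:ns')
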
# compat_xpath, compat_etree_iterfind
if sys.version_info < (2, 7):
# Here comes the crazy part: In 2.6, if the xpath is a unicode,
# .//node does not match if a node is a direct child of . !
@@ -2898,7 +2951,6 @@ if sys.version_info < (2, 7):
def __init__(self, root):
self.root = root

##
# Generate all matching objects.

def compat_etree_iterfind(elem, path, namespaces=None):
@@ -2933,13 +2985,15 @@ if sys.version_info < (2, 7):


else:
compat_xpath = lambda xpath: xpath
compat_etree_iterfind = lambda element, match: element.iterfind(match)
compat_xpath = _IDENTITY


# compat_os_name
compat_os_name = os._name if os.name == 'java' else os.name


# compat_shlex_quote
if compat_os_name == 'nt':
def compat_shlex_quote(s):
return s if re.match(r'^[-_\w./]+$', s) else '"%s"' % s.replace('"', '\\"')
@@ -2954,6 +3008,7 @@ else:
return "'" + s.replace("'", "'\"'\"'") + "'"

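A rough sketch of what compat_shlex_quote does on each branch (the commented results are illustrative, not captured output):

    from youtube_dl.compat import compat_shlex_quote

    compat_shlex_quote('abc-1.2/x')   # safe characters pass through unchanged
    compat_shlex_quote("it's")        # POSIX branch: 'it'"'"'s'
    compat_shlex_quote('say "hi"')    # 'nt' branch: "say \"hi\""
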
# compat_shlex.split
try:
args = shlex.split('中文')
assert (isinstance(args, list)
@@ -2969,6 +3024,7 @@ except (AssertionError, UnicodeEncodeError):
return list(map(lambda s: s.decode('utf-8'), shlex.split(s, comments, posix)))


# compat_ord
def compat_ord(c):
if isinstance(c, int):
return c
@@ -2976,6 +3032,7 @@ def compat_ord(c):
return ord(c)


# compat_getenv, compat_os_path_expanduser, compat_setenv
if sys.version_info >= (3, 0):
compat_getenv = os.getenv
compat_expanduser = os.path.expanduser
@@ -3063,6 +3120,7 @@ else:
compat_os_path_expanduser = compat_expanduser


# compat_os_path_realpath
if compat_os_name == 'nt' and sys.version_info < (3, 8):
# os.path.realpath on Windows does not follow symbolic links
# prior to Python 3.8 (see https://bugs.python.org/issue9949)
@@ -3076,6 +3134,7 @@ else:
compat_os_path_realpath = compat_realpath


# compat_print
if sys.version_info < (3, 0):
def compat_print(s):
from .utils import preferredencoding
@@ -3086,6 +3145,7 @@ else:
print(s)


# compat_getpass_getpass
if sys.version_info < (3, 0) and sys.platform == 'win32':
def compat_getpass(prompt, *args, **kwargs):
if isinstance(prompt, compat_str):
@@ -3098,22 +3158,22 @@ else:
compat_getpass_getpass = compat_getpass


# compat_input
try:
compat_input = raw_input
except NameError: # Python 3
compat_input = input


# compat_kwargs
# Python < 2.6.5 require kwargs to be bytes
try:
def _testfunc(x):
pass
_testfunc(**{'x': 0})
(lambda x: x)(**{'x': 0})
except TypeError:
def compat_kwargs(kwargs):
return dict((bytes(k), v) for k, v in kwargs.items())
else:
compat_kwargs = lambda kwargs: kwargs
compat_kwargs = _IDENTITY


# compat_numeric_types
@@ -3132,6 +3192,8 @@ except NameError: # Python 3
# compat_int
compat_int = compat_integer_types[-1]


# compat_socket_create_connection
if sys.version_info < (2, 7):
def compat_socket_create_connection(address, timeout, source_address=None):
host, port = address
@@ -3158,6 +3220,7 @@ else:
compat_socket_create_connection = socket.create_connection


# compat_contextlib_suppress
try:
from contextlib import suppress as compat_contextlib_suppress
except ImportError:
@@ -3200,12 +3263,12 @@ except AttributeError:
# repeated .close() is OK, but just in case
with compat_contextlib_suppress(EnvironmentError):
f.close()
popen.wait()
popen.wait()

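The hunk above leans on compat_contextlib_suppress to make the repeated close()/wait() calls safe; a minimal sketch of the shim's contract (the file path is made up):

    import os

    from youtube_dl.compat import compat_contextlib_suppress

    # Same contract as contextlib.suppress(): the listed exceptions are
    # swallowed, anything else still propagates.
    with compat_contextlib_suppress(EnvironmentError):
        os.remove('/tmp/probably-missing.tmp')
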
# Fix https://github.com/ytdl-org/youtube-dl/issues/4223
# See http://bugs.python.org/issue9161 for what is broken
def workaround_optparse_bug9161():
def _workaround_optparse_bug9161():
op = optparse.OptionParser()
og = optparse.OptionGroup(op, 'foo')
try:
@@ -3224,9 +3287,10 @@ def workaround_optparse_bug9161():
optparse.OptionGroup.add_option = _compat_add_option


if hasattr(shutil, 'get_terminal_size'):  # Python >= 3.3
compat_get_terminal_size = shutil.get_terminal_size
else:
# compat_shutil_get_terminal_size
try:
from shutil import get_terminal_size as compat_get_terminal_size  # Python >= 3.3
except ImportError:
_terminal_size = collections.namedtuple('terminal_size', ['columns', 'lines'])

def compat_get_terminal_size(fallback=(80, 24)):
@@ -3256,27 +3320,33 @@ else:
columns = _columns
if lines is None or lines <= 0:
lines = _lines

return _terminal_size(columns, lines)

compat_shutil_get_terminal_size = compat_get_terminal_size


# compat_itertools_count
try:
itertools.count(start=0, step=1)
type(itertools.count(start=0, step=1))
compat_itertools_count = itertools.count
except TypeError: # Python 2.6
except TypeError: # Python 2.6 lacks step
def compat_itertools_count(start=0, step=1):
while True:
yield start
start += step

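For context, itertools.count() only grew its step argument in Python 2.7, which is what the fallback above papers over; a quick sketch:

    from itertools import islice

    from youtube_dl.compat import compat_itertools_count

    # Same behaviour as itertools.count(start, step), including on 2.6 where
    # the generator fallback is used.
    assert list(islice(compat_itertools_count(10, 5), 4)) == [10, 15, 20, 25]
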
# compat_tokenize_tokenize
if sys.version_info >= (3, 0):
from tokenize import tokenize as compat_tokenize_tokenize
else:
from tokenize import generate_tokens as compat_tokenize_tokenize


# compat_struct_pack, compat_struct_unpack, compat_Struct
try:
struct.pack('!I', 0)
type(struct.pack('!I', 0))
except TypeError:
# In Python 2.6 and 2.7.x < 2.7.7, struct requires a bytes argument
# See https://bugs.python.org/issue19099
@@ -3308,8 +3378,10 @@ else:
compat_Struct = struct.Struct


# compat_map/filter() returning an iterator, supposedly the
# same versioning as for zip below
# builtins returning an iterator

# compat_map, compat_filter
# supposedly the same versioning as for zip below
try:
from future_builtins import map as compat_map
except ImportError:
@@ -3326,6 +3398,7 @@ except ImportError:
except ImportError:
compat_filter = filter

# compat_zip
try:
from future_builtins import zip as compat_zip
except ImportError: # not 2.6+ or is 3.x
@@ -3335,6 +3408,7 @@ except ImportError: # not 2.6+ or is 3.x
compat_zip = zip


# compat_itertools_zip_longest
# method renamed between Py2/3
try:
from itertools import zip_longest as compat_itertools_zip_longest
@@ -3342,7 +3416,8 @@ except ImportError:
from itertools import izip_longest as compat_itertools_zip_longest


# new class in collections
# compat_collections_chain_map
# collections.ChainMap: new class
try:
from collections import ChainMap as compat_collections_chain_map
# Py3.3's ChainMap is deficient
@@ -3398,19 +3473,22 @@ except ImportError:
def new_child(self, m=None, **kwargs):
m = m or {}
m.update(kwargs)
return compat_collections_chain_map(m, *self.maps)
# support inheritance !
return type(self)(m, *self.maps)

@property
def parents(self):
return compat_collections_chain_map(*(self.maps[1:]))
return type(self)(*(self.maps[1:]))

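The switch to type(self) matters once the fallback ChainMap is subclassed (as jsinterp does); a small sketch with a hypothetical subclass:

    from youtube_dl.compat import compat_collections_chain_map as ChainMap

    class ScopedMap(ChainMap):  # stand-in for a real subclass such as a local scope
        pass

    child = ScopedMap({'a': 1}).new_child({'b': 2})
    # With `return type(self)(m, *self.maps)` the derived class is preserved;
    # the old code always produced a plain compat_collections_chain_map.
    assert isinstance(child, ScopedMap)
    assert isinstance(child.parents, ScopedMap)
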
# compat_re_Pattern, compat_re_Match
# Pythons disagree on the type of a pattern (RegexObject, _sre.SRE_Pattern, Pattern, ...?)
compat_re_Pattern = type(re.compile(''))
# and on the type of a match
compat_re_Match = type(re.match('a', 'a'))


# compat_base64_b64decode
if sys.version_info < (3, 3):
def compat_b64decode(s, *args, **kwargs):
if isinstance(s, compat_str):
@@ -3422,6 +3500,7 @@ else:
compat_base64_b64decode = compat_b64decode


# compat_ctypes_WINFUNCTYPE
if platform.python_implementation() == 'PyPy' and sys.pypy_version_info < (5, 4, 0):
# PyPy2 prior to version 5.4.0 expects byte strings as Windows function
# names, see the original PyPy issue [1] and the youtube-dl one [2].
@@ -3440,6 +3519,7 @@ else:
return ctypes.WINFUNCTYPE(*args, **kwargs)


# compat_open
if sys.version_info < (3, 0):
# open(file, mode='r', buffering=- 1, encoding=None, errors=None, newline=None, closefd=True) not: opener=None
def compat_open(file_, *args, **kwargs):
@@ -3467,18 +3547,28 @@ except AttributeError:
def compat_datetime_timedelta_total_seconds(td):
return (td.microseconds + (td.seconds + td.days * 24 * 3600) * 10**6) / 10**6


# optional decompression packages
# compat_brotli
# PyPi brotli package implements 'br' Content-Encoding
try:
import brotli as compat_brotli
except ImportError:
compat_brotli = None
# compat_ncompress
# PyPi ncompress package implements 'compress' Content-Encoding
try:
import ncompress as compat_ncompress
except ImportError:
compat_ncompress = None

# compat_zstandard
# PyPi zstandard package implements 'zstd' Content-Encoding (RFC 8878 7.2)
try:
import zstandard as compat_zstandard
except ImportError:
compat_zstandard = None

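These imports are only capability probes; a hedged sketch of how a response body might be decoded when the optional packages are installed (decode_content() is an invented helper, and the package APIs assumed are brotli.decompress(), ncompress.decompress() and zstandard.ZstdDecompressor()):

    from youtube_dl.compat import compat_brotli, compat_ncompress, compat_zstandard

    def decode_content(data, encoding):
        # Each compat_* name is None when the PyPI package is missing, so callers
        # must degrade gracefully rather than assume support.
        if encoding == 'br' and compat_brotli:
            return compat_brotli.decompress(data)
        if encoding == 'compress' and compat_ncompress:
            return compat_ncompress.decompress(data)
        if encoding == 'zstd' and compat_zstandard:
            return compat_zstandard.ZstdDecompressor().decompressobj().decompress(data)
        return data
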
legacy = [
'compat_HTMLParseError',
@@ -3495,6 +3585,7 @@ legacy = [
'compat_getpass',
'compat_parse_qs',
'compat_realpath',
'compat_shlex_split',
'compat_urllib_parse_parse_qs',
'compat_urllib_parse_unquote',
'compat_urllib_parse_unquote_plus',
@@ -3508,8 +3599,6 @@ legacy = [


__all__ = [
'compat_html_parser_HTMLParseError',
'compat_html_parser_HTMLParser',
'compat_Struct',
'compat_base64_b64decode',
'compat_basestring',
@@ -3518,13 +3607,9 @@ __all__ = [
'compat_chr',
'compat_collections_abc',
'compat_collections_chain_map',
'compat_datetime_timedelta_total_seconds',
'compat_http_cookiejar',
'compat_http_cookiejar_Cookie',
'compat_http_cookies',
'compat_http_cookies_SimpleCookie',
'compat_contextlib_suppress',
'compat_ctypes_WINFUNCTYPE',
'compat_datetime_timedelta_total_seconds',
'compat_etree_fromstring',
'compat_etree_iterfind',
'compat_filter',
@@ -3533,6 +3618,12 @@ __all__ = [
'compat_getpass_getpass',
'compat_html_entities',
'compat_html_entities_html5',
'compat_html_parser_HTMLParseError',
'compat_html_parser_HTMLParser',
'compat_http_cookiejar',
'compat_http_cookiejar_Cookie',
'compat_http_cookies',
'compat_http_cookies_SimpleCookie',
'compat_http_client',
'compat_http_server',
'compat_input',
@@ -3555,7 +3646,7 @@ __all__ = [
'compat_register_utf8',
'compat_setenv',
'compat_shlex_quote',
'compat_shlex_split',
'compat_shutil_get_terminal_size',
'compat_socket_create_connection',
'compat_str',
'compat_struct_pack',
@@ -3575,5 +3666,5 @@ __all__ = [
'compat_xml_etree_register_namespace',
'compat_xpath',
'compat_zip',
'workaround_optparse_bug9161',
'compat_zstandard',
]

@@ -122,7 +122,8 @@ class YoutubeBaseInfoExtractor(InfoExtractor):
'INNERTUBE_CONTEXT': {
'client': {
'clientName': 'TVHTML5',
'clientVersion': '7.20241201.18.00',
'clientVersion': '7.20250120.19.00',
'userAgent': 'Mozilla/5.0 (ChromiumStylePlatform) Cobalt/Version',
},
},
'INNERTUBE_CONTEXT_CLIENT_NAME': 7,
@@ -1851,12 +1852,22 @@ class YoutubeIE(YoutubeBaseInfoExtractor):

if func_code:
return jsi, player_id, func_code
return self._extract_n_function_code_jsi(video_id, jsi, player_id)

func_name = self._extract_n_function_name(jscode)
def _extract_n_function_code_jsi(self, video_id, jsi, player_id=None):

var_ay = self._search_regex(
r'(?:[;\s]|^)\s*(var\s*[\w$]+\s*=\s*"[^"]+"\s*\.\s*split\("\{"\))(?=\s*[,;])',
jsi.code, 'useful values', default='')

func_name = self._extract_n_function_name(jsi.code)

func_code = jsi.extract_function_code(func_name)
if var_ay:
func_code = (func_code[0], ';\n'.join((var_ay, func_code[1])))

self.cache.store('youtube-nsig', player_id, func_code)
if player_id:
self.cache.store('youtube-nsig', player_id, func_code)
return jsi, player_id, func_code

def _extract_n_function_from_code(self, jsi, func_code):

@@ -1,10 +1,12 @@
# coding: utf-8
from __future__ import unicode_literals

import calendar
import itertools
import json
import operator
import re
import time

from functools import update_wrapper, wraps

@@ -12,8 +14,10 @@ from .utils import (
error_to_compat_str,
ExtractorError,
float_or_none,
int_or_none,
js_to_json,
remove_quotes,
str_or_none,
unified_timestamp,
variadic,
write_string,
@@ -150,6 +154,7 @@ def _js_to_primitive(v):
)


# more exact: yt-dlp/yt-dlp#12110
def _js_toString(v):
return (
'undefined' if v is JS_Undefined
@@ -158,7 +163,7 @@ def _js_toString(v):
else 'null' if v is None
# bool <= int: do this first
else ('false', 'true')[v] if isinstance(v, bool)
else '{0:.7f}'.format(v).rstrip('.0') if isinstance(v, compat_numeric_types)
else re.sub(r'(?<=\d)\.?0*$', '', '{0:.7f}'.format(v)) if isinstance(v, compat_numeric_types)
else _js_to_primitive(v))

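The regex that replaces rstrip('.0') fixes whole-number formatting: str.rstrip() strips any trailing characters from the set {'.', '0'}, not the literal suffix, so digits of the integer part could be eaten. A plain-Python illustration:

    import re

    v = 100.0
    '{0:.7f}'.format(v).rstrip('.0')                   # -> '1'   (old behaviour)
    re.sub(r'(?<=\d)\.?0*$', '', '{0:.7f}'.format(v))  # -> '100' (new behaviour)

    v = 0.5
    re.sub(r'(?<=\d)\.?0*$', '', '{0:.7f}'.format(v))  # -> '0.5' (trailing zeros still trimmed)
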
@@ -404,6 +409,7 @@ class JSInterpreter(object):
class Exception(ExtractorError):
def __init__(self, msg, *args, **kwargs):
expr = kwargs.pop('expr', None)
msg = str_or_none(msg, default='"None"')
if expr is not None:
msg = '{0} in: {1!r:.100}'.format(msg.rstrip(), expr)
super(JSInterpreter.Exception, self).__init__(msg, *args, **kwargs)
@@ -431,6 +437,7 @@ class JSInterpreter(object):
flags, _ = self.regex_flags(flags)
# First, avoid https://github.com/python/cpython/issues/74534
self.__self = None
pattern_txt = str_or_none(pattern_txt) or '(?:)'
self.__pattern_txt = pattern_txt.replace('[[', r'[\[')
self.__flags = flags

@@ -475,6 +482,73 @@ class JSInterpreter(object):
flags |= cls.RE_FLAGS[ch]
return flags, expr[idx + 1:]

class JS_Date(object):
_t = None

@staticmethod
def __ymd_etc(*args, **kw_is_utc):
# args: year, monthIndex, day, hours, minutes, seconds, milliseconds
is_utc = kw_is_utc.get('is_utc', False)

args = list(args[:7])
args += [0] * (9 - len(args))
args[1] += 1  # month 0..11 -> 1..12
ms = args[6]
for i in range(6, 9):
args[i] = -1  # don't know
if is_utc:
args[-1] = 1
# TODO: [MDN] When a segment overflows or underflows its expected
# range, it usually "carries over to" or "borrows from" the higher segment.
try:
mktime = calendar.timegm if is_utc else time.mktime
return mktime(time.struct_time(args)) * 1000 + ms
except (OverflowError, ValueError):
return None

@classmethod
def UTC(cls, *args):
t = cls.__ymd_etc(*args, is_utc=True)
return _NaN if t is None else t

@staticmethod
def parse(date_str, **kw_is_raw):
is_raw = kw_is_raw.get('is_raw', False)

t = unified_timestamp(str_or_none(date_str), False)
return int(t * 1000) if t is not None else t if is_raw else _NaN

@staticmethod
def now(**kw_is_raw):
is_raw = kw_is_raw.get('is_raw', False)

t = time.time()
return int(t * 1000) if t is not None else t if is_raw else _NaN

def __init__(self, *args):
if not args:
args = [self.now(is_raw=True)]
if len(args) == 1:
if isinstance(args[0], JSInterpreter.JS_Date):
self._t = int_or_none(args[0].valueOf(), default=None)
else:
arg_type = _js_typeof(args[0])
if arg_type == 'string':
self._t = self.parse(args[0], is_raw=True)
elif arg_type == 'number':
self._t = int(args[0])
else:
self._t = self.__ymd_etc(*args)

def toString(self):
try:
return time.strftime('%a %b %0d %Y %H:%M:%S %Z%z', self._t).rstrip()
except TypeError:
return "Invalid Date"

def valueOf(self):
return _NaN if self._t is None else self._t

@classmethod
def __op_chars(cls):
op_chars = set(';,[')
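A hedged sketch of what the new JS_Date wiring enables; call_function() is the existing JSInterpreter entry point, while the JS snippets and the expected values in the comments are only illustrative:

    from youtube_dl.jsinterp import JSInterpreter

    # `new Date(<string>)` now evaluates to milliseconds since the epoch
    # (via unified_timestamp), replacing the old single-argument lambda.
    jsi = JSInterpreter('function f() { return new Date("1970-01-01T00:00:01.000Z"); }')
    print(jsi.call_function('f'))   # expected: 1000

    # Date.UTC(...) and Date.now() are dispatched to the JS_Date static methods.
    jsi = JSInterpreter('function g() { return Date.UTC(1970, 0, 1, 0, 0, 2); }')
    print(jsi.call_function('g'))   # expected: 2000
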
@@ -599,14 +673,15 @@ class JSInterpreter(object):
except Exception as e:
raise self.Exception('Failed to evaluate {left_val!r:.50} {op} {right_val!r:.50}'.format(**locals()), expr, cause=e)

def _index(self, obj, idx, allow_undefined=True):
def _index(self, obj, idx, allow_undefined=None):
if idx == 'length' and isinstance(obj, list):
return len(obj)
try:
return obj[int(idx)] if isinstance(obj, list) else obj[compat_str(idx)]
except (TypeError, KeyError, IndexError) as e:
if allow_undefined:
# when is not allowed?
# allow_undefined is None gives correct behaviour
if allow_undefined or (
allow_undefined is None and not isinstance(e, TypeError)):
return JS_Undefined
raise self.Exception('Cannot get index {idx!r:.100}'.format(**locals()), expr=repr(obj), cause=e)

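The allow_undefined=None default distinguishes a missing key or index (undefined in JS) from indexing something that cannot be indexed at all; a sketch using the private helper directly:

    from youtube_dl.jsinterp import JSInterpreter, JS_Undefined

    jsi = JSInterpreter('')
    # KeyError/IndexError still map to JS undefined ...
    assert jsi._index({'a': 1}, 'b') is JS_Undefined
    # ... but a TypeError (e.g. indexing None) is no longer swallowed:
    # jsi._index(None, 'b') raises JSInterpreter.Exception.
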
@@ -715,7 +790,7 @@ class JSInterpreter(object):

new_kw, _, obj = expr.partition('new ')
if not new_kw:
for klass, konstr in (('Date', lambda x: int(unified_timestamp(x, False) * 1000)),
for klass, konstr in (('Date', lambda *x: self.JS_Date(*x).valueOf()),
('RegExp', self.JS_RegExp),
('Error', self.Exception)):
if not obj.startswith(klass + '('):
@@ -1034,6 +1109,7 @@ class JSInterpreter(object):
'String': compat_str,
'Math': float,
'Array': list,
'Date': self.JS_Date,
}
obj = local_vars.get(variable)
if obj in (JS_Undefined, None):
@@ -1086,6 +1162,8 @@ class JSInterpreter(object):
assertion(len(argvals) == 2, 'takes two arguments')
return argvals[0] ** argvals[1]
raise self.Exception('Unsupported Math method ' + member, expr=expr)
elif obj is self.JS_Date:
return getattr(obj, member)(*argvals)

if member == 'split':
assertion(len(argvals) <= 2, 'takes at most two arguments')