refactor: remove tjaf dependency; add local TJA parser
tools/categories.json (new file, 157 lines)
@@ -0,0 +1,157 @@
[{
    "id": 1,
    "title": "Pop",
    "title_lang": {
        "ja": "J-POP",
        "en": "Pop",
        "cn": "流行音乐",
        "tw": "流行音樂",
        "ko": "POP"
    },
    "song_skin": {
        "sort": 1,
        "background": "#219fbb",
        "border": ["#7ec3d3", "#0b6773"],
        "outline": "#005058",
        "info_fill": "#004d68",
        "bg_img": "bg_genre_0.png"
    },
    "aliases": [
        "jpop",
        "j-pop",
        "pops"
    ]
},{
    "id": 2,
    "title": "Anime",
    "title_lang": {
        "ja": "アニメ",
        "en": "Anime",
        "cn": "卡通动画音乐",
        "tw": "卡通動畫音樂",
        "ko": "애니메이션"
    },
    "song_skin": {
        "sort": 2,
        "background": "#ff9700",
        "border": ["#ffdb8c", "#e75500"],
        "outline": "#9c4100",
        "info_fill": "#9c4002",
        "bg_img": "bg_genre_1.png"
    },
    "aliases": null
},{
    "id": 3,
    "title": "VOCALOID™ Music",
    "title_lang": {
        "ja": "ボーカロイド™曲",
        "en": "VOCALOID™ Music"
    },
    "song_skin": {
        "sort": 3,
        "background": "#def2ef",
        "border": ["#f7fbff", "#79919f"],
        "outline": "#5a6584",
        "info_fill": "#546184",
        "bg_img": "bg_genre_2.png"
    },
    "aliases": [
        "ボーカロイド曲",
        "ボーカロイド",
        "vocaloid music",
        "vocaloidmusic",
        "vocaloid"
    ]
},{
    "id": 4,
    "title": "Variety",
    "title_lang": {
        "ja": "バラエティ",
        "en": "Variety",
        "cn": "综合音乐",
        "tw": "綜合音樂",
        "ko": "버라이어티"
    },
    "song_skin": {
        "sort": 4,
        "background": "#8fd321",
        "border": ["#f7fbff", "#587d0b"],
        "outline": "#374c00",
        "info_fill": "#3c6800",
        "bg_img": "bg_genre_3.png"
    },
    "aliases": [
        "バラエティー",
        "どうよう",
        "童謡・民謡",
        "children",
        "children/folk",
        "children-folk"
    ]
},{
    "id": 5,
    "title": "Classical",
    "title_lang": {
        "ja": "クラシック",
        "en": "Classical",
        "cn": "古典音乐",
        "tw": "古典音樂",
        "ko": "클래식"
    },
    "song_skin": {
        "sort": 5,
        "background": "#d1a016",
        "border": ["#e7cf6b", "#9a6b00"],
        "outline": "#734d00",
        "info_fill": "#865800",
        "bg_img": "bg_genre_4.png"
    },
    "aliases": [
        "クラッシック",
        "classic"
    ]
},{
    "id": 6,
    "title": "Game Music",
    "title_lang": {
        "ja": "ゲームミュージック",
        "en": "Game Music",
        "cn": "游戏音乐",
        "tw": "遊戲音樂",
        "ko": "게임"
    },
    "song_skin": {
        "sort": 6,
        "background": "#9c72c0",
        "border": ["#bda2ce", "#63407e"],
        "outline": "#4b1c74",
        "info_fill": "#4f2886",
        "bg_img": "bg_genre_5.png"
    },
    "aliases": [
        "game",
        "gamemusic"
    ]
},{
    "id": 7,
    "title": "NAMCO Original",
    "title_lang": {
        "ja": "ナムコオリジナル",
        "en": "NAMCO Original",
        "cn": "NAMCO原创音乐",
        "tw": "NAMCO原創音樂",
        "ko": "남코 오리지널"
    },
    "song_skin": {
        "sort": 7,
        "background": "#ff5716",
        "border": ["#ffa66b", "#b53000"],
        "outline": "#941c00",
        "info_fill": "#961e00",
        "bg_img": "bg_genre_6.png"
    },
    "aliases": [
        "namco",
        "namcooriginal"
    ]
}]
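Not part of this commit, but as an illustration of how the category data above can be consumed, here is a minimal Python sketch; the helper names and the hard-coded path are illustrative assumptions, not existing taiko-web code:

import json

def load_categories(path="tools/categories.json"):
    # Each entry carries an "id", localized titles and an optional "aliases" list.
    with open(path, encoding="utf-8") as f:
        return json.load(f)

def category_id_for(name, categories):
    # Match a user-supplied genre name against titles and aliases, case-insensitively.
    needle = name.strip().lower()
    for cat in categories:
        titles = [cat["title"], *cat.get("title_lang", {}).values()]
        aliases = cat.get("aliases") or []  # "aliases" may be null, as for Anime above
        if any(needle == t.lower() for t in titles if t) or needle in aliases:
            return cat["id"]
    return None

# e.g. category_id_for("j-pop", load_categories()) should resolve to 1 (Pop)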
tools/generate_previews.py (new file, 38 lines)
@@ -0,0 +1,38 @@
#!/usr/bin/env python3
# .ogg preview generator for use when app and songs are on two different machines

import argparse
import requests
import os.path
from ffmpy import FFmpeg


parser = argparse.ArgumentParser(description='Generate song previews.')
parser.add_argument('site', help='Instance URL, eg. https://taiko.bui.pm')
parser.add_argument('song_dir', help='Path to songs directory, eg. /srv/taiko/public/taiko/songs')
parser.add_argument('--overwrite', action='store_true', help='Overwrite existing previews')
args = parser.parse_args()


if __name__ == '__main__':
    songs = requests.get('{}/api/songs'.format(args.site)).json()
    for i, song in enumerate(songs):
        print('{}/{} {} (id: {})'.format(i + 1, len(songs), song['title'], song['id']))

        song_path = '{}/{}/main.{}'.format(args.song_dir, song['id'], song['music_type'] if 'music_type' in song else 'mp3')
        prev_path = '{}/{}/preview.ogg'.format(args.song_dir, song['id'])

        if os.path.isfile(song_path):
            if not os.path.isfile(prev_path) or args.overwrite:
                if not song['preview'] or song['preview'] <= 0:
                    print('Skipping due to no preview')
                    continue

                print('Making preview.ogg')
                ff = FFmpeg(inputs={song_path: '-ss %s' % song['preview']},
                            outputs={prev_path: '-codec:a libvorbis -b:a 64k -ar 32000 -y -loglevel panic'})
                ff.run()
            else:
                print('Preview already exists')
        else:
            print('song file not found')
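For reference, the script takes the instance URL and the songs directory as positional arguments (plus an optional --overwrite flag), fetches the song list from <site>/api/songs, and for every song with a positive preview time writes a 64 kbps, 32 kHz Vorbis preview.ogg next to the song's main audio file, starting at the preview offset via ffmpeg.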
tools/get_version.bat (new file, 4 lines)
@@ -0,0 +1,4 @@
@echo off
(
git log -1 --pretty="format:{\"commit\": \"%%H\", \"commit_short\": \"%%h\", \"version\": \"%%ad\"}" --date="format:%%y.%%m.%%d"
) > ../version.json
tools/get_version.sh (new file, 3 lines)
@@ -0,0 +1,3 @@
#!/bin/bash
toplevel=$( git rev-parse --show-toplevel )
git log -1 --pretty="format:{\"commit\": \"%H\", \"commit_short\": \"%h\", \"version\": \"%ad\"}" --date="format:%y.%m.%d" > "$toplevel/version.json"
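Both scripts write the same single-line JSON file, version.json, of the form {"commit": "<full hash>", "commit_short": "<abbreviated hash>", "version": "<commit date as yy.mm.dd>"}. The shell version places it at the repository root; the batch version writes to ../version.json and is therefore meant to be run from inside tools/.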
tools/hooks/post-checkout (new file, 2 lines)
@@ -0,0 +1,2 @@
#!/bin/bash
./tools/get_version.sh
tools/hooks/post-commit (new file, 2 lines)
@@ -0,0 +1,2 @@
#!/bin/bash
./tools/get_version.sh
tools/hooks/post-merge (new file, 2 lines)
@@ -0,0 +1,2 @@
#!/bin/bash
./tools/get_version.sh
tools/hooks/post-rewrite (new file, 2 lines)
@@ -0,0 +1,2 @@
#!/bin/bash
./tools/get_version.sh
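The four hook scripts above are identical; setup.sh copies them into .git/hooks so that version.json is regenerated after every checkout, commit, merge and history rewrite.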
tools/merge_image.htm (new file, 196 lines)
@@ -0,0 +1,196 @@
<html>
<head>
    <title>Merge Image</title>
    <style>
        body{
            transition: background-color 0.5s;
            background-color: #fff;
            font-family: sans-serif;
            font-size: 20px;
        }
        input[type=number]{
            font-family: monospace;
            font-size: 18px;
            padding: 5px;
        }
        #settings{
            display: flex;
            margin-bottom: 18px;
            height: 40px;
        }
        label{
            display: flex;
            align-items: center;
            height: 100%;
        }
        label:not(:first-child){
            margin-left: 10px;
            border-left: 2px solid #ccc;
            padding-left: 8px;
        }
        input{
            margin: 5px;
        }
    </style>
</head>
<body>
    <div id="settings">
        <label>
            Max height
            <input id="max-height" type="number" min="1" max="5000" step="1" value="2000">
            px
        </label>
        <label>
            Spacing
            <input id="spacing" type="number" min="0" max="100" step="1" value="0">
            px
        </label>
        <label>
            <input id="vertical" type="checkbox" checked>
            Vertical
        </label>
    </div>
    <div id="hint">Drag and drop your images here...</div>
    <canvas id="canvas"></canvas>
    <script>
        var canvas = document.getElementById("canvas")
        var ctx = canvas.getContext("2d")
        var allFiles
        var maxHeightElement = document.getElementById("max-height")
        var spacingElement = document.getElementById("spacing")
        var maxHeight = parseInt(maxHeightElement.value)
        var spacing = parseInt(spacingElement.value)
        var vertical = true

        document.addEventListener("dragover", event => {
            event.preventDefault()
            event.dataTransfer.dropEffect = "copy"
            document.body.style.backgroundColor = "#ccc"
        })
        document.addEventListener("dragleave", () => {
            document.body.style.backgroundColor = "#fff"
        })
        document.addEventListener("drop", event => {
            document.getElementById("hint").style.display = "none"
            document.body.style.backgroundColor = "#fff"
            event.preventDefault()
            allFiles = []
            var promises = []
            for(let file of event.dataTransfer.files){
                promises.push(readFile(file))
            }
            Promise.all(promises).then(drawCanvas)
        })
        maxHeightElement.addEventListener("change", event => {
            var value = parseInt(event.currentTarget.value)
            if(value >= 1 && value <= 5000){
                maxHeight = value
                if(allFiles && allFiles.length){
                    drawCanvas()
                }
            }
        })
        spacingElement.addEventListener("change", event => {
            var value = parseInt(event.currentTarget.value)
            if(value >= 0 && value <= 100){
                spacing = value
                if(allFiles && allFiles.length){
                    drawCanvas()
                }
            }
        })
        document.getElementById("vertical").addEventListener("change", event => {
            vertical = event.currentTarget.checked
            if(allFiles && allFiles.length){
                drawCanvas()
            }
        })

        function readFile(file){
            return new Promise((resolve, reject) => {
                var reader = new FileReader()
                reader.addEventListener("load", () => {
                    if(reader.result){
                        var img = document.createElement("img")
                        img.addEventListener("load", () => {
                            var noExt = file.name.slice(0, file.name.lastIndexOf("."))
                            if(parseInt(noExt) == noExt){
                                var name = parseInt(noExt)
                            }else{
                                var name = noExt
                            }
                            allFiles.push({
                                name: name,
                                img: img
                            })
                            resolve()
                        })
                        img.addEventListener("error", resolve)
                        img.addEventListener("abort", resolve)
                        img.src = reader.result
                    }else{
                        resolve()
                    }
                })
                reader.addEventListener("error", resolve)
                reader.addEventListener("abort", resolve)
                reader.readAsDataURL(file)
            })
        }

        function drawCanvas(){
            var x = 0
            var y = 0
            var biggestWidth = 0
            var canvasWidth = 0
            var canvasHeight = 0
            allFiles.sort((a, b) => a.name > b.name ? 1 : -1)
            for(var i in allFiles){
                var file = allFiles[i]
                if(vertical){
                    if(y + file.img.height > maxHeight + spacing){
                        y = 0
                        x += biggestWidth
                        biggestWidth = 0
                    }
                    file.x = x + (x === 0 ? 0 : spacing)
                    file.y = y + (y === 0 ? 0 : spacing)
                    y += file.img.height + (y === 0 ? 0 : spacing)
                    if(file.img.width > biggestWidth){
                        biggestWidth = file.img.width
                    }
                    if(y > canvasHeight){
                        canvasHeight = y
                    }
                }else{
                    if(x + file.img.width > maxHeight + spacing){
                        x = 0
                        y += biggestWidth
                        biggestWidth = 0
                    }
                    file.x = x + (x === 0 ? 0 : spacing)
                    file.y = y + (y === 0 ? 0 : spacing)
                    x += file.img.width + (x === 0 ? 0 : spacing)
                    if(file.img.height > biggestWidth){
                        biggestWidth = file.img.height
                    }
                    if(x > canvasWidth){
                        canvasWidth = x
                    }
                }
            }
            if(vertical){
                canvasWidth = x + biggestWidth
            }else{
                canvasHeight = y + biggestWidth
            }
            canvas.width = canvasWidth
            canvas.height = canvasHeight
            for(var i in allFiles){
                var file = allFiles[i]
                ctx.drawImage(file.img, file.x, file.y, file.img.width, file.img.height)
            }
        }
    </script>
</body>
</html>
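merge_image.htm is a self-contained page: images dropped onto it are sorted by (numeric) filename and packed onto a canvas in columns up to the configured max height (or rows when Vertical is unchecked), with optional spacing between them, presumably to assemble sprite sheets that can then be saved from the canvas. Note the original declared "var vectical = true", which left the "vertical" variable uninitialised until the checkbox was toggled; the spelling is corrected above.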
tools/migrate_db.py (new file, 114 lines)
@@ -0,0 +1,114 @@
#!/usr/bin/env python3
# Migrate old SQLite taiko.db to MongoDB

import sqlite3
from pymongo import MongoClient

import os,sys,inspect
current_dir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
parent_dir = os.path.dirname(current_dir)
sys.path.insert(0, parent_dir)
import config

client = MongoClient(config.MONGO['host'])
client.drop_database(config.MONGO['database'])
db = client[config.MONGO['database']]
sqdb = sqlite3.connect('taiko.db')
sqdb.row_factory = sqlite3.Row
curs = sqdb.cursor()

def migrate_songs():
    curs.execute('select * from songs order by id')
    rows = curs.fetchall()

    for row in rows:
        song = {
            'id': row['id'],
            'title': row['title'],
            'title_lang': {'ja': row['title'], 'en': None, 'cn': None, 'tw': None, 'ko': None},
            'subtitle': row['subtitle'],
            'subtitle_lang': {'ja': row['subtitle'], 'en': None, 'cn': None, 'tw': None, 'ko': None},
            'courses': {'easy': None, 'normal': None, 'hard': None, 'oni': None, 'ura': None},
            'enabled': True if row['enabled'] else False,
            'category_id': row['category'],
            'type': row['type'],
            'offset': row['offset'] or 0,
            'skin_id': row['skin_id'],
            'preview': row['preview'] or 0,
            'volume': row['volume'] or 1.0,
            'maker_id': row['maker_id'],
            'hash': row['hash'],
            'order': row['id']
        }

        for diff in ['easy', 'normal', 'hard', 'oni', 'ura']:
            if row[diff]:
                spl = row[diff].split(' ')
                branch = False
                if len(spl) > 1 and spl[1] == 'B':
                    branch = True

                song['courses'][diff] = {'stars': int(spl[0]), 'branch': branch}

        if row['title_lang']:
            langs = row['title_lang'].splitlines()
            for lang in langs:
                spl = lang.split(' ', 1)
                if spl[0] in ['ja', 'en', 'cn', 'tw', 'ko']:
                    song['title_lang'][spl[0]] = spl[1]
                else:
                    song['title_lang']['en'] = lang

        if row['subtitle_lang']:
            langs = row['subtitle_lang'].splitlines()
            for lang in langs:
                spl = lang.split(' ', 1)
                if spl[0] in ['ja', 'en', 'cn', 'tw', 'ko']:
                    song['subtitle_lang'][spl[0]] = spl[1]
                else:
                    song['subtitle_lang']['en'] = lang

        db.songs.insert_one(song)
        last_song = song['id']

    db.seq.insert_one({'name': 'songs', 'value': last_song})

def migrate_makers():
    curs.execute('select * from makers')
    rows = curs.fetchall()

    for row in rows:
        db.makers.insert_one({
            'id': row['maker_id'],
            'name': row['name'],
            'url': row['url']
        })

def migrate_categories():
    curs.execute('select * from categories')
    rows = curs.fetchall()

    for row in rows:
        db.categories.insert_one({
            'id': row['id'],
            'title': row['title']
        })

def migrate_song_skins():
    curs.execute('select * from song_skins')
    rows = curs.fetchall()

    for row in rows:
        db.song_skins.insert_one({
            'id': row['id'],
            'name': row['name'],
            'song': row['song'],
            'stage': row['stage'],
            'don': row['don']
        })

if __name__ == '__main__':
    migrate_songs()
    migrate_makers()
    migrate_categories()
    migrate_song_skins()
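In the old SQLite schema this reads from, title_lang and subtitle_lang are newline-separated: each line either starts with one of the codes ja/en/cn/tw/ko followed by a space and the translated text, or is a bare string that gets treated as the English title. The per-difficulty columns hold strings such as "8" or "9 B", where the optional "B" marks a branching chart.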
tools/nginx.conf (new file, 25 lines)
@@ -0,0 +1,25 @@
server {
    listen 80;
    #server_name taiko.example.com;

    location / {
        proxy_set_header X-Real-IP $remote_addr;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
        proxy_set_header Host $server_name;
        proxy_pass http://127.0.0.1:34801;
    }

    location ~ ^/(assets/|songs/|src/|manifest.json$) {
        root /srv/taiko-web/public;
        location ~ ^/songs/(\d+)/preview\.mp3$ {
            try_files $uri /api/preview?id=$1;
        }
    }

    location /p2 {
        proxy_http_version 1.1;
        proxy_set_header Upgrade $http_upgrade;
        proxy_set_header Connection "Upgrade";
        proxy_pass http://127.0.0.1:34802;
    }
}
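With this config, /assets/, /songs/, /src/ and /manifest.json are served straight from /srv/taiko-web/public; a request for /songs/<id>/preview.mp3 falls back to /api/preview?id=<id> when no pregenerated file exists, and /p2 is proxied to port 34802 with the Upgrade/Connection headers required for WebSocket connections. nginx_subdir.conf below is the same setup for instances hosted under a /taiko-web/ path prefix.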
tools/nginx_subdir.conf (new file, 28 lines)
@@ -0,0 +1,28 @@
server {
    listen 80;
    #server_name taiko.example.com;

    location /taiko-web/ {
        proxy_set_header X-Real-IP $remote_addr;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
        proxy_set_header Host $server_name;
        proxy_pass http://127.0.0.1:34801;
    }

    location ~ ^/taiko-web/(assets/|songs/|src/|manifest.json$) {
        rewrite ^/taiko-web/(.*) /$1 break;
        root /srv/taiko-web/public;
        location ~ ^/taiko-web/songs/([0-9]+)/preview\.mp3$ {
            set $id $1;
            rewrite ^/taiko-web/(.*) /$1 break;
            try_files $uri /taiko-web/api/preview?id=$id;
        }
    }

    location /taiko-web/p2 {
        proxy_http_version 1.1;
        proxy_set_header Upgrade $http_upgrade;
        proxy_set_header Connection "Upgrade";
        proxy_pass http://127.0.0.1:34802;
    }
}
tools/set_previews.py (new file, 86 lines)
@@ -0,0 +1,86 @@
from __future__ import division
import os
import sqlite3
import re
DATABASE = 'taiko.db'

conn = sqlite3.connect(DATABASE)
curs = conn.cursor()

def parse_osu(osu):
    osu_lines = open(osu, 'r').read().replace('\x00', '').split('\n')
    sections = {}
    current_section = (None, [])

    for line in osu_lines:
        line = line.strip()
        secm = re.match(r'^\[(\w+)\]$', line)
        if secm:
            if current_section:
                sections[current_section[0]] = current_section[1]
            current_section = (secm.group(1), [])
        else:
            if current_section:
                current_section[1].append(line)
            else:
                current_section = ('Default', [line])

    if current_section:
        sections[current_section[0]] = current_section[1]

    return sections


def get_osu_key(osu, section, key, default=None):
    sec = osu[section]
    for line in sec:
        ok = line.split(':', 1)[0].strip()
        ov = line.split(':', 1)[1].strip()

        if ok.lower() == key.lower():
            return ov

    return default


def get_preview(song_id, song_type):
    preview = 0

    if song_type == "tja":
        if os.path.isfile('public/songs/%s/main.tja' % song_id):
            preview = get_tja_preview('public/songs/%s/main.tja' % song_id)
    else:
        osus = [osu for osu in os.listdir('public/songs/%s' % song_id) if osu in ['easy.osu', 'normal.osu', 'hard.osu', 'oni.osu']]
        if osus:
            osud = parse_osu('public/songs/%s/%s' % (song_id, osus[0]))
            preview = int(get_osu_key(osud, 'General', 'PreviewTime', 0))

    return preview


def get_tja_preview(tja):
    tja_lines = open(tja, 'r').read().replace('\x00', '').split('\n')

    for line in tja_lines:
        line = line.strip()
        if ':' in line:
            name, value = line.split(':', 1)
            if name.lower() == 'demostart':
                value = value.strip()
                try:
                    value = float(value)
                except ValueError:
                    pass
                else:
                    return int(value * 1000)
        elif line.lower() == '#start':
            break
    return 0


if __name__ == '__main__':
    songs = curs.execute('select id, type from songs').fetchall()
    for song in songs:
        preview = get_preview(song[0], song[1]) / 1000
        curs.execute('update songs set preview = ? where id = ?', (preview, song[0]))
    conn.commit()
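get_tja_preview only scans the chart header (it stops at the first #start): a line such as DEMOSTART:25.3 yields 25300 milliseconds, and the osu branch likewise reads PreviewTime, which is already in milliseconds. The main loop then divides by 1000, so the preview column in taiko.db ends up in seconds.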
tools/setup.sh (new file, 66 lines)
@@ -0,0 +1,66 @@
#!/bin/bash
set -euo pipefail

sudo apt update
sudo apt install -y git python3-pip python3-virtualenv python3-venv nginx ffmpeg redis supervisor

if [[ -r /etc/os-release ]]; then
    . /etc/os-release
    if [[ $ID = ubuntu ]]; then
        # MongoDB only supports LTS releases and has not published a package for Ubuntu 22.04 LTS yet
        case $VERSION_CODENAME in
            impish|kinetic|jammy)
                VERSION_CODENAME=focal ;;
        esac
        REPO="https://repo.mongodb.org/apt/ubuntu $VERSION_CODENAME/mongodb-org/5.0 multiverse"
    elif [[ $ID = debian ]]; then
        # MongoDB does not provide packages for Debian 11 yet
        case $VERSION_CODENAME in
            bullseye|bookworm|sid)
                VERSION_CODENAME=buster ;;
        esac
        REPO="https://repo.mongodb.org/apt/debian $VERSION_CODENAME/mongodb-org/5.0 main"
    else
        echo "Unsupported distribution $ID"
        exit 1
    fi
else
    echo "Not running a distribution with /etc/os-release available"
    exit 1
fi

wget -qO - https://www.mongodb.org/static/pgp/server-5.0.asc | sudo tee /etc/apt/trusted.gpg.d/mongodb-server-5.0.asc
echo "deb [ arch=amd64,arm64 ] $REPO" | sudo tee /etc/apt/sources.list.d/mongodb-org-5.0.list

sudo apt update
sudo apt install -y mongodb-org

sudo mkdir -p /srv/taiko-web
sudo chown $USER /srv/taiko-web
git clone https://github.com/bui/taiko-web.git /srv/taiko-web

cd /srv/taiko-web
tools/get_version.sh
cp tools/hooks/* .git/hooks/
cp config.example.py config.py
sudo cp tools/nginx.conf /etc/nginx/conf.d/taiko-web.conf

sudo sed -i 's/^\(\s\{0,\}\)\(include \/etc\/nginx\/sites-enabled\/\*;\)$/\1#\2/g' /etc/nginx/nginx.conf
sudo sed -i 's/}/ application\/wasm wasm;\n}/g' /etc/nginx/mime.types
sudo service nginx restart

python3 -m venv .venv
.venv/bin/pip install --upgrade pip wheel setuptools
.venv/bin/pip install -r requirements.txt

sudo mkdir -p /var/log/taiko-web
sudo cp tools/supervisor.conf /etc/supervisor/conf.d/taiko-web.conf
sudo service supervisor restart

sudo systemctl enable mongod.service
sudo service mongod start

IP=$(dig +short txt ch whoami.cloudflare @1.0.0.1 | tr -d '"')
echo
echo "Setup complete! You should be able to access your taiko-web instance at http://$IP"
echo
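The final dig command asks Cloudflare's resolver (1.0.0.1) for the whoami.cloudflare TXT record in the CHAOS class, which echoes back the machine's public IP; it is used only to print the URL in the completion message.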
tools/supervisor.conf (new file, 15 lines)
@@ -0,0 +1,15 @@
[program:taiko_app]
directory=/srv/taiko-web
command=/srv/taiko-web/.venv/bin/gunicorn -b 127.0.0.1:34801 app:app
autostart=true
autorestart=true
stdout_logfile=/var/log/taiko-web/app.out.log
stderr_logfile=/var/log/taiko-web/app.err.log

[program:taiko_server]
directory=/srv/taiko-web
command=/srv/taiko-web/.venv/bin/python server.py 34802
autostart=true
autorestart=true
stdout_logfile=/var/log/taiko-web/server.out.log
stderr_logfile=/var/log/taiko-web/server.err.log
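The two supervised programs bind the ports that the nginx configs proxy to: taiko_app runs the web application under gunicorn on 127.0.0.1:34801, and taiko_server runs server.py on 34802 (the /p2 endpoint).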
tools/taikodb_hash.py (new file, 49 lines)
@@ -0,0 +1,49 @@
import os
import sys
import hashlib
import base64
import sqlite3

def md5(md5hash, filename):
    with open(filename, "rb") as file:
        for chunk in iter(lambda: file.read(64 * 1024), b""):
            md5hash.update(chunk)

def get_hashes(root):
    hashes = {}
    diffs = ["easy", "normal", "hard", "oni", "ura"]
    dirs = os.listdir(root)
    for dir in dirs:
        dir_path = os.path.join(root, dir)
        if dir.isdigit() and os.path.isdir(dir_path):
            files = os.listdir(dir_path)
            md5hash = hashlib.md5()
            if "main.tja" in files:
                md5(md5hash, os.path.join(dir_path, "main.tja"))
            else:
                for diff in diffs:
                    if diff + ".osu" in files:
                        md5(md5hash, os.path.join(dir_path, diff + ".osu"))
            hashes[dir] = base64.b64encode(md5hash.digest())[:-2]
    return hashes

def write_db(database, songs):
    db = sqlite3.connect(database)
    hashes = get_hashes(songs)
    added = 0
    for id in hashes:
        added += 1
        cur = db.cursor()
        cur.execute("update songs set hash = ? where id = ?", (hashes[id].decode(), int(id)))
        cur.close()
    db.commit()
    db.close()
    if added:
        print("{0} hashes have been added to the database.".format(added))
    else:
        print("Error: No songs were found in the given directory.")

if len(sys.argv) >= 3:
    write_db(sys.argv[1], sys.argv[2])
else:
    print("Usage: taikodb_hash.py ../taiko.db ../public/songs")
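The stored hash is the MD5 of main.tja (or, for osu-style songs, of the present difficulty .osu files fed through a single digest in easy/normal/hard/oni/ura order), base64-encoded with the trailing "==" padding dropped by the [:-2] slice.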