@@ -0,0 +1,26 @@ | |||
# Maintainer: John Jenkins twodopeshaggy@gmail.com | |||
pkgname=kiara-git
pkgver=r50.d9ea66e
pkgrel=1
pkgdesc="Automatically sorts anime using information from anidb.net"
arch=('any')
url="https://github.com/hartfelt/kiara"
license=('MIT')
install=$pkgname.install
makedepends=('git')
depends=('python' 'python-colorama' 'python-setuptools')
source=('git+https://github.com/hartfelt/kiara.git')
sha256sums=('SKIP')

# Version string derived from git history: r<commit count>.<short hash>.
pkgver() {
	cd "$srcdir/kiara"
	printf "r%s.%s" "$(git rev-list --count HEAD)" "$(git rev-parse --short HEAD)"
}

package() {
	cd "$srcdir/kiara"
	python setup.py install --root="$pkgdir/" --optimize=1
	# NOTE(review): this directory was created but no license file was ever
	# installed into it -- confirm upstream ships a LICENSE/COPYING file
	# and install it here.
	install -d "$pkgdir/usr/share/licenses/$pkgname"
	# Bug fix: use install -D so that $pkgdir/etc is created first --
	# the previous `install -m 0644 kiararc $pkgdir/etc/` failed because
	# nothing created that directory.  Paths quoted against spaces.
	install -Dm644 kiararc "$pkgdir/etc/kiararc"
}
@@ -0,0 +1,3 @@ | |||
post_install() {
	# Point the user at the configuration file after installation.
	printf '%s\n' "To configure, edit /etc/kiararc or copy to ~/.kiararc and edit."
}
@@ -0,0 +1,5 @@ | |||
d9ea66e619b355a121cbda1a79082707887cd423 not-for-merge branch 'master' of https://github.com/hartfelt/kiara | |||
09ea64ca33b4e51619165656c2dbd09bb0956ad8 not-for-merge 'refs/pull/3/head' of https://github.com/hartfelt/kiara | |||
bbb1133be4b69b825eb5004ed894ba7b5448313c not-for-merge 'refs/pull/3/merge' of https://github.com/hartfelt/kiara | |||
9a872f515df84662713206bed08e97163655a9b4 not-for-merge 'refs/pull/4/head' of https://github.com/hartfelt/kiara | |||
6c44d2241903bbe39fd5bb05a52adb5ca3fb0be1 not-for-merge 'refs/pull/4/merge' of https://github.com/hartfelt/kiara |
@@ -0,0 +1 @@ | |||
ref: refs/heads/master |
@@ -0,0 +1,8 @@ | |||
[core] | |||
repositoryformatversion = 0 | |||
filemode = true | |||
bare = true | |||
[remote "origin"] | |||
url = https://github.com/hartfelt/kiara.git | |||
fetch = +refs/*:refs/* | |||
mirror = true |
@@ -0,0 +1 @@ | |||
Unnamed repository; edit this file 'description' to name the repository. |
@@ -0,0 +1,15 @@ | |||
#!/bin/sh
#
# NOTE: stock Git sample hook (installed by `git init`); vendored here
# unmodified and inactive under its sample name.
#
# An example hook script to check the commit log message taken by
# applypatch from an e-mail message.
#
# The hook should exit with non-zero status after issuing an
# appropriate message if it wants to stop the commit. The hook is
# allowed to edit the commit message file.
#
# To enable this hook, rename this file to "applypatch-msg".
. git-sh-setup
# Delegate to the commit-msg hook when one is enabled.
test -x "$GIT_DIR/hooks/commit-msg" &&
	exec "$GIT_DIR/hooks/commit-msg" ${1+"$@"}
:
@@ -0,0 +1,24 @@ | |||
#!/bin/sh
#
# NOTE: stock Git sample hook (installed by `git init`); vendored here
# unmodified and inactive under its sample name.
#
# An example hook script to check the commit log message.
# Called by "git commit" with one argument, the name of the file
# that has the commit message. The hook should exit with non-zero
# status after issuing an appropriate message if it wants to stop the
# commit. The hook is allowed to edit the commit message file.
#
# To enable this hook, rename this file to "commit-msg".
# Uncomment the below to add a Signed-off-by line to the message.
# Doing this in a hook is a bad idea in general, but the prepare-commit-msg
# hook is more suited to it.
#
# SOB=$(git var GIT_AUTHOR_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p')
# grep -qs "^$SOB" "$1" || echo "$SOB" >> "$1"
# This example catches duplicate Signed-off-by lines.
test "" = "$(grep '^Signed-off-by: ' "$1" |
	sort | uniq -c | sed -e '/^[ ]*1[ ]/d')" || {
	echo >&2 Duplicate Signed-off-by lines.
	exit 1
}
@@ -0,0 +1,8 @@ | |||
#!/bin/sh
#
# NOTE: stock Git sample hook (installed by `git init`); vendored here
# unmodified and inactive under its sample name.
#
# An example hook script to prepare a packed repository for use over
# dumb transports.
#
# To enable this hook, rename this file to "post-update".
exec git update-server-info
@@ -0,0 +1,14 @@ | |||
#!/bin/sh
#
# NOTE: stock Git sample hook (installed by `git init`); vendored here
# unmodified and inactive under its sample name.
#
# An example hook script to verify what is about to be committed
# by applypatch from an e-mail message.
#
# The hook should exit with non-zero status after issuing an
# appropriate message if it wants to stop the commit.
#
# To enable this hook, rename this file to "pre-applypatch".
. git-sh-setup
# Delegate to the pre-commit hook when one is enabled.
test -x "$GIT_DIR/hooks/pre-commit" &&
	exec "$GIT_DIR/hooks/pre-commit" ${1+"$@"}
:
@@ -0,0 +1,49 @@ | |||
#!/bin/sh
#
# NOTE: stock Git sample hook (installed by `git init`); vendored here
# unmodified and inactive under its sample name.
#
# An example hook script to verify what is about to be committed.
# Called by "git commit" with no arguments. The hook should
# exit with non-zero status after issuing an appropriate message if
# it wants to stop the commit.
#
# To enable this hook, rename this file to "pre-commit".
if git rev-parse --verify HEAD >/dev/null 2>&1
then
	against=HEAD
else
	# Initial commit: diff against an empty tree object
	against=4b825dc642cb6eb9a060e54bf8d69288fbee4904
fi
# If you want to allow non-ASCII filenames set this variable to true.
allownonascii=$(git config --bool hooks.allownonascii)
# Redirect output to stderr.
exec 1>&2
# Cross platform projects tend to avoid non-ASCII filenames; prevent
# them from being added to the repository. We exploit the fact that the
# printable range starts at the space character and ends with tilde.
if [ "$allownonascii" != "true" ] &&
	# Note that the use of brackets around a tr range is ok here, (it's
	# even required, for portability to Solaris 10's /usr/bin/tr), since
	# the square bracket bytes happen to fall in the designated range.
	test $(git diff --cached --name-only --diff-filter=A -z $against |
	  LC_ALL=C tr -d '[ -~]\0' | wc -c) != 0
then
	cat <<\EOF
Error: Attempt to add a non-ASCII file name.
This can cause problems if you want to work with people on other platforms.
To be portable it is advisable to rename the file.
If you know what you are doing you can disable this check using:
  git config hooks.allownonascii true
EOF
	exit 1
fi
# If there are whitespace errors, print the offending file names and fail.
exec git diff-index --check --cached $against --
@@ -0,0 +1,53 @@ | |||
#!/bin/sh
# NOTE: stock Git sample hook (installed by `git init`); vendored here
# unmodified and inactive under its sample name.
#
# An example hook script to verify what is about to be pushed. Called by "git
# push" after it has checked the remote status, but before anything has been
# pushed. If this script exits with a non-zero status nothing will be pushed.
#
# This hook is called with the following parameters:
#
# $1 -- Name of the remote to which the push is being done
# $2 -- URL to which the push is being done
#
# If pushing without using a named remote those arguments will be equal.
#
# Information about the commits which are being pushed is supplied as lines to
# the standard input in the form:
#
#   <local ref> <local sha1> <remote ref> <remote sha1>
#
# This sample shows how to prevent push of commits where the log message starts
# with "WIP" (work in progress).
remote="$1"
url="$2"
# All-zero sha1 marks ref creation/deletion in the hook's stdin protocol.
z40=0000000000000000000000000000000000000000
while read local_ref local_sha remote_ref remote_sha
do
	if [ "$local_sha" = $z40 ]
	then
		# Handle delete
		:
	else
		if [ "$remote_sha" = $z40 ]
		then
			# New branch, examine all commits
			range="$local_sha"
		else
			# Update to existing branch, examine new commits
			range="$remote_sha..$local_sha"
		fi
		# Check for WIP commit
		commit=`git rev-list -n 1 --grep '^WIP' "$range"`
		if [ -n "$commit" ]
		then
			echo >&2 "Found WIP commit in $local_ref, not pushing"
			exit 1
		fi
	fi
done
exit 0
@@ -0,0 +1,169 @@ | |||
#!/bin/sh
#
# Copyright (c) 2006, 2008 Junio C Hamano
#
# NOTE: stock Git sample hook (installed by `git init`); vendored here
# unmodified and inactive under its sample name.
#
# The "pre-rebase" hook is run just before "git rebase" starts doing
# its job, and can prevent the command from running by exiting with
# non-zero status.
#
# The hook is called with the following parameters:
#
# $1 -- the upstream the series was forked from.
# $2 -- the branch being rebased (or empty when rebasing the current branch).
#
# This sample shows how to prevent topic branches that are already
# merged to 'next' branch from getting rebased, because allowing it
# would result in rebasing already published history.
publish=next
basebranch="$1"
if test "$#" = 2
then
	topic="refs/heads/$2"
else
	topic=`git symbolic-ref HEAD` ||
	exit 0 ;# we do not interrupt rebasing detached HEAD
fi
case "$topic" in
refs/heads/??/*)
	;;
*)
	exit 0 ;# we do not interrupt others.
	;;
esac
# Now we are dealing with a topic branch being rebased
# on top of master. Is it OK to rebase it?
# Does the topic really exist?
git show-ref -q "$topic" || {
	echo >&2 "No such branch $topic"
	exit 1
}
# Is topic fully merged to master?
not_in_master=`git rev-list --pretty=oneline ^master "$topic"`
if test -z "$not_in_master"
then
	echo >&2 "$topic is fully merged to master; better remove it."
	exit 1 ;# we could allow it, but there is no point.
fi
# Is topic ever merged to next? If so you should not be rebasing it.
only_next_1=`git rev-list ^master "^$topic" ${publish} | sort`
only_next_2=`git rev-list ^master ${publish} | sort`
if test "$only_next_1" = "$only_next_2"
then
	not_in_topic=`git rev-list "^$topic" master`
	if test -z "$not_in_topic"
	then
		echo >&2 "$topic is already up-to-date with master"
		exit 1 ;# we could allow it, but there is no point.
	else
		exit 0
	fi
else
	not_in_next=`git rev-list --pretty=oneline ^${publish} "$topic"`
	/usr/bin/perl -e '
	my $topic = $ARGV[0];
	my $msg = "* $topic has commits already merged to public branch:\n";
	my (%not_in_next) = map {
		/^([0-9a-f]+) /;
		($1 => 1);
	} split(/\n/, $ARGV[1]);
	for my $elem (map {
		/^([0-9a-f]+) (.*)$/;
		[$1 => $2];
	} split(/\n/, $ARGV[2])) {
		if (!exists $not_in_next{$elem->[0]}) {
			if ($msg) {
				print STDERR $msg;
				undef $msg;
			}
			print STDERR " $elem->[1]\n";
		}
	}
	' "$topic" "$not_in_next" "$not_in_master"
	exit 1
fi
exit 0
################################################################
This sample hook safeguards topic branches that have been
published from being rewound.
The workflow assumed here is:
 * Once a topic branch forks from "master", "master" is never
   merged into it again (either directly or indirectly).
 * Once a topic branch is fully cooked and merged into "master",
   it is deleted. If you need to build on top of it to correct
   earlier mistakes, a new topic branch is created by forking at
   the tip of the "master". This is not strictly necessary, but
   it makes it easier to keep your history simple.
 * Whenever you need to test or publish your changes to topic
   branches, merge them into "next" branch.
The script, being an example, hardcodes the publish branch name
to be "next", but it is trivial to make it configurable via
$GIT_DIR/config mechanism.
With this workflow, you would want to know:
(1) ... if a topic branch has ever been merged to "next". Young
    topic branches can have stupid mistakes you would rather
    clean up before publishing, and things that have not been
    merged into other branches can be easily rebased without
    affecting other people. But once it is published, you would
    not want to rewind it.
(2) ... if a topic branch has been fully merged to "master".
    Then you can delete it. More importantly, you should not
    build on top of it -- other people may already want to
    change things related to the topic as patches against your
    "master", so if you need further changes, it is better to
    fork the topic (perhaps with the same name) afresh from the
    tip of "master".
Let's look at this example:
	o---o---o---o---o---o---o---o---o---o "next"
	   /       /           /           /
	  / a---a---b A       /           /
	 /       /           /           /
	/   /   c---c---c---c B         /
	   /   /             \         /
	  /   /   /           b---b C \         /
	 /   /   /           /         \       /
    ---o---o---o---o---o---o---o---o---o---o---o "master"
A, B and C are topic branches.
 * A has one fix since it was merged up to "next".
 * B has finished. It has been fully merged up to "master" and "next",
   and is ready to be deleted.
 * C has not merged to "next" at all.
We would want to allow C to be rebased, refuse A, and encourage
B to be deleted.
To compute (1):
	git rev-list ^master ^topic next
	git rev-list ^master next
	if these match, topic has not merged in next at all.
To compute (2):
	git rev-list master..topic
	if this is empty, it is fully merged to "master".
@@ -0,0 +1,36 @@ | |||
#!/bin/sh
#
# NOTE: stock Git sample hook (installed by `git init`); vendored here
# unmodified and inactive under its sample name.
#
# An example hook script to prepare the commit log message.
# Called by "git commit" with the name of the file that has the
# commit message, followed by the description of the commit
# message's source. The hook's purpose is to edit the commit
# message file. If the hook fails with a non-zero status,
# the commit is aborted.
#
# To enable this hook, rename this file to "prepare-commit-msg".
# This hook includes three examples. The first comments out the
# "Conflicts:" part of a merge commit.
#
# The second includes the output of "git diff --name-status -r"
# into the message, just before the "git status" output. It is
# commented because it doesn't cope with --amend or with squashed
# commits.
#
# The third example adds a Signed-off-by line to the message, that can
# still be edited. This is rarely a good idea.
case "$2,$3" in
  merge,)
    /usr/bin/perl -i.bak -ne 's/^/# /, s/^# #/#/ if /^Conflicts/ .. /#/; print' "$1" ;;
# ,|template,)
#   /usr/bin/perl -i.bak -pe '
#      print "\n" . `git diff --cached --name-status -r`
#	 if /^#/ && $first++ == 0' "$1" ;;
  *) ;;
esac
# SOB=$(git var GIT_AUTHOR_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p')
# grep -qs "^$SOB" "$1" || echo "$SOB" >> "$1"
@@ -0,0 +1,128 @@ | |||
#!/bin/sh
#
# NOTE: stock Git sample hook (installed by `git init`); vendored here
# unmodified and inactive under its sample name.
#
# An example hook script to blocks unannotated tags from entering.
# Called by "git receive-pack" with arguments: refname sha1-old sha1-new
#
# To enable this hook, rename this file to "update".
#
# Config
# ------
# hooks.allowunannotated
#   This boolean sets whether unannotated tags will be allowed into the
#   repository. By default they won't be.
# hooks.allowdeletetag
#   This boolean sets whether deleting tags will be allowed in the
#   repository. By default they won't be.
# hooks.allowmodifytag
#   This boolean sets whether a tag may be modified after creation. By default
#   it won't be.
# hooks.allowdeletebranch
#   This boolean sets whether deleting branches will be allowed in the
#   repository. By default they won't be.
# hooks.denycreatebranch
#   This boolean sets whether remotely creating branches will be denied
#   in the repository. By default this is allowed.
#
# --- Command line
refname="$1"
oldrev="$2"
newrev="$3"
# --- Safety check
if [ -z "$GIT_DIR" ]; then
	echo "Don't run this script from the command line." >&2
	echo " (if you want, you could supply GIT_DIR then run" >&2
	echo "  $0 <ref> <oldrev> <newrev>)" >&2
	exit 1
fi
if [ -z "$refname" -o -z "$oldrev" -o -z "$newrev" ]; then
	echo "usage: $0 <ref> <oldrev> <newrev>" >&2
	exit 1
fi
# --- Config
allowunannotated=$(git config --bool hooks.allowunannotated)
allowdeletebranch=$(git config --bool hooks.allowdeletebranch)
denycreatebranch=$(git config --bool hooks.denycreatebranch)
allowdeletetag=$(git config --bool hooks.allowdeletetag)
allowmodifytag=$(git config --bool hooks.allowmodifytag)
# check for no description
projectdesc=$(sed -e '1q' "$GIT_DIR/description")
case "$projectdesc" in
"Unnamed repository"* | "")
	echo "*** Project description file hasn't been set" >&2
	exit 1
	;;
esac
# --- Check types
# if $newrev is 0000...0000, it's a commit to delete a ref.
zero="0000000000000000000000000000000000000000"
if [ "$newrev" = "$zero" ]; then
	newrev_type=delete
else
	newrev_type=$(git cat-file -t $newrev)
fi
case "$refname","$newrev_type" in
	refs/tags/*,commit)
		# un-annotated tag
		short_refname=${refname##refs/tags/}
		if [ "$allowunannotated" != "true" ]; then
			echo "*** The un-annotated tag, $short_refname, is not allowed in this repository" >&2
			echo "*** Use 'git tag [ -a | -s ]' for tags you want to propagate." >&2
			exit 1
		fi
		;;
	refs/tags/*,delete)
		# delete tag
		if [ "$allowdeletetag" != "true" ]; then
			echo "*** Deleting a tag is not allowed in this repository" >&2
			exit 1
		fi
		;;
	refs/tags/*,tag)
		# annotated tag
		if [ "$allowmodifytag" != "true" ] && git rev-parse $refname > /dev/null 2>&1
		then
			echo "*** Tag '$refname' already exists." >&2
			echo "*** Modifying a tag is not allowed in this repository." >&2
			exit 1
		fi
		;;
	refs/heads/*,commit)
		# branch
		if [ "$oldrev" = "$zero" -a "$denycreatebranch" = "true" ]; then
			echo "*** Creating a branch is not allowed in this repository" >&2
			exit 1
		fi
		;;
	refs/heads/*,delete)
		# delete branch
		if [ "$allowdeletebranch" != "true" ]; then
			echo "*** Deleting a branch is not allowed in this repository" >&2
			exit 1
		fi
		;;
	refs/remotes/*,commit)
		# tracking branch
		;;
	refs/remotes/*,delete)
		# delete tracking branch
		if [ "$allowdeletebranch" != "true" ]; then
			echo "*** Deleting a tracking branch is not allowed in this repository" >&2
			exit 1
		fi
		;;
	*)
		# Anything else (is there anything else?)
		echo "*** Update hook: unknown type of update to ref $refname of type $newrev_type" >&2
		exit 1
		;;
esac
# --- Finished
exit 0
@@ -0,0 +1,6 @@ | |||
# git ls-files --others --exclude-from=.git/info/exclude | |||
# Lines that start with '#' are comments. | |||
# For a project mostly in C, the following would be a good set of | |||
# exclude patterns (uncomment them if you want to use them): | |||
# *.[oa] | |||
# *~ |
@@ -0,0 +1,6 @@ | |||
# pack-refs with: peeled fully-peeled | |||
d9ea66e619b355a121cbda1a79082707887cd423 refs/heads/master | |||
09ea64ca33b4e51619165656c2dbd09bb0956ad8 refs/pull/3/head | |||
bbb1133be4b69b825eb5004ed894ba7b5448313c refs/pull/3/merge | |||
9a872f515df84662713206bed08e97163655a9b4 refs/pull/4/head | |||
6c44d2241903bbe39fd5bb05a52adb5ca3fb0be1 refs/pull/4/merge |
@@ -0,0 +1,3 @@ | |||
post_install() {
	# Point the user at the configuration file after installation.
	printf '%s\n' "To configure, edit /etc/kiararc or copy to ~/.kiararc and edit."
}
@@ -0,0 +1,25 @@ | |||
# Generated by makepkg 4.2.1 | |||
# using fakeroot version 1.20.2 | |||
# Sat May 2 01:54:27 UTC 2015 | |||
pkgname = kiara-git | |||
pkgver = r50.d9ea66e-1 | |||
pkgdesc = Automatically sorts anime using information from anidb.net | |||
url = https://github.com/hartfelt/kiara | |||
builddate = 1430531667 | |||
packager = Unknown Packager | |||
size = 130048 | |||
arch = any | |||
license = MIT | |||
depend = python | |||
depend = python-colorama | |||
depend = python-setuptools | |||
makedepend = git | |||
makepkgopt = strip | |||
makepkgopt = docs | |||
makepkgopt = !libtool | |||
makepkgopt = !staticlibs | |||
makepkgopt = emptydirs | |||
makepkgopt = zipman | |||
makepkgopt = purge | |||
makepkgopt = !upx | |||
makepkgopt = !debug |
@@ -0,0 +1,27 @@ | |||
### Default configuration file for kiara.py | |||
# Copy this to ~/.kiararc and change to your needs. Remove the # (hash-sign) in | |||
# front of the options you use. | |||
### Your anidb login | |||
#user johndoe | |||
#pass secretpasswordinplaintextomg | |||
### Base paths for the organize-option | |||
# Some mediacenters like to distinguish between movies and series, so you can | |||
# set a separate path for each. If you don't care for this distinction, you can | |||
# just set the same path for both. | |||
#basepath_movie ~/anime/movies | |||
#basepath_series ~/anime/series | |||
### Optional settings | |||
# Where should kiara cache file meta-data? | |||
# It is safe to share this path with multiple users.
#database ~/.kiara.db | |||
# Location of the socket that's used to communicate with the backend | |||
#session ~/.kiara.session | |||
# The anidb api endpoint. This is configurable as per anidb recommendations,
# though there should be no reason to change this. | |||
#host api.anidb.net | |||
#port 9000 |
@@ -0,0 +1,234 @@ | |||
#!/usr/bin/env python3
# Bug fix: use the portable `env python3` shebang instead of the
# non-standard /bin/python path.
import argparse
import os, os.path
import sys
import libkiara

# Command-line interface.  The boolean flags map onto keyword arguments
# of libkiara.process(); --kill/--find-duplicates/--forget select
# stand-alone actions handled further down in this script.
parser = argparse.ArgumentParser(
	description='Do stuff with anime files and anidb.')
parser.add_argument('-w', '--watch',
	action='store_true', dest='watch',
	help='Mark all the files watched.')
parser.add_argument('-o', '--organize',
	action='store_true', dest='organize',
	help='Organize ALL THE FILES _o/')
parser.add_argument('--copy',
	action='store_true', dest='organize_copy',
	help='When organizing files, copy them instead of moving them.')
parser.add_argument('--overwrite',
	action='store_true', dest='organize_overwrite',
	help='When organizing files, always overwrite any existing files.')
parser.add_argument('--skip-update',
	action='store_false', dest='update_info', default=True,
	help='Skip updating file info from anidb, when the cached info is old. '
	'(missing info will still be fetched)')
parser.add_argument('-c', '--config',
	action='store', dest='config', type=argparse.FileType('r'),
	help='Alternative config file to use.')
parser.add_argument('--find-duplicates',
	action='store_true', dest='find_duplicates',
	help='Lists all episodes for which you have more than one file')
parser.add_argument('--forget',
	type=int, metavar='FID', nargs='*', dest='forget_fid',
	help='Delete all info from the database (but not the file itself) about '
	'the files with the given anidb file-id. (These are the numbers output '
	'by --find-duplicates)')
parser.add_argument('--brief',
	action='store_true', dest='brief',
	help='If nothing goes wrong, print only a single line for each file')
parser.add_argument('--kill',
	action='store_true', dest='kill',
	help='Shut down the backend')
parser.add_argument('file',
	metavar='FILE', type=str, nargs='*',
	help='A file to do something with')
args = parser.parse_args()
# Status printing.
# Terminal width used for word-wrapping, defaulting to 80 columns when
# it cannot be detected.  Note: despite the name, this holds the column
# count -- `stty size` prints "rows cols" and we keep the second field.
LINES = 80
try:
	_, LINES = map(int, os.popen('stty size', 'r').read().split())
except (ValueError, OSError):
	# Bug fix: was a bare `except:` (which would also swallow
	# KeyboardInterrupt).  No terminal / unparsable output simply keeps
	# the 80-column default.
	pass

def pp(head):
	"""Return a printer that prefixes text with *head* and word-wraps it.

	Continuation lines are indented with spaces to the width of *head*
	so wrapped output stays aligned under the tag column.
	"""
	def inner(s):
		res = ''
		tag = head
		line = ''
		parts = str(s).split(' ')
		while parts:
			if line:
				line += ' '
			line += parts.pop(0)
			# Wrap before the next word would overflow the terminal
			# (minus a small safety margin).
			if parts and len(line) + len(parts[0]) > LINES - 10:
				res += tag + line + '\n'
				line = ''
				tag = ' ' * len(tag)
		return res + tag + line
	return inner
# Pre-configured printers, one per message tag (the string is the line
# prefix; pp() re-uses its width to indent continuation lines).
legacy = pp('ERROR ')
status = pp(' ')
success = pp('OK ')
error = pp('ERROR ')
debug = pp('debug ')
# Wrap the printers in colour when colorama is installed; otherwise
# fall back to the plain printers above.
try:
	import colorama
	colorama.init()
	prnt = {
		'legacy': lambda s:
			(colorama.Fore.YELLOW + legacy(s) + colorama.Fore.RESET),
		'status': status,
		'success': lambda s:
			(colorama.Fore.GREEN + success(s) + colorama.Fore.RESET),
		'error': lambda s:
			(colorama.Fore.RED + error(s) + colorama.Fore.RESET),
		'debug': lambda s:
			(colorama.Fore.CYAN + debug(s) + colorama.Fore.RESET),
	}
except ImportError:
	prnt = {
		'legacy': legacy,
		'status': status,
		'success': success,
		'error': error,
		'debug': debug,
	}
# Message catalogue: maps the message keys yielded by the libkiara
# backend to user-facing templates ({0}, {1}, ... are filled from the
# message's data fields by format_status below).
trans = {
	'_': '{0}',
	'abandon_ship': 'Well... something went wrong',
	'anidb_adding_file': 'Adding {0} {1} to your mylist...',
	'anidb_busy': 'AniDB is busy, please try again later.',
	'anidb_file_unknown': 'File is unknown to anidb. Will not process further.',
	'anidb_marking_watched': 'Marking {0} {1} watched...',
	'anidb_ping_error': 'No answer from anidb :(',
	'anidb_ping_ok': 'Pinged anidb.',
	'backend_shutting_down': 'Backend shutting down...',
	'backend_start': 'Unable to contact the backend. Will try to start one...',
	'backend_start_failed': 'Unable to start a new backend, sorry :(',
	'backend_killed': 'Backend shut down',
	'banned': 'We got banned :(\n{0}\nTry again in 30 minutes',
	'dup': '- {0} {1} [{2}]',
	'dup_no_type': '- {0} {1} [unknown - '
		'please try re-kiara\'ing this file]',
	'dups_for': 'Duplicate files for {0} - {1}:',
	'dups_forget_others': 'Other users are using that file, I won\'t forget '
		'about that',
	'dups_forgot': 'Forgot about {0}',
	'dups_none': 'No duplicate files :)',
	'file_added': 'File added.',
	'file_already_organized': '{0} is already organized.',
	'file_copied': 'Copied {0} to {1}',
	'file_exists': '{0} already exists, not overwriting without --overwrite',
	'file_marked_watched': 'File marked watched',
	'file_moved': 'Moved {0} to {1}',
	'file_type': 'File type is {0}',
	'file_type_location': 'Type is {0}, so I\'ll put this in {1}',
	'hashing_file': 'Hashing {0}...',
	'kiara_banned': 'kiara is banned from AniDB.\nYour AniDB user should be ok',
	# Typo/grammar fix: "kiara have become" -> "kiara has become".
	'kiara_outdated': 'kiara has become outdated :(\nCheck the interwebs for '
		'an updated version',
	'killing_backend': 'Killing backend...',
	'logging_in': 'Logging in...',
	'login_accepted_outdated_client': 'Login accepted, but your copy of kiara '
		'is outdated.\nPlease consider updating it.',
	'login_again': 'We need to log in again ({0} {1})',
	'login_successful': 'Login successful.',
	'login_session_key': 'We got session key {0}',
	# Typo fix: "delevopers" -> "developers".
	'login_unexpected_return': 'Unexpected return code to AUTH command. Please '
		'show this to the developers of kiara: {0} {1}',
	'no_backend_running': 'No backend running',
	'oh_no': 'OH NOES: {0} {1}',
	'removing_empty_dir': 'The dir {0} is now empty, will remove it.',
	'socket_timeout': 'We got a socket timeout... hang on',
	'socket_timeout_again': 'Another timeout... bailing out',
	'unexpected_reply': 'Unexpected reply: {0} {1}',
	'unknown_file_extension': 'Unknown file extension: {0}',
	'wrong_tag': 'We got a message with the wrong tag... we have probably '
		'missed the previous message. I\'ll try again.',
}
def format_status(status):
	"""Render one backend status message for the terminal.

	`status` is normally a list [tag, message-key, *format-args]: the
	tag selects a printer from `prnt`, the key a template from `trans`.
	Returns the formatted string, or None for a suppressed debug
	message.  Anything malformed falls through to legacy formatting.
	"""
	try:
		if type(status) == list and len(status) >= 2:
			tag, msg, data = status[0], status[1], status[2:]
			# Debug messages are only shown when 'debug' is set in the
			# libkiara configuration.
			if tag != 'debug' or 'debug' in libkiara._config:
				return prnt[tag](trans[msg].format(*data))
			else:
				return None
	except Exception as e:
		# Broad catch on purpose: a malformed backend message must not
		# crash the frontend.  Show the error (white-on-red when
		# colorama is available) and fall through to legacy handling.
		if 'colorama' in globals():
			print(colorama.Fore.WHITE + colorama.Back.RED)
			print(e)
			print(colorama.Fore.RESET + colorama.Back.RESET)
		else:
			print(e)
	# Legacy status.
	return prnt['legacy'](status)
# Stand-alone action: shut down the backend, then exit.
if args.kill:
	print(format_status(['status', 'killing_backend', []]))
	for line in libkiara.kill():
		print(format_status(line))
	print(format_status(['success', 'backend_killed', []]))
	sys.exit()
# Make sure that all the files are actually files.
for file in args.file:
	if not os.path.isfile(os.path.abspath(file)):
		print('!!! %s is not a file' % file)
		sys.exit()
# Load configuration (an explicit -c file overrides the defaults).
if args.config:
	libkiara.load_config_file(args.config.name)
if not libkiara.check_config():
	sys.exit(-1)
# OK, run over the files.
for file in args.file:
	printed = False
	msgs = list()  # messages buffered while --brief decides what to show
	for line in libkiara.process(os.path.abspath(file),
			update_info=args.update_info,
			watch=args.watch,
			organize=args.organize,
			organize_copy=args.organize_copy,
			organize_overwrite=args.organize_overwrite):
		f = format_status(line)
		if f:
			if args.brief:
				if printed:
					print(f)
				else:
					if line[0] == 'error':
						# First error: flush the buffered context
						# messages, then the error itself.
						while msgs:
							print(msgs.pop(0))
						print(f)
						printed = True
					else:
						msgs.append(f)
			else:
				printed = True
				print(f)
	if args.brief and not printed:
		# --brief with no errors: one OK line per file.
		print(format_status(['success', '_', file]))
	elif printed:
		print()  # Blank line
# Stand-alone action: list duplicate mylist entries.
if args.find_duplicates:
	print('Locating duplicate files...')
	for line in libkiara.find_duplicates():
		f = format_status(line)
		if f:
			print(f)
# Stand-alone action: forget the given anidb file-ids.
if args.forget_fid:
	for line in libkiara.forget(*args.forget_fid):
		f = format_status(line)
		if f:
			print(f)
@@ -0,0 +1,10 @@ | |||
Metadata-Version: 1.0 | |||
Name: kiara | |||
Version: 1.1.1 | |||
Summary: Kiara updates your anidb list and sorts your anime | |||
Home-page: https://github.com/hartfelt/kiara | |||
Author: Bjørn Hartfelt | |||
Author-email: b.hartfelt@gmail.com | |||
License: UNKNOWN | |||
Description: UNKNOWN | |||
Platform: UNKNOWN |
@@ -0,0 +1,128 @@ | |||
#!/usr/bin/env python3 | |||
import os, os.path | |||
import sys | |||
import socket | |||
import time | |||
# Raised to abort processing gracefully ("like a hippo exiting a stage").
# Derives from BaseException so that plain `except Exception` handlers
# do not accidentally swallow it.
class AbandonShip(BaseException):
	pass
# Default config values.  Overridden by /etc/kiararc and ~/.kiararc
# (loaded below) and by any file passed to load_config_file().
# All values are kept as strings, exactly as read from the config file.
_config = {
	'host': 'api.anidb.net',  # AniDB API endpoint
	'port': '9000',
	'session': '~/.kiara.session',  # socket used to talk to the backend
	'database': '~/.kiara.db',  # file meta-data cache
}
def _config_items(file): | |||
for line in map(lambda s: s.strip(), file.readlines()): | |||
if line.startswith('#') or not line: | |||
continue | |||
yield line.split(None, 1) | |||
def load_config_file(file_name):
	"""Merge key/value pairs from *file_name* into the global config.

	Missing or unreadable files and malformed lines are ignored, so the
	optional locations (/etc/kiararc, ~/.kiararc) can be probed blindly.
	"""
	try:
		with open(file_name, 'r') as fp:
			_config.update(_config_items(fp))
	except (OSError, ValueError):
		# Bug fix: was a bare `except:` which also swallowed
		# KeyboardInterrupt/SystemExit.  OSError covers absent or
		# unreadable files; ValueError covers a line that does not
		# split into a key/value pair.  (The redundant `global _config`
		# was dropped: the dict is mutated, never rebound.)
		pass
# Load the system-wide config first, then let the user's own file
# override it (load_config_file silently ignores missing files).
load_config_file('/etc/kiararc')
load_config_file(os.path.expanduser('~/.kiararc'))
def check_config():
	"""Report any required config key that is missing.

	Prints one error line per absent key and returns True only when the
	configuration is complete.
	"""
	required = ('host port user pass database session '
		'basepath_movie basepath_series').split()
	missing = [key for key in required if key not in _config]
	for key in missing:
		print('ERROR: Missing config variable:', key, file=sys.stderr)
	return not missing
def _send(msg):
	"""Send one command line to the backend over its unix socket.

	Yields the backend's replies one at a time: replies containing a
	newline are yielded as a list of lines, others as plain strings.
	If no backend is answering, one is forked off and the command is
	retried once -- except for '- kill', where a dead backend is the
	desired outcome.
	"""
	def inner():
		client = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
		client.connect(os.path.expanduser(_config['session']))
		client.sendall(bytes(msg, 'UTF-8'))
		data = ''
		while True:
			data += str(client.recv(1024), 'UTF-8')
			# The backend ends the conversation with a bare marker.
			if data == '---end---':
				client.close()
				return
			# Replies are separated by blank lines; a multi-line reply
			# is yielded as a list of its lines.
			if '\n\n' in data:
				item, data = data.split('\n\n', 1)
				if '\n' in item:
					yield item.split('\n')
				else:
					yield item
	try:
		for reply in inner():
			yield reply
	except socket.error:
		if msg == '- kill':
			# We were unable to contact the backend, good.
			yield ['status', 'no_backend_running']
		else:
			# Normal procedure: fork a backend and retry the command.
			yield ['status', 'backend_start']
			if os.fork():
				# Parent: give the backend a moment to come up, then
				# retry once.  Bug fix: the 'backend_start_failed'
				# message below was previously unreachable -- a second
				# connection error propagated out of the generator
				# instead of being reported to the user.
				time.sleep(2)
				try:
					for reply in inner():
						yield reply
				except socket.error:
					yield ['error', 'backend_start_failed']
			else:
				# Child: become the backend (never returns normally).
				from libkiara import backend
				backend.serve(_config)
def ping():
	"""Ping the backend, printing every reply; return True when the final
	reply line was 'pong'."""
	alive = False
	for reply in _send('- ping'):
		print(reply)
		alive = reply == 'pong'
	return alive
# Backend actions: | |||
# a Add file | |||
# c Copy file instead of moving | |||
# o Organize file | |||
# u Get new file info from anidb when the cache is old | |||
# w Mark file watched | |||
# x Overwrite existing files | |||
# - Extra commands | |||
def process(file,
	update_info=True, watch=False, organize=False, organize_copy=False,
	organize_overwrite=False):
	"""Run the requested backend actions on file, yielding reply messages.

	Builds the one-letter action string ('a' plus any of 'uwocx', see the
	action list above) and forwards it to the backend with the file name.
	"""
	flags = 'a'
	for enabled, letter in (
			(update_info, 'u'),
			(watch, 'w'),
			(organize, 'o'),
			(organize_copy, 'c'),
			(organize_overwrite, 'x')):
		if enabled:
			flags += letter
	yield from _send(flags + ' ' + file)
def find_duplicates():
	"""Ask the backend for duplicate episodes; yield its reply messages."""
	yield from _send('- dups')
def forget(*fids):
	"""Ask the backend to forget the given file ids; yield its replies."""
	yield from _send('- forget ' + ' '.join(map(str, fids)))
def kill():
	"""Ask the backend to shut down; yield its reply messages."""
	yield from _send('- kill')
@@ -0,0 +1,265 @@ | |||
from datetime import datetime, timedelta | |||
import os | |||
import random | |||
import socket | |||
import string | |||
import sys | |||
import time | |||
from libkiara import AbandonShip | |||
# Client identification sent to anidb with the AUTH command.
CLIENT_NAME = "kiara"
CLIENT_VERSION = "4"
CLIENT_ANIDB_PROTOVER = "3"
# anidb UDP API reply codes (the first word of each reply).
LOGIN_ACCEPTED = '200'
LOGIN_ACCEPTED_OUTDATED_CLIENT = '201'
LOGGED_OUT = '203'
MYLIST_ENTRY_ADDED = '210'
FILE = '220'
PONG = '300'
FILE_ALREADY_IN_MYLIST = '310'
MYLIST_ENTRY_EDITED = '311'
NO_SUCH_FILE = '320'
NOT_LOGGED_IN = '403'
LOGIN_FAILED = '500'
LOGIN_FIRST = '501'
ACCESS_DENIED = '502'
CLIENT_VERSION_OUTDATED = '503'
CLIENT_BANNED = '504'
ILLEGAL_INPUT = '505'
INVALID_SESSION = '506'
BANNED = '555'
UNKNOWN_COMMAND = '598'
INTERNAL_SERVER_ERROR = '600'
OUT_OF_SERVICE = '601'
SERVER_BUSY = '602'
# Codes that make us give up entirely (raise AbandonShip) ...
DIE_MESSAGES = [
	BANNED, ILLEGAL_INPUT, UNKNOWN_COMMAND,
	INTERNAL_SERVER_ERROR, ACCESS_DENIED
]
# ... codes that mean "anidb is unavailable, try again later" ...
LATER_MESSAGES = [OUT_OF_SERVICE, SERVER_BUSY]
# ... and codes that mean we have to authenticate again.
REAUTH_MESSAGES = [LOGIN_FIRST, INVALID_SESSION]
# This will get overridden from kiarad.py
config = None
# Current anidb session key; None until AUTH succeeds (see _connect).
session_key = None
# UDP socket to anidb; created lazily by _connect().
sock = None
# anidb specifies a hard limit that no more than one message every 2 seconds
# may be sent, and a soft one at one message every 4 seconds over an 'extended
# period'. 3 seconds is... faster than 4...
message_interval = timedelta(seconds=3)
next_message = datetime.now()
# Wrap outputting
OUTPUT = None  # current message sink callable; installed via set_output()
output_queue = list()  # messages held back while no working sink is set
def output(*args):
	"""Deliver a message (as a list of its args) to the current sink.

	NOTE(review): when OUTPUT raises TypeError — including a TypeError from
	*inside* a valid sink — the sink is dropped and the message queued for
	the next set_output(). When OUTPUT is None the message is silently
	discarded (nothing queues it); confirm that is intended.
	"""
	global OUTPUT
	try:
		if OUTPUT:
			OUTPUT(list(args))
	except TypeError: # OUTPUT is not a function.
		OUTPUT = None
		output_queue.append(args)
def set_output(o):
	"""Install o as the message sink, first draining any queued messages."""
	global OUTPUT
	# Flush messages that piled up while delivery was failing.
	while output_queue:
		queued = output_queue.pop(0)
		o(queued)
	OUTPUT = o
def tag_gen(length=5):
	"""Return a random ASCII-letter string used to tag anidb requests, so
	replies cannot get mixed up."""
	letters = string.ascii_letters
	return ''.join(random.choice(letters) for _ in range(length))
def _comm(command, **kwargs):
	"""Send one API command to anidb and return (code, data) from its reply.

	Rate-limits to one message per message_interval, tags the request so a
	mismatched reply can be detected, retries once on a socket timeout, and
	transparently re-authenticates (then retries) on session expiry.

	Raises AbandonShip on bans, fatal reply codes, or repeated timeouts.
	"""
	global next_message, session_key
	# BUG FIX: was `sock != None`; identity check is the correct idiom.
	assert sock is not None
	# Honor the rate limit before sending anything.
	wait = (next_message - datetime.now()).total_seconds()
	if wait > 0:
		time.sleep(wait)
	next_message = datetime.now() + message_interval
	# Add a tag
	tag = tag_gen()
	kwargs['tag'] = tag
	# And the session key, if we have one
	if session_key:
		kwargs['s'] = session_key
	# Send shit.
	shit = (command + " " + "&".join(
		map(lambda k: "%s=%s" % (k, kwargs[k]), kwargs)))
	# BUG FIX: was `command is not 'AUTH'` — identity comparison against a
	# string literal is implementation-defined; use equality. (Also keeps
	# the password out of the debug log.)
	output('debug', '_',
		'--> %s' % (shit if command != 'AUTH' else 'AUTH (hidden)'))
	sock.send(shit.encode('ascii'))
	# Receive shit
	while True:
		try:
			reply = sock.recv(1400).decode().strip()
		except socket.timeout:
			# Wait...
			output('status', 'socket_timeout')
			time.sleep(10)
			try:
				reply = sock.recv(1400).decode().strip()
			except socket.timeout:
				# Retry it only once. If this fails, anidb is either broken, or
				# blocking us
				output('error', 'socket_timeout_again')
				raise AbandonShip
		output('debug', '_', '<-- %s' % reply)
		# A 555 ban notice may arrive with or without a leading tag.
		if reply[0:3] == "555" or reply[6:9] == '555':
			output('error', 'banned', reply)
			raise AbandonShip
		return_tag, code, data = reply.split(' ', 2)
		if return_tag == tag:
			break
		else:
			output('debug', 'wrong_tag')
			# If this was a transmission error, or an anidb error, we will hit
			# a timeout and die...
	if code in DIE_MESSAGES:
		output('error', 'oh_no', code, data)
		raise AbandonShip
	if code in LATER_MESSAGES:
		output('error', 'anidb_busy')
		raise AbandonShip
	if code in REAUTH_MESSAGES:
		output('status', 'login_again', code, data)
		_connect(force=True)
		# Retry the original command on the fresh session.
		return _comm(command, **kwargs)
	return code, data
def ping(redirect):
	"""Send a PING to anidb (no auth needed); return True on PONG."""
	set_output(redirect.reply)
	_connect(needs_auth=False)
	code, reply = _comm('PING')
	if code != PONG:
		output('error', 'unexpected_reply', code, reply)
		return False
	return True
def _connect(force=False, needs_auth=True):
	"""Ensure the UDP socket exists and, when needed, that we are logged in.

	force re-authenticates even with an existing session key; needs_auth=False
	only sets up the socket (enough for PING).

	Raises AbandonShip on login failure; exits outright when banned.
	"""
	global session_key, sock
	if not sock:
		sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
		sock.connect((config['host'], int(config['port'])))
		sock.settimeout(10)
	# If we have a session key, we assume that we are connected.
	if (not session_key and needs_auth) or force:
		output('status', 'logging_in')
		code, key = _comm(
			'AUTH',
			user=config['user'],
			protover=CLIENT_ANIDB_PROTOVER,
			client=CLIENT_NAME,
			clientver=CLIENT_VERSION,
			**{'pass': config['pass']} # We cannot use pass as a name :(
		)
		if code == LOGIN_ACCEPTED_OUTDATED_CLIENT:
			output('status', 'login_accepted_outdated_client')
		elif code == LOGIN_ACCEPTED:
			pass
		# BUG FIX: the next two conditions used `code in CONSTANT`, which is
		# a substring test against a string constant (e.g. '0' in '503' is
		# True), not an equality check.
		elif code == CLIENT_VERSION_OUTDATED:
			output('error', 'kiara_outdated')
			raise AbandonShip
		elif code == CLIENT_BANNED:
			output('error', 'kiara_banned')
			sys.exit()
		else:
			output('error', 'login_unexpected_return', code, key)
			raise AbandonShip
		session_key = key.split()[0]
		output('status', 'login_successful')
		output('debug', 'login_session_key', session_key)
def _type_map(ext):
	"""Map a file extension to kiara's coarse type ('vid', 'sub' or 'snd');
	report and return None for anything unknown."""
	known = {
		'vid': ('mpg', 'mpeg', 'avi', 'mkv', 'ogm', 'mp4', 'wmv'),
		'sub': ('ssa', 'sub', 'ass'),
		'snd': ('flac', 'mp3'),
	}
	for kind, extensions in known.items():
		if ext in extensions:
			return kind
	output('error', 'unknown_file_extension', str(ext))
	return None
def load_info(thing, redirect):
	"""Fetch file and anime metadata for thing from anidb, in place.

	Looks the file up by fid when known, otherwise by (size, ed2k hash).
	On success the metadata fields are filled in and thing.dirty is set so
	the record gets persisted.
	"""
	set_output(redirect.reply)
	_connect()
	# Bitmasks selecting which file (fmask) / anime (amask) fields anidb
	# includes in the reply; the pop() sequence below must match this order.
	lookup = {
		'fmask': '48080100a0',
		'amask': '90808040',
	}
	if thing.fid:
		lookup['fid'] = thing.fid
	else:
		lookup['size'] = thing.size
		lookup['ed2k'] = thing.hash
	code, reply = _comm('FILE', **lookup)
	if code == NO_SUCH_FILE:
		output('error', 'anidb_file_unknown')
	elif code == FILE:
		# Fields are '|'-separated; reverse so pop() consumes them in order.
		parts = reply.split('\n')[1].split('|')
		parts.reverse()
		thing.fid = int(parts.pop())
		thing.aid = int(parts.pop())
		thing.mylist_id = int(parts.pop())
		thing.crc32 = parts.pop()
		thing.file_type = _type_map(parts.pop())
		output('debug', 'file_type', thing.file_type)
		thing.added = parts.pop() == '1'
		thing.watched = parts.pop() == '1'
		thing.anime_total_eps = int(parts.pop())
		thing.anime_type = parts.pop()
		thing.anime_name = parts.pop()
		thing.ep_no = parts.pop()
		thing.group_name = parts.pop()
		thing.updated = datetime.now()
		thing.dirty = True
def add(thing, redirect):
	"""Add thing to the user's anidb mylist; already-listed files are
	treated as success. Records the resulting mylist id on thing."""
	set_output(redirect.reply)
	_connect()
	code, reply = _comm('MYLISTADD',
		fid=str(thing.fid),
		state='1')
	if code == MYLIST_ENTRY_ADDED:
		thing.mylist_id = reply.split('\n')[1]
	elif code == FILE_ALREADY_IN_MYLIST:
		thing.mylist_id = reply.split('\n')[1].split('|')[0]
	else:
		output('error', 'unexpected_reply', code, reply)
		return
	thing.added = True
	thing.dirty = True
	output('success', 'file_added')
def watch(thing, redirect):
	"""Mark thing's mylist entry as watched on anidb."""
	set_output(redirect.reply)
	_connect()
	code, reply = _comm('MYLISTADD',
		lid=str(thing.mylist_id),
		edit='1', state='1', viewed='1')
	if code != MYLIST_ENTRY_EDITED:
		output('error', 'unexpected_reply', code, reply)
		return
	thing.watched = True
	thing.dirty = True
	output('success', 'file_marked_watched')
@@ -0,0 +1,248 @@ | |||
import os.path | |||
import sys | |||
import shutil | |||
from datetime import datetime, timedelta | |||
import socketserver | |||
import socket # for the exceptions | |||
from libkiara import ed2khash, database, anidb, AbandonShip | |||
# Backend configuration; populated by serve() from the frontend's _config.
config = {}
# Define a dump object to pass around. | |||
class KiaraFile(object):
	"""A file kiara is working on, plus everything known about it.

	Plain attribute bag passed between the hasher, the database layer and
	the anidb layer; each fills in more of the fields below.
	"""
	def __init__(self, name):
		self.file = open(name, 'rb')  # kept open for ed2k hashing
		self.file_name = name  # full path as given by the client
		self.name = os.path.basename(name)
		self.size = os.path.getsize(name)
		self.dirty = False # Should this be saved.
		self.updated = None  # when the cached metadata was last refreshed
		self.in_anidb = True
		self.hash = None  # ed2k hash, computed lazily
		self.watched = False
		# anidb / mylist identifiers and metadata.
		self.fid = None
		self.mylist_id = None
		self.aid = None
		self.crc32 = None
		# BUG FIX: this was `self.type = None`, but every consumer
		# (misses_info below, database.load/save, anidb.load_info) uses
		# `file_type` — so a file not yet in the database crashed with
		# AttributeError in misses_info().
		self.file_type = None
		self.anime_total_eps = None
		self.anime_name = None
		self.anime_type = None
		self.ep_no = None
		self.group_name = None
		self.added = False
	def misses_info(self):
		"""Return True while any metadata field is still unknown."""
		return (
			self.fid == None or
			self.mylist_id == None or
			self.aid == None or
			self.crc32 == None or
			self.file_type == None or
			self.anime_total_eps == None or
			self.anime_name == None or
			self.anime_type == None or
			self.ep_no == None or
			self.group_name == None)
	def is_movie(self):
		"""Return True for movies and single-episode OVA/Web releases."""
		return (
			self.anime_type == 'Movie' or
			self.anime_type == 'OVA' and self.anime_total_eps == 1 or
			self.anime_type == 'Web' and self.anime_total_eps == 1)
	def __str__(self):
		parts = [self.name]
		if self.hash:
			parts.append(self.hash)
		if self.dirty:
			parts.append('(unsaved)')
		return ' '.join(parts)
def makedirs(path):
	"""Create path and any missing parents; existing directories are fine.

	Replaces a hand-rolled component-by-component loop — os.makedirs with
	exist_ok=True (available since Python 3.2) does exactly this.
	"""
	os.makedirs(os.path.abspath(path), exist_ok=True)
def rmdirp(path):
	"""Remove path and then each successive parent for as long as they are
	empty, yielding a status message per directory removed."""
	while path and not os.listdir(path):
		yield ['status', 'removing_empty_dir', path]
		os.rmdir(path)
		path = os.path.dirname(path)
def pad(length, num):
	"""Left-pad the string num with zeros to at least `length` characters,
	but only when it parses as an integer (special episode numbers such as
	'S1' pass through untouched)."""
	try:
		int(num)
	except ValueError:
		return num
	return num.rjust(length, '0')
class Handler(socketserver.BaseRequestHandler):
	"""Serves one client request on the backend's unix socket.

	The client sends a single line '<actions> <argument>'. actions is '-'
	for meta commands (ping, dups, forget, kill) or a combination of the
	one-letter file action codes; the argument is then a file path. Every
	reply stream is terminated with '---end---'.
	"""
	def __init__(self, *args, **kwargs):
		# Replies that could not be delivered to a vanished client; they
		# are flushed to the next client that connects.
		self.queued_messages = []
		return super().__init__(*args, **kwargs)
	def reply(self, message, catch_fails=True):
		"""Send one message (a string, or a list/tuple of lines)."""
		if type(message) == tuple:
			message = list(message)
		if type(message) == list:
			message = '\n'.join(message)
		self.write(message + '\n', catch_fails)
	def write(self, message, catch_fails=True):
		"""Send raw text; when the client is gone, optionally queue it."""
		try:
			self.request.send(bytes(message+'\n', 'UTF-8'))
		except socket.error:
			if catch_fails:
				self.queued_messages.append(message)
	def handle(self):
		# First deliver anything a previous client missed.
		while self.queued_messages:
			self.reply(self.queued_messages.pop(0), False)
		data = self.request.recv(1024).strip().decode('UTF-8')
		act, file_name = data.split(' ', 1)
		if act == '-':
			# Non-file related commands
			if file_name == 'ping':
				if anidb.ping(self):
					self.reply(['success', 'anidb_ping_ok'])
				else:
					self.reply(['error', 'anidb_ping_error'])
			if file_name == 'dups':
				dups = False
				for line in database.find_duplicates():
					dups = True
					self.reply(line)
				if not dups:
					self.reply(['success', 'dups_none'])
			if file_name.startswith('forget'):
				for fid in file_name.split(' ')[1:]:
					for line in database.forget(int(fid)):
						self.reply(line)
			if file_name == 'kill':
				self.reply(['status', 'backend_shutting_down'])
				# NOTE: shutdown is supplied by the ActualHandler subclass
				# built in serve().
				self.shutdown()
		else:
			try:
				# File related commands
				file = KiaraFile(file_name)
				# Load file info.
				database.load(file)
				if not file.hash:
					self.reply(['status', 'hashing_file', file.name])
					file.hash = ed2khash.hash(file.file)
					database.load(file)
				# Refresh from anidb when data is missing, or on 'u' when
				# the cached record is more than a week old.
				if file.misses_info() or not file.updated or \
					'u' in act and \
					file.updated < datetime.now() - timedelta(days=7):
					anidb.load_info(file, self)
				if not file.fid:
					self.reply(['error', 'anidb_file_unknown'])
				else:
					if (not file.added) and 'a' in act:
						self.reply(['status', 'anidb_adding_file',
							file.anime_name, str(file.ep_no)])
						anidb.add(file, self)
					if not file.watched and 'w' in act:
						self.reply(['status', 'anidb_marking_watched',
							file.anime_name, str(file.ep_no)])
						anidb.watch(file, self)
					if 'o' in act:
						anime_name = file.anime_name.replace('/', '_')
						dir = os.path.join(os.path.expanduser((
							config['basepath_movie']
							if file.is_movie()
							else config['basepath_series'])), anime_name)
						self.reply(['debug', 'file_type_location',
							file.anime_type, dir])
						makedirs(os.path.normpath(dir))
						new_name = None
						if file.anime_total_eps == "1":
							new_name = "[%s] %s [%s]%s" % (
								file.group_name, anime_name, file.crc32,
								os.path.splitext(file_name)[1])
						else:
							new_name = "[%s] %s - %s [%s]%s" % (
								file.group_name, anime_name,
								pad(
									len(str(file.anime_total_eps)),
									str(file.ep_no)),
								file.crc32, os.path.splitext(file_name)[1])
						new_path = os.path.join(dir, new_name)
						if file_name == new_path:
							self.reply(['status', 'file_already_organized',
								new_name])
						else:
							if os.path.isfile(new_path) and not 'x' in act:
								self.reply(['error', 'file_exists', new_path])
							else:
								if 'c' in act:
									shutil.copyfile(file_name, new_path)
									self.reply(['success', 'file_copied',
										file_name, new_path])
								else:
									shutil.move(file_name, new_path)
									self.reply(['success', 'file_moved',
										file_name, new_path])
									file.name = new_name
									file.dirty = True
									for r in rmdirp(os.path.dirname(file_name)):
										self.reply(r)
				database.save(file)
			except SystemExit as e:
				self.request.sendall(bytes('---end---', 'UTF-8'))
				self.request.close()
				# BUG FIX: this was `sys.exit(status)` — `status` is not
				# defined anywhere, so the handler died with a NameError
				# instead of propagating the exit; re-raise the original
				# exit code.
				sys.exit(e.code)
			except AbandonShip:
				self.reply(['error', 'abandon_ship'])
				# Ignore the actual error, the connection will be closed now
		self.request.sendall(bytes('---end---', 'UTF-8'))
def serve(cfg):
	"""Run the backend: bind the unix session socket and serve requests.

	Blocks until a client sends '- kill', which flips the run flag through
	ActualHandler.shutdown.
	"""
	global config
	config = cfg
	anidb.config = config
	database.connect(os.path.expanduser(config['database']), config['user'])
	# Remove a stale socket left over from a previous run, if any.
	try:
		os.remove(os.path.expanduser(config['session']))
	except: pass
	# Mutable flag shared with the handler so '- kill' can stop the loop.
	run = [True]
	def killer(r):
		r[0] = False
	class ActualHandler(Handler):
		# Gives Handler.handle() a working self.shutdown().
		def __init__(self, *args, **kwargs):
			self.shutdown = lambda: killer(run)
			return super().__init__(*args, **kwargs)
	server = socketserver.UnixStreamServer(
		os.path.expanduser(config['session']), ActualHandler)
	while run[0]:
		server.handle_request()
@@ -0,0 +1,212 @@ | |||
import sqlite3 | |||
from datetime import datetime | |||
# Module-level connection state, set by connect(): conn is the sqlite3
# connection, username scopes file_status rows, db keeps the database path
# for reconnecting.
conn, username, db = None, None, None
def connect(database, user):
	"""Open (or create) the sqlite database and remember the active user.

	Stores the connection, user and path in the module globals and makes
	sure all three tables exist.
	"""
	global conn, username, db
	conn = sqlite3.connect(database)
	username = user
	db = database # For reconnecting;
	cur = conn.cursor()
	# Create tables if they do not exist.
	cur.execute('''
	CREATE TABLE IF NOT EXISTS file (
		hash text,
		filename text,
		size integer,
		fid integer,
		aid integer,
		crc32 text,
		ep_no text,
		group_name text,
		file_type text,
		updated text
	)
	''')
	cur.execute('''
	CREATE TABLE IF NOT EXISTS file_status (
		fid integer,
		username text,
		watched boolean,
		mylist_id integer,
		updated text
	);
	''')
	cur.execute('''
	CREATE TABLE IF NOT EXISTS anime (
		aid integer,
		total_eps integer,
		name text,
		type text,
		updated text
	);
	''')
	conn.commit()
def _check_connection():
	"""Reconnect when the sqlite connection has gone stale.

	Runs a trivial query; on OperationalError the connection is closed and
	reopened via connect() with the remembered path and user.
	"""
	c = conn.cursor()
	try:
		c.execute('select 1 from file')
		c.fetchall()
	except sqlite3.OperationalError:
		conn.close()
		connect(db, username)
def load(thing):
	"""Fill thing in from the local cache (file, file_status, anime tables).

	Tries a (filename, size) lookup while the hash is unknown, then a
	(hash, size) lookup; follows up with this user's mylist status and the
	anime record. thing.updated is clamped to the oldest row used, so stale
	data triggers a refresh upstream.
	"""
	_check_connection()
	c = conn.cursor()
	# Lookup thing by name
	if not thing.hash:
		c.execute('''
		SELECT hash, fid, aid, crc32, ep_no, group_name, file_type, updated
		FROM file
		WHERE filename = ? AND size = ?
		''', (thing.name, thing.size))
		r = c.fetchone()
		if r:
			thing.hash, thing.fid, thing.aid, thing.crc32, thing.ep_no, \
				thing.group_name, thing.file_type = r[:7]
			thing.updated = datetime.strptime(r[7], '%Y-%m-%d %H:%M:%S.%f')
	# Lookup thing by hash
	if thing.hash:
		c.execute('''
		SELECT
			filename, fid, aid, crc32, ep_no, group_name, file_type, updated
		FROM file
		WHERE hash = ? AND size = ?
		''', (thing.hash, thing.size))
		r = c.fetchone()
		if not r:
			# This is a new thing
			thing.dirty = True
			return
		if r[0] != thing.name:
			# Known content under a new file name - remember the new name.
			thing.dirty = True
		thing.fid, thing.aid, thing.crc32, thing.ep_no, thing.group_name, \
			thing.file_type = r[1:7]
		thing.updated = datetime.strptime(r[7], '%Y-%m-%d %H:%M:%S.%f')
	if thing.fid:
		# Look up the status.
		c.execute('''
		SELECT watched, mylist_id, updated
		FROM file_status
		WHERE fid = ? AND username = ?
		''', (thing.fid, username))
		r = c.fetchone()
		if r:
			# BUG FIX: the columns were unpacked off by one — mylist_id got
			# the watched flag and watched got bool(mylist_id). A status row
			# only exists once the file was added (see save()), so its mere
			# presence means added.
			thing.watched = bool(r[0])
			thing.mylist_id = r[1]
			thing.added = True
			thing.updated = min(
				thing.updated,
				datetime.strptime(r[2], '%Y-%m-%d %H:%M:%S.%f'))
	if thing.aid:
		c.execute('''
		SELECT total_eps, name, type, updated
		FROM anime
		WHERE aid = ?
		''', (thing.aid, ))
		r = c.fetchone()
		if r:
			thing.anime_total_eps, thing.anime_name, thing.anime_type = r[:3]
			thing.updated = min(
				thing.updated,
				datetime.strptime(r[3], '%Y-%m-%d %H:%M:%S.%f'))
def save(thing):
	"""Persist thing into the cache, replacing any previous rows.

	No-op unless thing.dirty. The file row is replaced outright; the
	per-user status row is only written when the file was added to mylist;
	the anime row is refreshed unconditionally.
	"""
	_check_connection()
	if thing.dirty:
		c = conn.cursor()
		# Delete-then-insert acts as an upsert for the file row.
		c.execute('''
		DELETE FROM file
		WHERE hash = ? AND size = ? OR fid = ?
		''', (thing.hash, thing.size, thing.fid))
		c.execute('''
		INSERT INTO file (
			hash, filename, size, fid, aid, crc32, ep_no,
			group_name, file_type, updated)
		VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
		''', (
			thing.hash, thing.name, thing.size, thing.fid, thing.aid,
			thing.crc32, thing.ep_no, thing.group_name, thing.file_type,
			str(thing.updated)))
		c.execute('''
		DELETE FROM file_status
		WHERE fid = ? AND username = ?
		''', (thing.fid, username))
		# A status row exists only for files in this user's mylist.
		if thing.added:
			c.execute('''
			INSERT INTO file_status (
				fid, username, watched, mylist_id, updated)
			VALUES (?, ?, ?, ?, ?)
			''', (
				thing.fid, username, thing.watched, thing.mylist_id,
				str(thing.updated)))
		c.execute('''
		DELETE FROM anime
		WHERE aid = ?
		''', (thing.aid, ))
		c.execute('''
		INSERT INTO anime (aid, total_eps, name, type, updated)
		VALUES (?, ?, ?, ?, ?)
		''', (
			thing.aid, thing.anime_total_eps, thing.anime_name,
			thing.anime_type, str(thing.updated)))
		conn.commit()
def find_duplicates():
	"""Yield status messages describing duplicate episodes in the cache.

	Two files count as duplicates of an episode when they share aid and
	ep_no but have different hashes, and either has an unknown file type or
	both share the same type (so e.g. a video and its subtitle file do not
	count as duplicates of each other).
	"""
	_check_connection()
	c = conn.cursor()
	f = conn.cursor()
	c.execute('''
	SELECT DISTINCT a.aid, anime.name, a.ep_no
	FROM file a, file b, anime
	WHERE
		a.aid = b.aid AND
		a.aid = anime.aid AND
		a.ep_no = b.ep_no AND
		a.hash != b.hash AND (
			(a.file_type ISNULL) OR
			(b.file_type ISNULL) OR
			a.file_type = b.file_type
		)
	''')
	for aid, name, ep in c.fetchall():
		yield ['status', 'dups_for', name, str(ep)]
		# List every file we hold for that episode.
		f.execute('''
		SELECT fid, filename, file_type
		FROM file
		WHERE aid = ? and ep_no = ?
		''', (aid, ep))
		for fid, name, type in f.fetchall():
			if not type:
				yield ['status', 'dup_no_type', str(fid), name]
			else:
				yield ['status', 'dup', str(fid), name, type]
def forget(fid):
	"""Drop this user's status row for fid and, when no other user still
	references it, the cached file row as well. Yields protocol messages."""
	_check_connection()
	cur = conn.cursor()
	cur.execute(
		'DELETE FROM file_status WHERE fid = ? AND username = ?',
		(fid, username))
	cur.execute(
		'SELECT count(*) FROM file_status WHERE fid = ?',
		(fid,))
	still_in_use = cur.fetchone() != (0,)
	if still_in_use:
		yield ['error', 'dups_forget_in_use']
	else:
		cur.execute('DELETE FROM file WHERE fid = ?', (fid,))
		yield ['status', 'dups_forgot', str(fid)]
	conn.commit()
@@ -0,0 +1,41 @@ | |||
#!/usr/bin/env python3 | |||
''' implementation of ed2k-hashing in python. original code stolen from | |||
http://www.radicand.org/blog/orz/2010/2/21/edonkey2000-hash-in-python/''' | |||
import hashlib | |||
import os.path | |||
from functools import reduce | |||
# Factory for fresh MD4 hash objects (the digest ed2k is built on).
# NOTE(review): 'md4' is a legacy algorithm and may be unavailable in some
# OpenSSL builds, making hashlib.new('md4') raise — verify on target systems.
_md4 = hashlib.new('md4').copy
def _chuncks(f):
	"""Yield the contents of f in 9,728,000-byte blocks (the ed2k chunk
	size), stopping at end of file."""
	block = f.read(9728000)
	while block:
		yield block
		block = f.read(9728000)
def _md4_hash(data):
	"""Return a fresh MD4 hash object updated with data."""
	digest = _md4()
	digest.update(data)
	return digest
def hash(file):
	"""Return the ed2k hash of file (an open binary file object).

	A file of one chunk hashes to that chunk's MD4; otherwise the result
	is the MD4 of the concatenated per-chunk MD4 digests.
	"""
	chunk_hashes = [_md4_hash(chunk) for chunk in _chuncks(file)]
	if len(chunk_hashes) == 1:
		return chunk_hashes[0].hexdigest()
	combined = b''.join(h.digest() for h in chunk_hashes)
	return _md4_hash(combined).hexdigest()
def link(file):
	"""Return the ed2k:// link for the given file object."""
	base_name = os.path.basename(file.name)
	size = os.path.getsize(file.name)
	return "ed2k://|file|%s|%d|%s|" % (base_name, size, hash(file))