#!/usr/bin/python

'''
ttbp: tilde town blogging platform
(also known as the feels engine)
a console-based blogging program developed for tilde.town
copyright (c) 2016 ~endorphant (endorphant@tilde.town)

core.py:
this is a core handler for some ttbp standalone/output functions

Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:

The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

the complete codebase is available at:
https://github.com/modgethanc/ttbp
'''

import os
import time
import subprocess
import re
import mistune
import json

import chatter

SOURCE = os.path.join("/home", "endorphant", "projects", "ttbp", "bin")
USER = os.path.basename(os.path.expanduser("~"))
PATH = os.path.join("/home", USER, ".ttbp")

LIVE = "http://tilde.town/~"
WWW = os.path.join(PATH, "www")
CONFIG = os.path.join(PATH, "config")
DATA = os.path.join(PATH, "entries")
FEED = os.path.join(SOURCE, "www", "index.html")
DOCS = os.path.join(SOURCE, "www", "help.html")
NOPUB = os.path.join(CONFIG, "nopub")
SETTINGS = {}

HEADER = ""
FOOTER = ""
FILES = []

def load(ttbprc={}):
    '''
    get all them globals set up!!
    '''

    global HEADER
    global FOOTER
    global SETTINGS

    HEADER = open(os.path.join(CONFIG, "header.txt")).read()
    FOOTER = open(os.path.join(CONFIG, "footer.txt")).read()
    SETTINGS = ttbprc

    load_files()
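
# usage sketch (illustrative; not called from within this module): the caller
# is expected to hand load() the decoded contents of the user's ttbprc, e.g.
#
#   load(json.load(open(os.path.join(CONFIG, "ttbprc"))))
#
# after which HEADER/FOOTER hold the user's header/footer text and SETTINGS
# holds their preferences.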

def reload_ttbprc(ttbprc={}):
    '''
    reloads new ttbprc into current session
    '''

    global SETTINGS

    SETTINGS = ttbprc

def load_files():
    '''
    file loader

    * reads user's nopub file
    * loads all valid filenames that are not excluded in nopub to global files list
    '''

    global FILES

    FILES = []

    for filename in os.listdir(DATA):
        if nopub(filename):
            link = os.path.join(WWW, os.path.splitext(os.path.basename(filename))[0]+".html")
            if os.path.exists(link):
                subprocess.call(["rm", link])
            continue
        filename = os.path.join(DATA, filename)
        if os.path.isfile(filename) and valid(filename):
            FILES.append(filename)

    FILES.sort()
    FILES.reverse()


## html outputting

def write(outurl="default.html"):
    '''
    main page renderer

    * takes everything currently in FILES and writes a single non-paginated html
      file
    * calls write_page() on each file to make permalinks
    '''

    outfile = open(os.path.join(WWW, outurl), "w")

    outfile.write("<!--generated by the tilde.town blogging platform on "+time.strftime("%d %B %y")+"\nhttp://tilde.town/~endorphant/ttbp/-->\n\n")

    for line in HEADER:
        outfile.write(line)

    outfile.write("\n")

    for filename in FILES:
        write_page(filename)
        for line in write_entry(filename):
            outfile.write(line)

        outfile.write("\n")

    for line in FOOTER:
        outfile.write(line)

    outfile.close()

    return os.path.join(LIVE+USER, os.path.basename(os.path.realpath(WWW)), outurl)
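
# return value sketch (illustrative): with the globals above, a call like
#
#   write("index.html")
#
# renders every entry in FILES into ~/.ttbp/www/index.html and returns the
# public url, roughly "http://tilde.town/~<user>/<www dir>/index.html"
# (the middle segment is whatever directory WWW actually resolves to).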

def write_page(filename):
    '''
    permalink generator

    * makes a page out of a single entry for permalinking, using filename/date as
      url
    '''

    outurl = os.path.join(WWW, "".join(parse_date(filename))+".html")
    outfile = open(outurl, "w")

    outfile.write("<!--generated by the tilde.town blogging platform on "+time.strftime("%d %B %y")+"\nhttp://tilde.town/~endorphant/ttbp/-->\n\n")

    for line in HEADER:
        outfile.write(line)

    outfile.write("\n")

    for line in write_entry(filename):
        outfile.write(line)

    outfile.write("\n")

    for line in FOOTER:
        outfile.write(line)

    outfile.close()

    return outurl
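
# path sketch (illustrative): for an entry file named 20160510.txt this writes
# the permalink page to ~/.ttbp/www/20160510.html and returns that path.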

def write_entry(filename):
    '''
    entry text generator

    * dump given file into entry format by parsing file as markdown
    * return as list of strings
    '''

    date = parse_date(filename)

    entry = [
        "\t\t<p><a name=\""+date[0]+date[1]+date[2]+"\"></a><br /><br /></p>\n",
        "\t\t<div class=\"entry\">\n",
        "\t\t\t<h5><a href=\"#"+date[0]+date[1]+date[2]+"\">"+date[2]+"</a> "+chatter.month(date[1])+" "+date[0]+"</h5>\n"
        #"\t\t\t<P>"
    ]

    raw = []
    rawfile = open(os.path.join(DATA, filename), "r")

    for line in rawfile:
        raw.append(line)
    rawfile.close()

    entry.append("\t\t\t"+mistune.markdown("".join(raw), escape=False, hard_wrap=False))

    #for line in raw:
    #    entry.append(line+"\t\t\t")
    #    if line == "\n":
    #        entry.append("</p>\n\t\t\t<p>")

    #entry.append("</p>\n")

    entry.append("\t\t\t<p style=\"font-size:.6em; font-color:#808080; text-align: right;\"><a href=\""+"".join(date)+".html\">permalink</a></p>\n")
    entry.append("\n\t\t</div>\n")

    return entry
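
# shape sketch (illustrative): the returned list is html fragments, roughly
#
#   [anchor <p> for #YYYYMMDD, opening <div class="entry">, dated <h5> heading,
#    markdown-rendered entry body, permalink <p>, closing </div>]
#
# so callers can stream it straight into a page with outfile.write().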

def write_global_feed(blogList):
    '''
    main ttbp index printer

    * sources README.md for documentation
    * takes incoming list of formatted blog links for all publishing blogs and
      prints to blog feed
    '''

    outfile = open(FEED, "w")

    ## header
    outfile.write("""\
<!DOCTYPE html PUBLIC \"-//W3C//DTD HTML 3.2//EN\">
<html>
<head>
<title>tilde.town feels engine</title>
<link rel=\"stylesheet\" href=\"style.css\" />
</head>
<body>
<div class="meta">
<h1>tilde.town feels engine</h1>

<h2><a href="https://github.com/modgethanc/ttbp">github
repo</a> | <a
href="http://tilde.town/~endorphant/blog/20160510.html">state
of the ttbp</a></h2>
<!--<p>curious? run <b>~endorphant/bin/ttbp</b> while logged in to tilde.town.</p>
<p>it's still a little volatile. let me know if anything breaks.</p>---></div>
<p> </p>
""")

    ## docs
    outfile.write("""\
<div class="docs">""")
    outfile.write(mistune.markdown(open(os.path.join(SOURCE, "..", "README.md"), "r").read()))
    outfile.write("""\
</div>""")

    ## feed
    outfile.write("""\
<p> </p>
<div class=\"feed\">
<h3>live feels-sharing:</h3>
<ul>""")
    for blog in blogList:
        outfile.write("""
<li>"""+blog+"""</li>\
""")

    ## footer
    outfile.write("""
</ul>
</div>
</body>
</html>
""")

    outfile.close()
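
# input sketch (illustrative): each element of blogList is already a formatted
# html link, as assembled in www_neighbors(), e.g.
#
#   '<a href="http://tilde.town/~someuser/blog">~someuser</a> 2016-05-10 at 18:39 (utc-4)'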


## misc helpers

def meta(entries = None):
    '''
    metadata generator

    * takes a list of filenames and returns a 2d list:
      [0] absolute path
      [1] mtime
      [2] wc -w
      [3] timestamp "YYYY-MM-DD at HH:MM"
      [4] entry date YYYY-MM-DD
      [5] author

    * sorted in reverse date order by [4]
    '''

    # default to the current global FILES list; a mutable default argument
    # would go stale after load_files() rebinds FILES
    if entries is None:
        entries = FILES

    meta = []

    for filename in entries:
        mtime = os.path.getmtime(filename)
        try:
            wc = subprocess.check_output(["wc", "-w", filename], stderr=subprocess.STDOUT).split()[0]
        except subprocess.CalledProcessError:
            wc = "???"
        timestamp = time.strftime("%Y-%m-%d at %H:%M", time.localtime(mtime))
        date = "-".join(parse_date(filename))
        author = os.path.split(os.path.split(os.path.split(os.path.split(filename)[0])[0])[0])[1]

        meta.append([filename, mtime, wc, timestamp, date, author])

    meta.sort(key = lambda entry: entry[4])
    meta.reverse()

    return meta
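
# row sketch (illustrative values): for an entry at
# /home/endorphant/.ttbp/entries/20160510.txt, one row of the returned list
# looks roughly like
#
#   ["/home/endorphant/.ttbp/entries/20160510.txt", 1462905540.0, "245",
#    "2016-05-10 at 18:39", "2016-05-10", "endorphant"]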

def valid(filename):
    '''
    filename validator

    * check if the filename is YYYYMMDD.txt
    '''

    filesplit = os.path.splitext(os.path.basename(filename))

    if filesplit[1] != ".txt":
        return False

    pattern = r'^((19|20)\d{2})(0[1-9]|1[0-2])(0[1-9]|1\d|2\d|3[01])$'

    if not re.match(pattern, filesplit[0]):
        return False

    return True
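
# behavior sketch (illustrative): only date-named .txt entries pass, e.g.
#
#   valid("20160510.txt")   # True
#   valid("20161301.txt")   # False (month 13 fails the regex)
#   valid("20160510.html")  # False (wrong extension)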

def parse_date(file):
    '''
    parses date out of pre-validated filename

    * assumes a filename of YYYYMMDD.txt
    * returns a list:
      [0] 'YYYY'
      [1] 'MM'
      [2] 'DD'
    '''

    rawdate = os.path.splitext(os.path.basename(file))[0]

    date = [rawdate[0:4], rawdate[4:6], rawdate[6:]]

    return date
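
# example (illustrative):
#
#   parse_date("/home/endorphant/.ttbp/entries/20160510.txt")
#   # -> ['2016', '05', '10']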

def find_ttbps():
    '''
    returns a list of users with a ttbp by checking for a valid ttbprc
    '''

    users = []

    for townie in os.listdir("/home"):
        if os.path.exists(os.path.join("/home", townie, ".ttbp", "config", "ttbprc")):
            users.append(townie)

    return users

def publishing(username = USER):
    '''
    checks .ttbprc for whether or not user opted for www publishing
    '''

    ttbprc = {}

    if username == USER:
        ttbprc = SETTINGS

    else:
        ttbprc = json.load(open(os.path.join("/home", username, ".ttbp", "config", "ttbprc")))

    return ttbprc.get("publishing")
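
# config sketch (assumed shape, based only on the keys this module reads): a
# ttbprc is json with a "publishing" flag and a "publish dir" name, e.g.
#
#   {"publishing": true, "publish dir": "blog"}
#
# other keys may exist; only these two are consumed here and in www_neighbors().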

def www_neighbors():
    '''
    gathers users with publishing turned on and prepares their feed links for www output
    '''

    userList = []

    for user in find_ttbps():
        if not publishing(user):
            continue

        userRC = json.load(open(os.path.join("/home", user, ".ttbp", "config", "ttbprc")))

        url = ""
        if userRC["publish dir"]:
            url = LIVE+user+"/"+userRC["publish dir"]

        lastfile = ""
        files = os.listdir(os.path.join("/home", user, ".ttbp", "entries"))
        files.sort()
        for filename in files:
            if valid(filename):
                lastfile = os.path.join("/home", user, ".ttbp", "entries", filename)

        if lastfile:
            last = os.path.getctime(lastfile)
            timestamp = time.strftime("%Y-%m-%d at %H:%M", time.localtime(last)) + " (utc"+time.strftime("%z")[0]+time.strftime("%z")[2]+")"
        else:
            timestamp = ""
            last = 0

        userList.append(["<a href=\""+url+"\">~"+user+"</a> "+timestamp, last])

    # sort users by most recent entry
    userList.sort(key = lambda userdata:userdata[1])
    userList.reverse()
    sortedUsers = []
    for user in userList:
        sortedUsers.append(user[0])

    write_global_feed(sortedUsers)

def nopub(filename):
    '''
    checks to see if given filename is in user's NOPUB
    '''

    exclude = []

    if os.path.isfile(NOPUB):
        for line in open(NOPUB, "r"):
            exclude.append(line.rstrip())

    return os.path.basename(filename) in exclude
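
# file sketch (illustrative): the nopub file at ~/.ttbp/config/nopub lists one
# entry filename per line; with a line reading "20160510.txt",
#
#   nopub("/home/endorphant/.ttbp/entries/20160510.txt")   # True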

#############
#############
#############

def test():
    load()

    metaTest = meta()

    for x in metaTest:
        print(x)