2016-04-30 01:56:40 +00:00
|
|
|
#!/usr/bin/python
|
|
|
|
|
2016-05-22 02:18:25 +00:00
|
|
|
'''
|
|
|
|
ttbp: tilde town blogging platform
|
|
|
|
(also known as the feels engine)
|
|
|
|
a console-based blogging program developed for tilde.town
|
|
|
|
copyright (c) 2016 ~endorphant (endorphant@tilde.town)
|
|
|
|
|
|
|
|
core.py:
|
|
|
|
this is a core handler for some ttbp standalone/output functions
|
|
|
|
|
2016-06-15 01:51:49 +00:00
|
|
|
Permission is hereby granted, free of charge, to any person obtaining
|
|
|
|
a copy of this software and associated documentation files (the
|
|
|
|
"Software"), to deal in the Software without restriction, including
|
|
|
|
without limitation the rights to use, copy, modify, merge, publish,
|
|
|
|
distribute, sublicense, and/or sell copies of the Software, and to
|
|
|
|
permit persons to whom the Software is furnished to do so, subject to
|
|
|
|
the following conditions:
|
|
|
|
|
|
|
|
The above copyright notice and this permission notice shall be
|
|
|
|
included in all copies or substantial portions of the Software.
|
|
|
|
|
|
|
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
|
|
|
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
|
|
|
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
|
|
|
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
|
|
|
|
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
|
|
|
|
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
|
|
|
|
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
2016-05-22 02:18:25 +00:00
|
|
|
|
|
|
|
the complete codebase is available at:
|
|
|
|
https://github.com/modgethanc/ttbp
|
|
|
|
'''
|
|
|
|
|
2016-04-30 01:56:40 +00:00
|
|
|
import json
import os
import re
import shutil
import subprocess
import time

import mistune

from . import chatter
from . import config
from . import gopher
from . import util
|
2017-11-21 06:02:10 +00:00
|
|
|
|
2018-01-02 03:33:21 +00:00
|
|
|
# location of the global feed page, written by write_global_feed()
FEED = os.path.join("/home", "endorphant", "public_html", "ttbp", "index.html")

# the user's .ttbprc settings dict, populated by load()/reload_ttbprc()
SETTINGS = {}

# html header/footer templates, read from the user's config by load()
HEADER = ""
FOOTER = ""
# the user's publishable feels (full paths), newest first; see load_files()
FILES = []
# entries the user has marked as not-for-publishing; see load_nopubs()
NOPUBS = []
|
2016-05-01 22:26:26 +00:00
|
|
|
|
2016-05-27 07:31:46 +00:00
|
|
|
def load(ttbprc=None):
    '''
    get all them globals set up!!

    * reads the user's header/footer templates into HEADER/FOOTER
    * stores the given .ttbprc settings dict into SETTINGS
    * loads the nopub list, then the user's feels files

    ttbprc: dict of settings parsed from the user's .ttbprc; defaults to
    an empty dict (a None sentinel is used so repeated calls never share
    a mutable default).
    '''

    global HEADER
    global FOOTER
    global SETTINGS

    # context managers so the template files are always closed
    with open(os.path.join(config.USER_CONFIG, "header.txt")) as headerfile:
        HEADER = headerfile.read()
    with open(os.path.join(config.USER_CONFIG, "footer.txt")) as footerfile:
        FOOTER = footerfile.read()

    SETTINGS = ttbprc if ttbprc is not None else {}

    load_nopubs()
    load_files()
|
|
|
|
|
2016-06-10 02:42:08 +00:00
|
|
|
def reload_ttbprc(ttbprc=None):
    '''
    reloads new ttbprc into current session

    ttbprc: dict of settings parsed from the user's .ttbprc; defaults to
    an empty dict (a None sentinel avoids the shared-mutable-default
    pitfall of the previous `ttbprc={}` signature).
    '''

    global SETTINGS

    SETTINGS = ttbprc if ttbprc is not None else {}
|
|
|
|
|
2018-03-16 02:41:22 +00:00
|
|
|
def get_files(feelsdir=config.MAIN_FEELS):
    """Return the user's feels in the given directory (defaults to the
    main feels dir) as a list of full paths, newest first.

    Entries on the user's nopub list are scrubbed from published
    locations instead of being collected."""

    collected = []

    for entry in os.listdir(feelsdir):
        if nopub(entry):
            # nopub'd feels get removed from public output, not listed
            unpublish_feel(entry)
            continue
        fullpath = os.path.join(feelsdir, entry)
        if os.path.isfile(fullpath) and valid(fullpath):
            collected.append(fullpath)

    collected.sort()
    collected.reverse()

    return collected
|
|
|
|
|
2018-03-16 02:41:22 +00:00
|
|
|
def load_files(feelsdir=config.MAIN_FEELS):
    '''
    file loader

    * refreshes the user's nopub list
    * calls get_files() to refresh the global FILES list for the given
      directory
    * re-renders the main html file and/or gopher output if the user is
      publishing
    '''

    global FILES

    load_nopubs()
    FILES = get_files(feelsdir)

    if not publishing():
        return

    write_html("index.html")
    if SETTINGS.get('gopher'):
        gopher.publish_gopher('feels', FILES)
|
2016-04-30 01:56:40 +00:00
|
|
|
|
2018-01-03 00:57:26 +00:00
|
|
|
def load_nopubs():
    """Load the list of the user's nopub entries into NOPUBS.

    Lines starting with "# " are the comment header written by ttbp and
    are skipped. Returns the number of nopub entries loaded.
    """

    global NOPUBS

    NOPUBS = []

    if os.path.isfile(config.NOPUB):
        # context manager guarantees the nopub file is closed
        with open(config.NOPUB, "r") as nopub_file:
            for line in nopub_file:
                if not line.startswith("# "):
                    NOPUBS.append(line.rstrip())

    return len(NOPUBS)
|
2016-05-27 07:21:11 +00:00
|
|
|
|
|
|
|
## html outputting
|
|
|
|
|
2018-03-16 02:41:22 +00:00
|
|
|
def write_html(outurl="default.html"):
    '''
    main page renderer

    * takes everything currently in FILES and writes a single
      non-paginated html file to the user's www directory
    * calls write_page() on each file to make permalinks

    returns the live url of the written page.
    '''

    # context manager so the output file is closed even on error
    with open(os.path.join(config.WWW, outurl), "w") as outfile:
        outfile.write("<!--generated by the tilde.town blogging platform on "
                      + time.strftime("%d %B %y")
                      + "\nhttp://tilde.town/~endorphant/ttbp/-->\n\n")

        # HEADER/FOOTER are plain strings; write them whole instead of
        # iterating (the old loop wrote them one character at a time)
        outfile.write(HEADER)
        outfile.write("\n")

        for filename in FILES:
            write_page(filename)
            outfile.writelines(write_entry(filename))
            outfile.write("\n")

        outfile.write(FOOTER)

    return os.path.join(config.LIVE + config.USER,
                        os.path.basename(os.path.realpath(config.WWW)),
                        outurl)
|
2016-04-30 04:00:55 +00:00
|
|
|
|
2016-05-03 17:14:53 +00:00
|
|
|
def write_page(filename):
    '''
    permalink generator

    * makes a page out of a single entry for permalinking, using the
      entry's date (from the filename) as the url

    returns the path of the generated page.
    '''

    outurl = os.path.join(config.WWW, "".join(util.parse_date(filename))+".html")

    # context manager so the permalink file is closed even on error
    with open(outurl, "w") as outfile:
        outfile.write("<!--generated by the tilde.town blogging platform on "
                      + time.strftime("%d %B %y")
                      + "\nhttp://tilde.town/~endorphant/ttbp/-->\n\n")

        # HEADER/FOOTER are plain strings; write them whole instead of
        # one character at a time
        outfile.write(HEADER)
        outfile.write("\n")

        outfile.writelines(write_entry(filename))
        outfile.write("\n")

        outfile.write(FOOTER)

    return outurl
|
|
|
|
|
|
|
|
def write_entry(filename):
    '''
    entry text generator

    * reads the given file from the main feels directory and renders it
      as markdown inside the html scaffolding for a single entry
    * returns the entry as a list of strings
    '''

    date = util.parse_date(filename)
    anchor = date[0] + date[1] + date[2]

    entry = [
        "\t\t<p><a name=\"" + anchor + "\"></a><br /><br /></p>\n",
        "\t\t<div class=\"entry\">\n",
        "\t\t\t<h5><a href=\"#" + anchor + "\">" + date[2] + "</a> " + chatter.month(date[1]) + " " + date[0] + "</h5>\n"
    ]

    # read the raw entry in one shot; the file is closed by the with
    with open(os.path.join(config.MAIN_FEELS, filename), "r") as rawfile:
        raw = rawfile.read()

    entry.append("\t\t\t" + mistune.markdown(raw, escape=False, hard_wrap=False))
    entry.append("\t\t\t<p class=\"permalink\"><a href=\"" + "".join(date) + ".html\">permalink</a></p>\n")
    entry.append("\n\t\t</div>\n")

    return entry
|
|
|
|
|
2016-05-27 07:21:11 +00:00
|
|
|
def write_global_feed(blogList):
    '''
    main ttbp index printer

    * sources README.md for documentation
    * takes incoming list of formatted blog links for all publishing blogs and
    prints to blog feed

    blogList: list of pre-formatted html <a> link strings, one per
    publishing user (see www_neighbors()).
    '''

    # NOTE(review): if FEED's directory is missing, the FileNotFoundError
    # handler below silently skips the whole global feed update.
    try:
        outfile = open(FEED, "w")

        ## header
        outfile.write("""\
<!DOCTYPE html PUBLIC \"-//W3C//DTD HTML 3.2//EN\">
<html>
<head>
<title>tilde.town feels engine</title>
<link rel=\"stylesheet\" href=\"style.css\" />
</head>
<body>
<div class="meta">
<h1>tilde.town feels engine</h1>

<h2><a href="https://github.com/modgethanc/ttbp">github
repo</a> | <a
href="http://tilde.town/~endorphant/blog/20160510.html">state
of the ttbp</a></h2>
<!--<p>curious? run <b>~endorphant/bin/ttbp</b> while logged in to tilde.town.</p>
<p>it's still a little volatile. let me know if anything breaks.</p>---></div>
<p> </p>
""")

        ## docs: render the project README inline
        outfile.write("""\
<div class="docs">""")
        outfile.write(mistune.markdown(open(os.path.join(config.INSTALL_PATH, "..", "README.md"), "r").read()))
        outfile.write("""\
</div>""")

        ## feed: one list item per publishing user
        outfile.write("""\
<p> </p>
<div class=\"feed\">
<h3>live feels-sharing:</h3>
<ul>""")
        for blog in blogList:
            outfile.write("""
<li>"""+blog+"""</li>\
""")

        ## footer
        outfile.write("""
</ul>
</div>
</body>
</html>
""")

        outfile.close()
        #subprocess.call(['chmod', 'a+w', FEED])
    except FileNotFoundError:
        pass
|
2016-05-27 07:21:11 +00:00
|
|
|
|
|
|
|
## misc helpers
|
2016-05-04 15:00:28 +00:00
|
|
|
|
|
|
|
def meta(entries=None):
    '''
    metadata generator

    * takes a list of filenames and returns a 2d list:
      [0] absolute path
      [1] mtime
      [2] wc -w (int, or "???" if wc failed)
      [3] timestamp "YYYY-MM-DD at HH:MM"
      [4] entry date YYYY-MM-DD
      [5] author

    entries defaults to the module-global FILES list. a None sentinel is
    used so the *current* FILES list is read at call time; the previous
    `entries=FILES` default bound the list object at definition time and
    went stale whenever load_files() rebound FILES.
    '''

    if entries is None:
        entries = FILES

    metadata = []

    for filename in entries:
        mtime = os.path.getmtime(filename)
        try:
            wc = int(subprocess.check_output(["wc", "-w", filename],
                                             stderr=subprocess.STDOUT).split()[0])
        except subprocess.CalledProcessError:
            # wc failed (unreadable file, etc); mark word count unknown
            wc = "???"
        timestamp = time.strftime("%Y-%m-%d at %H:%M", time.localtime(mtime))
        date = "-".join(util.parse_date(filename))
        # author is the home-directory name three levels up from the entry
        author = os.path.split(os.path.split(os.path.split(os.path.split(filename)[0])[0])[0])[1]

        metadata.append([filename, mtime, wc, timestamp, date, author])

    return metadata
|
|
|
|
|
2016-05-11 04:10:04 +00:00
|
|
|
def valid(filename):
    '''
    filename validator

    * checks whether the filename's basename is of the form YYYYMMDD.txt
      with a plausible date (years 1900-2099, months 01-12, days 01-31)
    * returns True/False
    '''

    stem, ext = os.path.splitext(os.path.basename(filename))

    if ext != ".txt":
        return False

    # raw string so \d is a regex class, not a (deprecated) string escape
    pattern = r'^((19|20)\d{2})(0[1-9]|1[0-2])(0[1-9]|1\d|2\d|3[01])$'

    return re.match(pattern, stem) is not None
|
|
|
|
|
2016-05-27 07:21:11 +00:00
|
|
|
def find_ttbps():
    '''
    returns a list of users with a ttbp by checking for a valid ttbprc
    '''

    # a townie counts as a ttbp user if their ttbprc config file exists
    return [townie for townie in os.listdir("/home")
            if os.path.exists(os.path.join("/home", townie, ".ttbp", "config", "ttbprc"))]
|
|
|
|
|
2017-11-21 06:02:10 +00:00
|
|
|
def publishing(username=config.USER):
    '''
    checks .ttbprc for whether or not the given user opted for www
    publishing (defaults to the current user).

    returns the value of the "publishing" setting, or None if unset.
    '''

    if username == config.USER:
        # current user's settings are already loaded into SETTINGS
        ttbprc = SETTINGS
    else:
        # close the rc file promptly instead of leaking the handle
        with open(os.path.join("/home", username, ".ttbp", "config", "ttbprc")) as rcfile:
            ttbprc = json.load(rcfile)

    return ttbprc.get("publishing")
|
|
|
|
|
|
|
|
def www_neighbors():
    '''
    takes the list of users with publishing turned on, formats each as an
    html link with a last-updated timestamp, and writes the global feed.
    '''

    userList = []

    for user in find_ttbps():
        if not publishing(user):
            continue

        # close the rc file promptly instead of leaking the handle
        with open(os.path.join("/home", user, ".ttbp", "config", "ttbprc")) as rcfile:
            userRC = json.load(rcfile)

        url = ""
        if userRC["publish dir"]:
            url = config.LIVE + user + "/" + userRC["publish dir"]

        # find the user's most recent validly-named entry, if any
        lastfile = ""
        try:
            files = os.listdir(os.path.join("/home", user, ".ttbp", "entries"))
        except OSError:
            files = []
        files.sort()
        for filename in files:
            if valid(filename):
                lastfile = os.path.join("/home", user, ".ttbp", "entries", filename)

        if lastfile:
            last = os.path.getctime(lastfile)
            timestamp = time.strftime("%Y-%m-%d at %H:%M", time.localtime(last)) + " (utc"+time.strftime("%z")[0]+time.strftime("%z")[2]+")"
        else:
            timestamp = ""
            last = 0

        userList.append(["<a href=\""+url+"\">~"+user+"</a> "+timestamp, last])

    # sort users by most recent entry, newest first
    userList.sort(key=lambda userdata: userdata[1])
    userList.reverse()
    sortedUsers = [user[0] for user in userList]

    write_global_feed(sortedUsers)
|
2016-05-15 06:36:42 +00:00
|
|
|
|
2016-05-31 19:03:34 +00:00
|
|
|
def nopub(filename):
    '''
    checks whether the given filename is in the user's NOPUB list
    '''

    entry = os.path.basename(filename)
    return entry in NOPUBS
|
2016-05-31 19:03:34 +00:00
|
|
|
|
2018-01-03 00:13:38 +00:00
|
|
|
def toggle_nopub(filename):
    """toggles pub/nopub status for the given filename

    if the file is to be unpublished, delete it from published locations

    returns "publishing" or "unpublishing" to describe the action taken.
    """

    global NOPUBS

    # NOPUBS stores basenames (that is what nopub() compares against);
    # normalize here so removal also works when a full path is passed in
    entry = os.path.basename(filename)

    action = "unpublishing"

    if nopub(filename):
        action = "publishing"
        NOPUBS.remove(entry)
    else:
        NOPUBS.append(entry)
        unpublish_feel(filename)

    # rewrite the nopub file from scratch, with its explanatory header;
    # the with-statement guarantees the handle is closed
    with open(config.NOPUB, 'w') as nopub_file:
        nopub_file.write("""\
# files that don't get published html/gopher. this file is
# generated by ttbp; editing it directly may result in unexpected
# behavior. if you have problems, back up this file, delete it, and
# rebuild it from ttbp.\n""")
        for listed in NOPUBS:
            nopub_file.write(listed+"\n")

    load_files()

    return action
|
|
|
|
|
2018-03-16 02:41:22 +00:00
|
|
|
def bury_feel(filename):
    """buries given filename; this removes the feel from any publicly-readable
    location, and moves the textfile to user's private feels directory.

    a unix timestamp is added to the filename to disambiguate and prevent
    filename collisions.

    creates the buried feels dir (mode 700) if it doesn't exist.

    regenerates the feels list and republishes; returns the path of the
    buried feel."""

    if not os.path.exists(config.BURIED_FEELS):
        os.mkdir(config.BURIED_FEELS)
        # keep buried feels private to the user
        os.chmod(config.BURIED_FEELS, 0o700)

    buryname = os.path.splitext(os.path.basename(filename))[0]+"-"+str(int(time.time()))+".txt"

    # stdlib move/chmod instead of shelling out to mv/chmod
    shutil.move(os.path.join(config.MAIN_FEELS, filename),
                os.path.join(config.BURIED_FEELS, buryname))
    os.chmod(os.path.join(config.BURIED_FEELS, buryname), 0o600)

    if publishing():
        unpublish_feel(filename)

    load_files()

    return os.path.join(config.BURIED_FEELS, buryname)
|
2018-03-16 02:41:22 +00:00
|
|
|
|
|
|
|
def delete_feel(filename):
    """deletes given filename; removes the feel from publicly-readable
    locations, then deletes the original file and reloads the feels
    list."""

    feel = os.path.join(config.MAIN_FEELS, filename)
    if os.path.exists(feel):
        # os.remove instead of shelling out to rm
        os.remove(feel)
        unpublish_feel(filename)
        load_files(config.MAIN_FEELS)
|
2018-03-16 02:41:22 +00:00
|
|
|
|
|
|
|
def unpublish_feel(filename):
    """takes given filename and removes it from public_html and gopher
    output, if those copies exist.

    index regeneration is left to the callers (e.g. load_files())."""

    # remove the rendered html permalink page, if published
    live_html = os.path.join(config.WWW,
            os.path.splitext(os.path.basename(filename))[0]+".html")
    if os.path.exists(live_html):
        # os.remove instead of shelling out to rm
        os.remove(live_html)

    # remove the raw gopher copy, if published
    live_gopher = os.path.join(config.GOPHER_PATH, filename)
    if os.path.exists(live_gopher):
        os.remove(live_gopher)
|
2018-01-03 00:13:38 +00:00
|
|
|
|
2018-03-17 02:38:01 +00:00
|
|
|
def process_backup(filename):
    """takes given backup tarball and unpacks it into a private backup
    directory, then returns a sorted list of unpacked entry filepaths
    whose names don't collide with current feels.

    files that already exist in the main feels directory are skipped to
    avoid clobbering current feels; they are left in the archive
    directory for the user to manually sort out."""

    # strip both extensions of e.g. "backup.tar.gz" to name the directory
    backup_dir = os.path.splitext(os.path.splitext(os.path.basename(filename))[0])[0]
    backup_path = os.path.join(config.BACKUPS, backup_dir)

    if not os.path.exists(backup_path):
        os.mkdir(backup_path)

    # keep the unpacked backup private to the user
    os.chmod(backup_path, 0o700)
    subprocess.call(["tar", "-C", backup_path, "-xf", filename])
    backup_entries = os.path.join(backup_path, "entries")

    current = os.listdir(config.MAIN_FEELS)

    imported = [os.path.join(backup_entries, feel)
                for feel in os.listdir(backup_entries)
                if os.path.basename(feel) not in current]

    imported.sort()
    return imported
|
2018-03-17 02:38:01 +00:00
|
|
|
|
2018-03-22 03:34:34 +00:00
|
|
|
def import_feels(backups):
    """takes a list of filepaths and copies those to current main feels.

    this does not check for collisions.

    TODO(review): not yet implemented -- currently a no-op stub.
    """

    pass
|
2018-03-17 02:38:01 +00:00
|
|
|
|
|
|
|
|
2016-05-11 04:10:04 +00:00
|
|
|
#############
|
|
|
|
#############
|
|
|
|
#############
|
|
|
|
|
2016-05-04 15:00:28 +00:00
|
|
|
def test():
    # exercise load() + meta() and dump the metadata for inspection
    load()

    for entry_meta in meta():
        print(entry_meta)
|