-
Notifications
You must be signed in to change notification settings - Fork 4
/
manage.py
132 lines (106 loc) · 3.93 KB
/
manage.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
import os
import sys
# Absolute path of the directory containing this script; every vendor and
# project path is resolved relative to it.
ROOT = os.path.abspath(os.path.dirname(__file__))

def path(*parts):
    'Join *parts* onto ROOT and return the resulting absolute path.'
    return os.path.join(ROOT, *parts)

# Ideally we'd be using a virtualenv, but this server wasn't written
# for that, so to forcibly use our version of vendor packages instead
# of locally-installed ones, we'll insert into sys.path. For more
# information, see http://stackoverflow.com/a/10097543.
sys.path.insert(1, path('vendor'))
sys.path.insert(1, path('.'))
from ezcommandline import arg, command, run
try:
import settings_local as settings
except ImportError:
import settings_env as settings
def make_storage(settings):
    'Construct the S3Storage backend described by *settings*.'
    from hackpub.s3storage import S3Storage
    # Gather the S3 credentials/bucket configuration, then hand them to
    # the storage class in one call.
    s3_options = {
        'access_key_id': settings.AWS_ACCESS_KEY_ID,
        'secret_access_key': settings.AWS_SECRET_ACCESS_KEY,
        'bucket': settings.BUCKET_NAME,
        'publish_domain': settings.PUBLISH_DOMAIN,
    }
    return S3Storage(**s3_options)
def BaseWSGIHandler(settings):
    'Return a WSGI Application wired to the storage backend for *settings*.'
    from hackpub.app import Application
    storage = make_storage(settings)
    return Application(settings=settings, storage=storage)
def WSGIHandler():
    'Assemble the top-level WSGI app: the primary bucket plus any extras.'
    from hackpub.multiplexer import Multiplexer
    # Primary app is built first, matching the original construction order.
    primary = BaseWSGIHandler(settings)
    extras = dict(
        (name, BaseWSGIHandler(settings.EXTRA_BUCKETS[name]))
        for name in settings.EXTRA_BUCKETS
    )
    return Multiplexer(primary, 'buckets', extras)
@arg('--port', help='port to serve on', type=int, default=8000)
@command
def runserver(args):
    'run development server'
    from wsgiref.simple_server import make_server
    httpd = make_server('', args.port, WSGIHandler())
    # Parenthesized print of a single argument behaves identically under
    # Python 2 and 3 (the original bare print statement is 2-only syntax).
    print('serving on port %s' % args.port)
    httpd.serve_forever()
@arg('-p', '--pattern', help='test name pattern to match', default=None)
@command
def test(args):
    'run tests'
    import unittest
    from hackpub.test import test_app, test_multiplexer
    loader = unittest.defaultTestLoader
    # test_app filters by the optional name pattern; the multiplexer
    # tests are always appended in full.
    all_tests = test_app.load_tests(loader, unittest.TestSuite(), args.pattern)
    all_tests.addTest(loader.loadTestsFromModule(test_multiplexer))
    unittest.TextTestRunner(verbosity=1).run(all_tests)
@command
def test_s3storage(args):
    'test S3Storage class against Amazon S3'
    # Live integration test: runs against the real S3 bucket in settings.
    from hackpub.test import test_s3storage as s3storage_tests
    s3storage_tests.run(settings)
@arg('-o', '--output-filename', help='filename to output to',
     default='extract.csv')
@command
def extract(args):
    'Export all published work metadata as a CSV file'
    # cPickle is the fast Python 2 pickler; fall back to plain `pickle`
    # so the command still imports where cPickle is unavailable.
    try:
        import cPickle as pickle
    except ImportError:
        import pickle
    import csv

    fieldnames = ('published-url', 'original-url', 'size', 'created')
    entries = {}
    cache_filename = settings.BUCKET_NAME + '.cache'

    # `with` guarantees the handles are closed even if the S3 walk below
    # raises partway through (the original leaked them on error).
    with open(args.output_filename, 'wb') as csvfile:
        writer = csv.DictWriter(csvfile, fieldnames)
        writer.writerow({
            'published-url': 'Published URL',
            'original-url': 'Original URL',
            'size': 'Size',
            'created': 'Date Created'
        })
        # Replay previously fetched entries from the local pickle cache so
        # we only query S3 metadata for keys we haven't seen before.
        if os.path.exists(cache_filename):
            with open(cache_filename, 'rb') as cache:
                while True:
                    try:
                        key, entry = pickle.load(cache)
                    except EOFError:
                        break
                    writer.writerow(entry)
                    entries[key] = True
        # Append newly fetched entries to the cache as we go, so an
        # interrupted run can resume where it left off.
        with open(cache_filename, 'ab') as cache:
            storage = make_storage(settings)
            for entry in storage:
                if entry.key in entries:
                    continue
                metadata = storage.get_metadata(entry.key)
                row = {
                    'created': metadata.get('created'),
                    'original-url': metadata.get('original-url'),
                    'published-url': metadata.get('published-url'),
                    'size': entry.size
                }
                entries[entry.key] = row
                pickle.dump([entry.key, row], cache)
                writer.writerow(row)
                print("added %s" % entry.key)
    print("wrote %s." % args.output_filename)
if __name__ == '__main__':
    # Dispatch to whichever @command-decorated subcommand was named on the
    # command line (run comes from ezcommandline, imported above).
    run()