Skip to content

Commit

Permalink
Merge branch 'internetarchive:master' into master
Browse files Browse the repository at this point in the history
  • Loading branch information
benbdeitch authored Feb 14, 2024
2 parents 50b1dc9 + 2f59017 commit eb494cd
Show file tree
Hide file tree
Showing 52 changed files with 10,622 additions and 2,994 deletions.
21 changes: 21 additions & 0 deletions .github/workflows/auto_unassigner.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@
# Daily workflow that unassigns stale issue assignees via a Node script.
name: auto_unassigner
on:
  schedule:  # Runs daily at 08:00 UTC (before the new_comment_digest workflow).
    - cron: '0 8 * * *'
  workflow_dispatch:  # NOTE(review): on-demand trigger — original comment asks "do we want this?"; confirm.
permissions:
  contents: read
  issues: write  # Required so the script can modify issue assignees.

jobs:
  auto_unassigner:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-node@v4
        with:
          node-version: '20'
      - run: npm install @octokit/action
      - run: node scripts/gh_scripts/auto_unassigner.mjs --repoOwner $GITHUB_REPOSITORY_OWNER
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
4 changes: 2 additions & 2 deletions .github/workflows/javascript_tests.yml
Original file line number Diff line number Diff line change
Expand Up @@ -22,11 +22,11 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v3
- uses: actions/setup-node@v4
with:
# Should match what's in our Dockerfile
node-version: '20' # Also update the `key` in the `with` map, below
- uses: actions/cache@v3
- uses: actions/cache@v4
id: npm-cache
with:
# Caching node_modules isn't recommended because it can break across
Expand Down
22 changes: 22 additions & 0 deletions .github/workflows/new_comment_digest.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,22 @@
# Daily workflow that posts a digest of new issue comments to Slack.
name: new_comment_digest
on:
  schedule:  # Runs daily at 08:30 UTC.
    - cron: '30 8 * * *'
  workflow_dispatch:  # Allows running on-demand.
permissions:
  contents: read

jobs:
  new_comment_digest:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v4
        with:
          # Quoted so YAML never parses a version like 3.10 as the float 3.1.
          python-version: '3.x'
      - run: pip install requests
      # Args: hours to look back (24), then Slack channel and token.
      - run: scripts/gh_scripts/issue_comment_bot.py 24 "$SLACK_CHANNEL" "$SLACK_TOKEN"
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          SLACK_TOKEN: ${{ secrets.SLACK_TOKEN }}
          SLACK_CHANNEL: ${{ secrets.SLACK_CHANNEL_ABC_TEAM_PLUS }}
2 changes: 1 addition & 1 deletion .github/workflows/python_tests.yml
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@ jobs:
- uses: actions/setup-python@v4
with:
python-version-file: pyproject.toml
- uses: actions/cache@v3
- uses: actions/cache@v4
with:
path: ${{ env.pythonLocation }}
key: ${{ runner.os }}-venv-${{ env.pythonLocation }}-${{ hashFiles('requirements*.txt') }}
Expand Down
1 change: 1 addition & 0 deletions .pre-commit-config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -47,6 +47,7 @@ repos:
- id: codespell # See pyproject.toml for args
additional_dependencies:
- tomli
args: ["--skip", "*.mjs"]

- repo: https://github.com/MarcoGorelli/cython-lint
rev: v0.16.0
Expand Down
10 changes: 6 additions & 4 deletions compose.production.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -82,8 +82,8 @@ services:
user: root
command: docker/ol-nginx-start.sh
environment:
- CRONTAB_FILES="/etc/cron.d/archive-webserver-logs /etc/cron.d/certbot"
- NGINX_DOMAIN="covers.openlibrary.org"
- CRONTAB_FILES=/etc/cron.d/archive-webserver-logs /etc/cron.d/certbot
- NGINX_DOMAIN=covers.openlibrary.org
restart: unless-stopped
hostname: "$HOSTNAME"
depends_on:
Expand Down Expand Up @@ -173,8 +173,8 @@ services:
user: root
command: docker/ol-nginx-start.sh
environment:
- CRONTAB_FILES="/etc/cron.d/archive-webserver-logs /etc/cron.d/pull-sitemaps-from-ol-home0 /etc/cron.d/certbot"
- NGINX_DOMAIN="openlibrary.org"
- CRONTAB_FILES=/etc/cron.d/archive-webserver-logs /etc/cron.d/pull-sitemaps-from-ol-home0 /etc/cron.d/certbot
- NGINX_DOMAIN=openlibrary.org www.openlibrary.org
volumes:
# letsencrypt
- letsencrypt-data:/etc/letsencrypt
Expand All @@ -195,6 +195,8 @@ services:
# Archive nginx logs regularly
- ../olsystem/etc/cron.d/archive-webserver-logs:/etc/cron.d/archive-webserver-logs
- archive-webserver-logs-data:/archive-webserver-logs-data
# Sitemaps
- ../olsystem/etc/cron.d/pull-sitemaps-from-ol-home0:/etc/cron.d/pull-sitemaps-from-ol-home0
ports:
- 80:80
- 443:443
Expand Down
1 change: 1 addition & 0 deletions conf/coverstore.yml
Original file line number Diff line number Diff line change
Expand Up @@ -12,4 +12,5 @@ sentry:
enabled: false
# Dummy endpoint; where sentry logs are sent to
dsn: 'https://examplePublicKey@o0.ingest.sentry.io/0'
traces_sample_rate: 1.0
environment: 'local'
4 changes: 2 additions & 2 deletions docker/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -113,7 +113,7 @@ Note: please update this README with the exact wording of the error if you run into it

The following should populate the target of the `infogami` symbolic link (i.e. `vendor/infogami/`):
```
cd local-openlibrary-dev-directory
cd path/to/your/cloned/openlibrary
git submodule init; git submodule sync; git submodule update
```

Expand All @@ -126,7 +126,7 @@ Ensure you're running `docker compose` commands from within the `local-openlibra
## Teardown commands

```sh
cd local-openlibrary-dev-directory
cd path/to/your/cloned/openlibrary
# stop the app (if started in detached mode)
docker compose down

Expand Down
15 changes: 13 additions & 2 deletions docker/ol-nginx-start.sh
Original file line number Diff line number Diff line change
@@ -1,9 +1,20 @@
#!/bin/bash

if [ -d "/etc/letsencrypt/live/$NGINX_DOMAIN" ] ; then
certbot certonly --webroot --webroot-path /openlibrary/static -d $NGINX_DOMAIN
# Create certs for domains missing them
RUN_CERTBOT=0
CERTBOT_OPTIONS=""
for domain in $NGINX_DOMAIN; do
CERTBOT_OPTIONS+=" -d $domain"
if [ ! -d "/etc/letsencrypt/live/$domain" ]; then
RUN_CERTBOT=1
fi
done

if [ "$RUN_CERTBOT" -eq 1 ]; then
certbot certonly --webroot --webroot-path /openlibrary/static $CERTBOT_OPTIONS
fi

# Run crontab if there are files
if [ -n "$CRONTAB_FILES" ] ; then
cat $CRONTAB_FILES | crontab -
service cron start
Expand Down
10 changes: 6 additions & 4 deletions openlibrary/components/MergeUI.vue
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,8 @@ import { do_merge, update_merge_request, createMergeRequest, DEFAULT_EDITION_LIM
const DO_MERGE = 'Do Merge'
const REQUEST_MERGE = 'Request Merge'
const LOADING = 'Loading...'
const SAVING = 'Saving...'
export default {
name: 'app',
Expand All @@ -51,7 +53,7 @@ export default {
data() {
return {
url: new URL(location.toString()),
mergeStatus: 'Loading...',
mergeStatus: LOADING,
mergeOutput: null,
show_diffs: false,
comment: ''
Expand All @@ -78,8 +80,8 @@ export default {
const readyCta = this.isSuperLibrarian ? DO_MERGE : REQUEST_MERGE
this.$watch(
'$refs.mergeTable.merge',
(new_value, old_value) => {
if (new_value && new_value !== old_value) this.mergeStatus = readyCta;
(new_value) => {
if (new_value !== undefined && this.mergeStatus === LOADING) this.mergeStatus = readyCta;
}
);
},
Expand All @@ -88,7 +90,7 @@ export default {
if (!this.$refs.mergeTable.merge) return;
const { record: master, dupes, editions_to_move, unmergeable_works } = this.$refs.mergeTable.merge;
this.mergeStatus = 'Saving...';
this.mergeStatus = SAVING;
if (this.isSuperLibrarian) {
// Perform the merge and create new/update existing merge request
try {
Expand Down
75 changes: 35 additions & 40 deletions openlibrary/core/cache.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
import time
import threading
import functools
from typing import Literal
from typing import Any, Literal, cast
from collections.abc import Callable

import memcache
Expand Down Expand Up @@ -334,15 +334,32 @@ def memcache(self):

return MockMemcacheClient()

def get(self, key):
key = web.safestr(key)
def _encode_key(self, key: str) -> str:
return cast(str, web.safestr(key))

def get(self, key: str) -> Any:
key = self._encode_key(key)
stats.begin("memcache.get", key=key)
value = self.memcache.get(key)
stats.end(hit=value is not None)
return value and json.loads(value)

def set(self, key, value, expires=0):
key = web.safestr(key)
def get_multi(self, keys: list[str]) -> dict[str, Any]:
keys = [self._encode_key(k) for k in keys]
stats.begin("memcache.get_multi")
d = self.memcache.get_multi(keys)
stats.end(hit=bool(d))
return {k: json.loads(v) for k, v in d.items()}

def set_multi(self, mapping: dict[str, Any], expires=0):
mapping = {self._encode_key(k): json.dumps(v) for k, v in mapping.items()}
stats.begin("memcache.set_multi")
d = self.memcache.set_multi(mapping, expires)
stats.end()
return d

def set(self, key: str, value: Any, expires=0):
key = cast(str, web.safestr(key))
value = json.dumps(value)
stats.begin("memcache.set", key=key)
value = self.memcache.set(key, value, expires)
Expand Down Expand Up @@ -394,7 +411,7 @@ def delete(self, key):


def get_memcache():
return memcache_cache.memcache
return memcache_cache


def _get_cache(engine):
Expand Down Expand Up @@ -464,23 +481,19 @@ def get_page(key):

def __init__(
self,
engine: Literal["memory", "memcache", "request"] = "memory",
key=None,
engine: Literal["memory", "memcache", "request"],
key: str | Callable[..., str | tuple],
expires: int = 0,
background: bool = False,
cacheable: Callable | None = None,
):
self.cache = _get_cache(engine)
self.keyfunc = self._make_key_func(key)
self.keyfunc = (
key if callable(key) else functools.partial(build_memcache_key, key)
)
self.cacheable = cacheable
self.expires = expires

def _make_key_func(self, key):
if isinstance(key, str):
return PrefixKeyFunc(key)
else:
return key

def __call__(self, f):
"""Returns the memoized version of f."""

Expand Down Expand Up @@ -544,33 +557,15 @@ def cache_set(self, key: str | tuple, value):
return self.cache.set(key, value, expires=self.expires)


class PrefixKeyFunc:
"""A function to generate cache keys using a prefix and arguments."""

def __init__(self, prefix):
self.prefix = prefix
def build_memcache_key(prefix: str, *args, **kw) -> str:
key = prefix

def __call__(self, *a, **kw):
return self.prefix + "-" + self.encode_args(a, kw)
if args:
key += "-" + json.dumps(args, separators=(",", ":"), sort_keys=True)[1:-1]
if kw:
key += "-" + json.dumps(kw, separators=(",", ":"), sort_keys=True)

def encode_args(self, args, kw=None):
kw = kw or {}
"""Encodes arguments to construct the memcache key.
"""
# strip [ and ] from key
a = self.json_encode(list(args))[1:-1]

if kw:
return a + "-" + self.json_encode(kw)
else:
return a

def json_encode(self, value):
"""json.dumps without extra spaces and consistent ordering of dictionary keys.
memcache doesn't like spaces in the key.
"""
return json.dumps(value, separators=(",", ":"), sort_keys=True)
return key


def method_memoize(f):
Expand Down
Loading

0 comments on commit eb494cd

Please sign in to comment.